Index: compile_rules_graph.py
==================================================================
--- compile_rules_graph.py
+++ compile_rules_graph.py
@@ -73,13 +73,12 @@
     return sCode
 
 
 def genTokenLines (sTokenLine, dDef):
     "tokenize a string and return a list of lines of tokens"
-    lToken = sTokenLine.split()
     lTokenLines = []
-    for sToken in lToken:
+    for sToken in sTokenLine.split():
         # replace merger characters by spaces
         if "␣" in sToken:
             sToken = sToken.replace("␣", " ")
         # optional token?
         bNullPossible = sToken.startswith("?") and sToken.endswith("¿")
@@ -204,11 +203,11 @@
         sAction = sAction[m.end():].strip()
     if nPriority == -1:
         nPriority = dOptPriority.get(sOption, 4)
     # valid action?
-    m = re.search(r"(?P<action>[-~=/!>])(?P<start>-?\d+\.?|)(?P<end>:\.?-?\d+|)(?P<casing>:|)>>", sAction)
+    m = re.search(r"(?P<action>[-=~/!>])(?P<start>-?\d+\.?|)(?P<end>:\.?-?\d+|)(?P<casing>:|)>>", sAction)
     if not m:
         print(" # Error. No action found at: ", sActionId)
         return None
     # Condition
@@ -361,11 +360,11 @@
                 nPriority = int(m.group(2)[1:]) if m.group(2) else -1
             else:
                 print("Syntax error in rule group: ", sLine, " -- line:", i)
                 exit()
         elif re.search("^ +<<- ", sLine) or (sLine.startswith(" ") and not sLine.startswith(" ||")) \
-                or re.search("^ +#", sLine) or re.search(r"[-~=>/!](?:-?\d\.?(?::\.?-?\d+|)|)>> ", sLine) :
+                or re.search("^ +#", sLine) or re.search(r"[-=~/!>](?:-?\d\.?(?::\.?-?\d+|)|)>> ", sLine) :
             # actions
             sActions += " " + sLine.strip()
         elif re.match("[  ]*$", sLine):
             # empty line to end merging
             if not lTokenLine:
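
Note on the regex change (not part of the patch above): the only edit to the two action-header patterns is the order of the characters inside the character class, [-~=/!>] versus [-=~/!>]. Because "-" remains the first character in both, it is still a literal hyphen, so the old and new classes match exactly the same set of action markers; the change only harmonizes the character order between the two patterns touched by the second and third hunks. A minimal standalone sketch of that equivalence, using a simplified pattern without named groups and made-up action headers:

import re

# Simplified forms of the action-header pattern: only the character-class
# order differs, so both accept the same (hypothetical) action headers.
OLD = re.compile(r"[-~=/!>](-?\d+\.?|)(:\.?-?\d+|)(:|)>>")
NEW = re.compile(r"[-=~/!>](-?\d+\.?|)(:\.?-?\d+|)(:|)>>")

# One sample header per action character: -, ~, =, /, !, >
for sAction in ("->> suggestion", "~1>> rewrite", "=>> callback", "/-1:2>> text", "!3>>", ">>>"):
    assert bool(OLD.search(sAction)) == bool(NEW.search(sAction))
print("old and new character classes accept the same action headers")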