Index: compile_rules.py
==================================================================
--- compile_rules.py
+++ compile_rules.py
@@ -583,9 +583,9 @@
         "paragraph_rules_JS": jsconv.writeRulesToJSArray(mergeRulesByOption(lParagraphRulesJS)),
         "sentence_rules_JS": jsconv.writeRulesToJSArray(mergeRulesByOption(lSentenceRulesJS))
     }
     d.update(dOptions)
 
     # compile graph rules
-    d2 = crg.make(lGraphRule, sLang, bJavaScript)
+    d2 = crg.make(lGraphRule, dDEF, sLang, bJavaScript)
     d.update(d2)
     return d

Index: compile_rules_graph.py
==================================================================
--- compile_rules_graph.py
+++ compile_rules_graph.py
@@ -32,17 +32,17 @@
     # tokens used as parameter
     s = re.sub(r"[\\](\d+)", 'lToken[\\1+nTokenOffset]', s)
     return s
 
 
-def genTokenLines (sTokenLine):
+def genTokenLines (sTokenLine, dDef):
     "tokenize a string and return a list of lines of tokens"
     lToken = sTokenLine.split()
     lTokenLines = None
     for i, sToken in enumerate(lToken):
-        if sToken.startswith("{") and sToken.endswith("}") and sToken in dDEF:
-            lToken[i] = dDEF[sToken]
+        if sToken.startswith("{") and sToken.endswith("}") and sToken in dDef:
+            sToken = dDef[sToken]
         if ( (sToken.startswith("[") and sToken.endswith("]")) or (sToken.startswith("([") and sToken.endswith("])")) ):
             bSelectedGroup = sToken.startswith("(") and sToken.endswith(")")
             if bSelectedGroup:
                 sToken = sToken[1:-1]
             # multiple token
@@ -72,13 +72,13 @@
                 aRule.append(sToken)
     for aRule in lTokenLines:
         yield aRule
 
 
-def createRule (iLine, sRuleName, sTokenLine, iActionBlock, sActions, nPriority):
+def createRule (iLine, sRuleName, sTokenLine, iActionBlock, sActions, nPriority, dDef):
     # print(iLine, "//", sRuleName, "//", sTokenLine, "//", sActions, "//", nPriority)
-    for lToken in genTokenLines(sTokenLine):
+    for lToken in genTokenLines(sTokenLine, dDef):
         # Calculate positions
         dPos = {}   # key: iGroup, value: iToken
         iGroup = 0
         for i, sToken in enumerate(lToken):
             if sToken.startswith("(") and sToken.endswith(")"):
@@ -223,11 +223,11 @@
     else:
         print("# Unknown action at line " + sIdAction)
         return None
 
 
-def make (lRule, sLang, bJavaScript):
+def make (lRule, dDef, sLang, bJavaScript):
     "compile rules, returns a dictionary of values"
     # for clarity purpose, don’t create any file here
 
     # removing comments, zeroing empty lines, creating definitions, storing tests, merging rule lines
     print(" parsing rules...")
@@ -294,11 +294,11 @@
     # processing rules
     print(" preparing rules...")
     for sGraphName, lRuleLine in dAllGraph.items():
         lPreparedRule = []
         for i, sRuleGroup, sTokenLine, iActionBlock, sActions, nPriority in lRuleLine:
-            for lRule in createRule(i, sRuleGroup, sTokenLine, iActionBlock, sActions, nPriority):
+            for lRule in createRule(i, sRuleGroup, sTokenLine, iActionBlock, sActions, nPriority, dDef):
                 lPreparedRule.append(lRule)
         # Show rules
         for e in lPreparedRule:
             print(e)
         # Graph creation

Index: gc_core/py/lang_core/gc_engine.py
==================================================================
--- gc_core/py/lang_core/gc_engine.py
+++ gc_core/py/lang_core/gc_engine.py
@@ -708,11 +708,11 @@
                                     dPriority[nErrorStart] = eAct[2]
                                     if bDebug:
                                         print("-", sRuleId, dErrs[nErrorStart])
                             elif cActionType == "~":
                                 # text processor
-                                self._tagAndPrepareTokenForRewriting(sWhat, nTokenOffset + eAct[0], nTokenOffset + eAct[1])
+                                self._tagAndPrepareTokenForRewriting(sWhat, nTokenOffset + eAct[0], nTokenOffset + eAct[1], bDebug)
                                 if bDebug:
                                     print("~", sRuleId)
                                 bChange = True
                             elif cActionType == "=":
                                 # disambiguation
@@ -729,11 +729,11 @@
                             elif cActionType == ">":
                                 if bDebug:
print(">!", sRuleId) break except Exception as e: - raise Exception(str(e), sLineId) + raise Exception(str(e), sLineId, sRuleId, self.sSentence) return bChange, dErrs def _createWriterError (self, sSugg, nTokenOffset, iFirstToken, nStart, nEnd, sLineId, sRuleId, bUppercase, sMsg, sURL, bShowRuleId, sOption, bContext): "error for Writer (LO/OO)" xErr = SingleProofreadingError() @@ -818,12 +818,14 @@ for m in re.finditer(r"\\([0-9]+)", sMsg): sMsg = sMsg.replace(m.group(0), self.lToken[int(m.group(1))+nTokenOffset]["sValue"]) #print(">", sMsg) return sMsg - def _tagAndPrepareTokenForRewriting (self, sWhat, nTokenRewriteStart, nTokenRewriteEnd, bUppercase=True): + def _tagAndPrepareTokenForRewriting (self, sWhat, nTokenRewriteStart, nTokenRewriteEnd, bUppercase=True, bDebug=False): "text processor: rewrite tokens between and position" + if bDebug: + print("REWRITING:", nTokenRewriteStart, nTokenRewriteEnd) if sWhat == "*": # purge text if nTokenRewriteEnd - nTokenRewriteStart == 0: self.lToken[nTokenRewriteStart]["bToRemove"] = True else: