Index: compile_rules_graph.py
==================================================================
--- compile_rules_graph.py
+++ compile_rules_graph.py
@@ -20,24 +20,23 @@
     s = re.sub(r"isRealStart0 *\(\)", 'before0([""])', s)
     s = re.sub(r"isEnd *\(\)", 'after(["", ","])', s)
     s = re.sub(r"isRealEnd *\(\)", 'after([""])', s)
     s = re.sub(r"isEnd0 *\(\)", 'after0(["", ","])', s)
     s = re.sub(r"isRealEnd0 *\(\)", 'after0([""])', s)
-    s = re.sub(r"(select|exclude)[(][\\](\d+)", '\\1(lToken[\\2]', s)
-    s = re.sub(r"define[(][\\](\d+)", 'define(lToken[\\1]', s)
-    s = re.sub(r"(morph|morphex|displayInfo)[(]\\(\d+)", '\\1(lToken[\\2]', s)
+    s = re.sub(r"(select|exclude|define)[(][\\](\d+)", 'g_\\1(lToken[\\2+nTokenOffset]', s)
+    s = re.sub(r"(morph|morphex|displayInfo)[(]\\(\d+)", 'g_\\1(lToken[\\2+nTokenOffset]', s)
     s = re.sub(r"token\(\s*(\d)", 'nextToken(\\1', s)                                    # token(n)
     s = re.sub(r"token\(\s*-(\d)", 'prevToken(\\1', s)                                   # token(-n)
     s = re.sub(r"before\(\s*", 'look(s[:m.start()], ', s)                                # before(s)
     s = re.sub(r"after\(\s*", 'look(s[m.end():], ', s)                                   # after(s)
     s = re.sub(r"textarea\(\s*", 'look(s, ', s)                                          # textarea(s)
     s = re.sub(r"before_chk1\(\s*", 'look_chk1(dDA, s[:m.start()], 0, ', s)              # before_chk1(s)
     s = re.sub(r"after_chk1\(\s*", 'look_chk1(dDA, s[m.end():], m.end(), ', s)           # after_chk1(s)
     s = re.sub(r"textarea_chk1\(\s*", 'look_chk1(dDA, s, 0, ', s)                        # textarea_chk1(s)
-    s = re.sub(r"isEndOfNG\(\s*\)", 'isEndOfNG(dDA, s[m.end():], m.end())', s)           # isEndOfNG(s)
-    s = re.sub(r"isNextNotCOD\(\s*\)", 'isNextNotCOD(dDA, s[m.end():], m.end())', s)     # isNextNotCOD(s)
-    s = re.sub(r"isNextVerb\(\s*\)", 'isNextVerb(dDA, s[m.end():], m.end())', s)         # isNextVerb(s)
+    #s = re.sub(r"isEndOfNG\(\s*\)", 'isEndOfNG(dDA, s[m.end():], m.end())', s)          # isEndOfNG(s)
+    #s = re.sub(r"isNextNotCOD\(\s*\)", 'isNextNotCOD(dDA, s[m.end():], m.end())', s)    # isNextNotCOD(s)
+    #s = re.sub(r"isNextVerb\(\s*\)", 'isNextVerb(dDA, s[m.end():], m.end())', s)        # isNextVerb(s)
     s = re.sub(r"\bspell *[(]", '_oSpellChecker.isValid(', s)
     s = re.sub(r"[\\](\d+)", 'lToken[\\1]', s)
     return s
@@ -315,11 +314,11 @@
     print("  creating callables...")
     sPyCallables = "# generated code, do not edit\n"
     #sJSCallables = "// generated code, do not edit\nconst oEvalFunc = {\n"
     for sFuncName, sReturn in lFUNCTIONS:
         if sFuncName.startswith("g_c_"): # condition
-            sParams = "lToken, sCountry, bCondMemo"
+            sParams = "lToken, nTokenOffset, sCountry, bCondMemo"
         elif sFuncName.startswith("g_m_"): # message
             sParams = "lToken"
         elif sFuncName.startswith("g_s_"): # suggestion
             sParams = "lToken"
         elif sFuncName.startswith("g_p_"): # preprocessor
Index: gc_core/py/lang_core/gc_engine.py
==================================================================
--- gc_core/py/lang_core/gc_engine.py
+++ gc_core/py/lang_core/gc_engine.py
@@ -728,19 +728,19 @@
             for dNode in self._getNextMatchingNodes(dToken, dGraph[0]):
                 lPointer.append({"nOffset": dToken["i"], "dNode": dNode})
             # check if there is rules to check for each pointer
             for dPointer in lPointer:
                 if "" in dPointer["dNode"]:
-                    bHasChanged, errs = self._executeActions(dPointer["dNode"][""], dPointer["nOffset"]-1, dPriority, dOpt, bShowRuleId, bContext)
+                    bHasChanged, errs = self._executeActions(dPointer["dNode"][""], dPointer["nOffset"]-1, dPriority, dOpt, sCountry, bShowRuleId, bContext)
                     dErr.update(errs)
                     if bHasChanged:
                         bChange = True
         if dErr:
             print(dErr)
         return (bChange, dErr)

-    def _executeActions (self, dNode, nTokenOffset, dPriority, dOpt, bShowRuleId, bContext):
+    def _executeActions (self, dNode, nTokenOffset, dPriority, dOpt, sCountry, bShowRuleId, bContext):
         #print(locals())
         dErrs = {}
         bChange = False
         for sLineId, nextNodeKey in dNode.items():
             for sRuleId in dGraph[nextNodeKey]:
@@ -747,11 +747,11 @@
                     print(sRuleId)
                 bCondMemo = None
                 sFuncCond, cActionType, sWhat, *eAct = dRule[sRuleId]
                 # action in lActions: [ condition, action type, replacement/suggestion/action[, iTokenStart, iTokenEnd[, nPriority, message, URL]] ]
                 try:
-                    bCondMemo = not sFuncCond or globals()[sFuncCond](self, sCountry, bCondMemo)
+                    bCondMemo = not sFuncCond or globals()[sFuncCond](self.lToken, nTokenOffset, sCountry, bCondMemo)
                     if bCondMemo:
                         if cActionType == "-":
                             # grammar error
                             print("-")
                             nTokenErrorStart = nTokenOffset + eAct[0]
@@ -858,24 +858,10 @@
     # search sPattern
     zPattern = re.compile(sPattern)
     return any(zPattern.search(sMorph) for sMorph in lMorph)


-#### Go outside the rule scope
-
-def g_nextToken (i):
-    pass
-
-def g_prevToken (i):
-    pass
-
-def g_look ():
-    pass
-
-def g_lookAndCheck ():
-    pass
-

 #### Disambiguator

 def g_select (dToken, sPattern, lDefault=None):
     "select morphologies for <dToken> according to <sPattern>, always return True"
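
Note (not part of the patch): a minimal sketch of how the two halves fit together after this change, assuming a hypothetical rule that tests the morphology of its second token. Only the g_c_* parameter list (lToken, nTokenOffset, sCountry, bCondMemo), the g_ prefix and the lToken[n+nTokenOffset] indexing come from the diff above; the rule name and the g_morph arguments are illustrative, and the module-level g_morph helper is assumed to exist.

# Hypothetical condition callable as compile_rules_graph.py would now emit it:
# "morph(\2, ...)" in the rule source becomes a call on lToken[2+nTokenOffset],
# with the token offset passed in instead of being baked into the index.
def g_c_example_rule_1 (lToken, nTokenOffset, sCountry, bCondMemo):
    return g_morph(lToken[2+nTokenOffset], ":V")    # pattern ":V" is illustrative

# gc_engine._executeActions() then resolves the callable by name and invokes it
# with the sentence tokens and the current offset:
#   bCondMemo = not sFuncCond or globals()[sFuncCond](self.lToken, nTokenOffset, sCountry, bCondMemo)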