@@ -27,15 +27,15 @@
     return sFuncName if not bStartWithEqual else "="+sFuncName
 
 
 def storeAction (sActionId, aAction):
     "store <aAction> in <dACTIONS> avoiding duplicates"
     nVar = 0
     while True:
-        sActionName = sActionId + str(nVar)
+        sActionName = sActionId + "_" + str(nVar)
         if sActionName not in dACTIONS:
             dACTIONS[sActionName] = aAction
             return sActionName
         elif aAction == dACTIONS[sActionName]:
             return sActionName
         nVar += 1
 
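For context, here is a minimal standalone sketch (the action tuples are hypothetical, not part of the changeset) showing how the underscore-suffixed names behave after this change: an identical action reuses its existing name, while a different action under the same id gets the next free suffix.

dACTIONS = {}

def storeAction (sActionId, aAction):
    "store <aAction> in <dACTIONS> avoiding duplicates"
    nVar = 0
    while True:
        sActionName = sActionId + "_" + str(nVar)
        if sActionName not in dACTIONS:
            dACTIONS[sActionName] = aAction
            return sActionName
        elif aAction == dACTIONS[sActionName]:
            return sActionName
        nVar += 1

print(storeAction("rule1", ("-", "error")))    # rule1_0  (new entry)
print(storeAction("rule1", ("-", "error")))    # rule1_0  (duplicate, name reused)
print(storeAction("rule1", ("~", "rewrite")))  # rule1_1  (same id, different action)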
@@ -62,14 +62,15 @@
     sCode = re.sub(r"(morph|analyse|value)\(>(\d+)", 'g_\\1(g_token(lToken, nLastToken+\\2)', sCode)       # next token
     sCode = re.sub(r"(morph|analyse|value)\(<(\d+)", 'g_\\1(g_token(lToken, nTokenOffset+1-\\2)', sCode)   # previous token
     sCode = re.sub(r"\bspell *[(]", '_oSpellChecker.isValid(', sCode)
     sCode = re.sub(r"\bbefore\(\s*", 'look(sSentence[:lToken[1+nTokenOffset]["nStart"]], ', sCode)         # before(sCode)
     sCode = re.sub(r"\bafter\(\s*", 'look(sSentence[lToken[nLastToken]["nEnd"]:], ', sCode)                # after(sCode)
+    sCode = re.sub(r"\bbefore0\(\s*", 'look(sSentence0[:lToken[1+nTokenOffset]["nStart"]], ', sCode)       # before0(sCode)
     sCode = re.sub(r"\bafter0\(\s*", 'look(sSentence0[lToken[nLastToken]["nEnd"]:], ', sCode)              # after0(sCode)
     sCode = re.sub(r"analyseWord[(]", 'analyse(', sCode)
     sCode = re.sub(r"[\\](\d+)", 'lToken[nTokenOffset+\\1]["sValue"]', sCode)
     sCode = re.sub(r"[\\]-(\d+)", 'lToken[nLastToken-\\1+1]["sValue"]', sCode)
     return sCode
 
 
 def genTokenLines (sTokenLine, dDef):
     "tokenize a string and return a list of lines of tokens"
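As an illustration of the last two substitutions, a minimal runnable sketch (the sample condition string is hypothetical): \N references the Nth token of the match counted from the start, \-N the Nth token counted from the end.

import re

sCondition = r"\2 == \-1"  # hypothetical rule condition comparing token 2 with the last token
sCondition = re.sub(r"[\\](\d+)", 'lToken[nTokenOffset+\\1]["sValue"]', sCondition)
sCondition = re.sub(r"[\\]-(\d+)", 'lToken[nLastToken-\\1+1]["sValue"]', sCondition)
print(sCondition)
# -> lToken[nTokenOffset+2]["sValue"] == lToken[nLastToken-1+1]["sValue"]

Note that nLastToken-1+1 simplifies to nLastToken, i.e. \-1 resolves to the last token of the match.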