Overview
| Comment: | [build][core] look before, look after |
|---|---|
| SHA3-256: | 09da5a46c8e534edc9633062d76e36b5 |
| User & Date: | olr on 2018-06-22 12:50:12 |
Context
2018-06-22
| 12:51 | [build][core] look before, look after (fix spaces) | check-in: cdcc60d8eb | user: olr | tags: core, build, rg |
| 12:50 | [build][core] look before, look after | check-in: 09da5a46c8 | user: olr | tags: core, build, rg |
| 12:23 | [build] check casing within rules condition | check-in: 0d097b9fcd | user: olr | tags: build, rg |
Changes
Modified compile_rules_graph.py from [8db3f5f069] to [53e98a16a4].
︙
(old lines 18-31 → new lines 18-35; four lines added)
s = re.sub(r"(select|exclude|define)[(][\\](\d+)", 'g_\\1(lToken[\\2+nTokenOffset]', s)
s = re.sub(r"(tag_before|tag_after)[(][\\](\d+)", 'g_\\1(lToken[\\2+nTokenOffset]', s)
s = re.sub(r"(switchGender|has(?:Mas|Fem)Form)[(]\\(\d+)", '\\1(lToken[\\2+nTokenOffset]["sValue"]', s)
s = re.sub(r"(morph|analyse)\(>1", 'g_\\1(lToken[nLastToken+1]', s) # next token
s = re.sub(r"(morph|analyse)\(<1", 'g_\\1(lToken[nTokenOffset]', s) # previous token
s = re.sub(r"[\\](\d+)\.is(upper|lower|title)\(\)", 'lToken[\\1+nTokenOffset]["sValue"].is(\\2)()', s)
s = re.sub(r"\bspell *[(]", '_oSpellChecker.isValid(', s)
s = re.sub(r"before\(\s*", 'look(sSentence[:m.start()], ', s) # before(s)
s = re.sub(r"after\(\s*", 'look(sSentence[lToken[nLastToken]["nEnd"]:], ', s) # after(s)
s = re.sub(r"before0\(\s*", 'look(sSentence0[:m.start()], ', s) # before0(s)
s = re.sub(r"after0\(\s*", 'look(sSentence[lToken[nLastToken]["nEnd"]:], ', s) # after0(s)
if bTokenValue:
    # token values are used as parameter
    s = re.sub(r"[\\](\d+)", 'lToken[\\1+nTokenOffset]["sValue"]', s)
else:
    # tokens used as parameter
    s = re.sub(r"[\\](\d+)", 'lToken[\\1+nTokenOffset]', s)
return s
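The substitutions added in this hunk let graph-rule conditions call before(), after(), before0() and after0(); at build time each call is rewritten into a look() test over the sentence text preceding the match or following the last matched token. Below is a minimal sketch of the effect, not part of the check-in: the sample condition and the helper name _rewrite_before_after are made up, while the pattern and replacement strings are the ones from the hunk above.

import re

def _rewrite_before_after (s):
    # before(x): test the sentence text that precedes the match
    s = re.sub(r"before\(\s*", 'look(sSentence[:m.start()], ', s)
    # after(x): test the sentence text that follows the last matched token
    s = re.sub(r"after\(\s*", 'look(sSentence[lToken[nLastToken]["nEnd"]:], ', s)
    return s

print(_rewrite_before_after('before("(?i)ne ") and after(" pas")'))
# -> look(sSentence[:m.start()], "(?i)ne ") and look(sSentence[lToken[nLastToken]["nEnd"]:], " pas")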
︙
(old lines 332-338 → new lines 336-350)
# creating file with all functions callable by rules
print(" creating callables...")
sPyCallables = "# generated code, do not edit\n"
#sJSCallables = "// generated code, do not edit\nconst oEvalFunc = {\n"
for sFuncName, sReturn in dFUNCTIONS.items():
    if sFuncName.startswith("g_c_"): # condition
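This second hunk belongs to the step that writes out every function a rule can call: dFUNCTIONS maps a generated function name to the Python expression it must evaluate, and names prefixed g_c_ are conditions. A minimal sketch of that emission loop follows, under stated assumptions: the sample entry, the parameter list and the exact formatting of the generated file are illustrative, not the project's actual output.

dFUNCTIONS = {
    "g_c_example_1": 'look(sSentence[:m.start()], "(?i)ne ")',    # hypothetical condition expression
}

sPyCallables = "# generated code, do not edit\n"
for sFuncName, sReturn in dFUNCTIONS.items():
    if sFuncName.startswith("g_c_"):  # condition
        # emit one function per condition; signature is a guess for illustration
        sPyCallables += "def " + sFuncName + " (lToken, nTokenOffset, nLastToken, sSentence, sSentence0, m):\n"
        sPyCallables += "    return " + sReturn + "\n"
print(sPyCallables)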
︙
Modified gc_core/py/lang_core/gc_engine.py from [3f95e0d88e] to [16275f0b5d].
︙
(old lines 710-716 → new lines 710-724)
sOption, sFuncCond, cActionType, sWhat, *eAct = dRule[sRuleId]
# Suggestion [ option, condition, "-", replacement/suggestion/action, iTokenStart, iTokenEnd, nPriority, message, URL ]
# TextProcessor [ option, condition, "~", replacement/suggestion/action, iTokenStart, iTokenEnd ]
# Disambiguator [ option, condition, "=", replacement/suggestion/action ]
# Tag [ option, condition, "/", replacement/suggestion/action, iTokenStart, iTokenEnd ]
# Test [ option, condition, ">", "" ]
if not sOption or dOptions.get(sOption, False):
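In gc_engine.py each entry of dRule is a tuple whose layout depends on the action type listed in the comments above ("-" suggestion, "~" text processor, "=" disambiguator, "/" tag, ">" test). A small illustration of the unpacking, assuming made-up rule ids and field values; the real tuples are produced by the rule compiler.

dRule = {
    "rule_sugg_1": ("typo", "g_c_example_1", "-", "suggestion", 1, 1, 4, "Error message.", ""),  # hypothetical suggestion rule
    "rule_tag_1":  ("", "g_c_example_2", "/", "NEG", 1, 2),                                      # hypothetical tag rule
}

for sRuleId in dRule:
    sOption, sFuncCond, cActionType, sWhat, *eAct = dRule[sRuleId]
    # cActionType selects the branch; eAct collects the trailing fields,
    # whose number varies with the action type.
    print(sRuleId, cActionType, sWhat, eAct)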
︙