Overview
| Comment: | [build] option -uc (--use_cache) to avoid rebuilding rules | 
|---|---|
| Downloads: | Tarball, ZIP archive, SQL archive | 
| Timelines: | family, ancestors, descendants, both, trunk, build | 
| Files: | files, file ages, folders | 
| SHA3-256: | 495352b0328c836afa64fc70b9bfb8a5 | 
| User & Date: | olr on 2018-10-12 08:43:43 | 
| Other Links: | manifest, tags | 
Context
| 2018-10-12 | |
| 08:45 | [build] useless path (check-in: cc74e8c3a7, user: olr, tags: trunk, build) |
| 08:43 | [build] option -uc (--use_cache) to avoid rebuilding rules (check-in: 495352b032, user: olr, tags: trunk, build) |
| 2018-10-11 | |
| 07:11 | [graphspell][core][js] several syntax and bug fixes (thanks to IllusionPerdu) (check-in: 9bb0f089b9, user: olr, tags: trunk, core, graphspell) |
Changes
Modified compile_rules.py from [6c0309c95b] to [3c1cc360a0].

Old version (lines 1–15):
"""
Grammalecte: compile rules
"""
import re
import traceback
import json
import colorsys
import compile_rules_js_convert as jsconv
import compile_rules_graph as crg
dDEF = {}
lFUNCTIONS = []

New version (lines 1–17):
"""
Grammalecte: compile rules
"""
import re
import os
import traceback
import json
import colorsys
import time
import compile_rules_js_convert as jsconv
import compile_rules_graph as crg
dDEF = {}
lFUNCTIONS = []

︙

Old version (lines 415–421):
    dColor = {}
    dOptLabel = {}
    dOptPriority = {}
    for sLine in lOptionLines:
        sLine = sLine.strip()
        if sLine.startswith("OPTGROUP/"):
            m = re.match("OPTGROUP/([a-z0-9]+):(.+)$", sLine)

New version (lines 417–431):
    dColor = {}
    dOptLabel = {}
    dOptPriority = {}
    for sLine in lOptionLines:
        sLine = sLine.strip()
        if sLine.startswith("OPTGROUP/"):
            m = re.match("OPTGROUP/([a-z0-9]+):(.+)$", sLine)
            lStructOpt.append( [m.group(1), list(map(str.split, m.group(2).split(",")))] )
        elif sLine.startswith("OPTSOFTWARE:"):
            lOpt = [ [s, {}]  for s in sLine[12:].strip().split() ]  # don’t use tuples (s, {}), because unknown to JS
        elif sLine.startswith("OPT/"):
            m = re.match("OPT/([a-z0-9]+):(.+)$", sLine)
            for i, sOpt in enumerate(m.group(2).split()):
                lOpt[i][1][m.group(1)] = eval(sOpt)
        elif sLine.startswith("OPTCOLORTHEME:"):

︙

Old version (lines 461–467):
def printBookmark (nLevel, sComment, nLine):
    "print bookmark within the rules file"
    print("  {:>6}:  {}".format(nLine, "  " * nLevel + sComment))

New version (lines 463–488):
def printBookmark (nLevel, sComment, nLine):
    "print bookmark within the rules file"
    print("  {:>6}:  {}".format(nLine, "  " * nLevel + sComment))
def make (spLang, sLang, bUseCache=False):
    "compile rules, returns a dictionary of values"
    # for clarity purpose, don’t create any file here
    if bUseCache and os.path.isfile("_build/data_cache.json"):
        print("> don’t rebuild rules, use cache...")
        sJSON = open("_build/data_cache.json", "r", encoding="utf-8").read()
        dCacheVars = json.loads(sJSON)
        print("  build made at: " + time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(dCacheVars.get("fBuildTime", 0))))
        return dCacheVars
    fBuildTime = time.time()
    print("> read rules file...")
    try:
        lRules = open(spLang + "/rules.grx", 'r', encoding="utf-8").readlines()
    except:
        print("Error. Rules file in project [" + sLang + "] not found.")
        exit()

︙

Old version (lines 608–614):
        sJSCallables += "        return " + jsconv.py2js(sReturn) + ";\n"
        sJSCallables += "    },\n"
    displayStats(lParagraphRules, lSentenceRules)
    print("Unnamed rules: " + str(nRULEWITHOUTNAME))

New version (lines 619–645):
        sJSCallables += "        return " + jsconv.py2js(sReturn) + ";\n"
        sJSCallables += "    },\n"
    displayStats(lParagraphRules, lSentenceRules)
    print("Unnamed rules: " + str(nRULEWITHOUTNAME))
    dVars = {
        "fBuildTime": fBuildTime,
        "callables": sPyCallables,
        "callablesJS": sJSCallables,
        "gctests": sGCTests,
        "gctestsJS": sGCTestsJS,
        "paragraph_rules": mergeRulesByOption(lParagraphRules),
        "sentence_rules": mergeRulesByOption(lSentenceRules),
        "paragraph_rules_JS": jsconv.writeRulesToJSArray(mergeRulesByOption(lParagraphRulesJS)),
        "sentence_rules_JS": jsconv.writeRulesToJSArray(mergeRulesByOption(lSentenceRulesJS))
    }
    dVars.update(dOptions)
    # compile graph rules
    dVars2 = crg.make(lGraphRule, dDEF, sLang, dOptPriority)
    dVars.update(dVars2)
    with open("_build/data_cache.json", "w", encoding="utf-8") as hDst:
        hDst.write(json.dumps(dVars, ensure_ascii=False))
    return dVars
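
The change to compile_rules.make() amounts to a JSON round trip: the build result is a plain dictionary of strings and numbers (now including an fBuildTime stamp), so it can be written to _build/data_cache.json after a full build and read back as-is when the cache is requested. Below is a minimal sketch of that pattern under the same file name; the saveCache/loadCache helpers are illustrative only, the check-in inlines both steps inside make().

```python
# Sketch of the cache round trip; saveCache/loadCache are hypothetical
# helpers, not functions from the repository.
import json
import os
import time


def saveCache (dVars, spfCache="_build/data_cache.json"):
    "store the build result (a JSON-serializable dict) with a timestamp"
    dVars["fBuildTime"] = time.time()
    os.makedirs(os.path.dirname(spfCache), exist_ok=True)
    with open(spfCache, "w", encoding="utf-8") as hDst:
        hDst.write(json.dumps(dVars, ensure_ascii=False))


def loadCache (spfCache="_build/data_cache.json"):
    "return the cached build result, or None if no cache file exists"
    if not os.path.isfile(spfCache):
        return None
    with open(spfCache, "r", encoding="utf-8") as hSrc:
        dCacheVars = json.loads(hSrc.read())
    sWhen = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(dCacheVars.get("fBuildTime", 0)))
    print("  build made at: " + sWhen)
    return dCacheVars
```

This only works because everything in dVars stays JSON-serializable; the generated Python and JavaScript callables, for instance, are carried as plain source strings.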
Modified compile_rules_graph.py from [f9b11563d7] to [f9c246b8ef].

︙

Old version (lines 314–320):
        sAction = createFunction("da", sActionId, sAction)
        return [sOption, sCondition, cAction, sAction]
    else:
        print(" # Unknown action.", sActionId)
        return None

New version (lines 314–328):
        sAction = createFunction("da", sActionId, sAction)
        return [sOption, sCondition, cAction, sAction]
    else:
        print(" # Unknown action.", sActionId)
        return None
def make (lRule, dDef, sLang, dOptPriority):
    "compile rules, returns a dictionary of values"
    # for clarity purpose, don’t create any file here
    # removing comments, zeroing empty lines, creating definitions, storing tests, merging rule lines
    print("  parsing rules...")
    lTokenLine = []
    sActions = ""

︙

Old version (lines 450–456):
        print("\nFunctions:")
        print(sPyCallables)
    # Result
    return {
        "graph_callables": sPyCallables,
        "graph_callablesJS": sJSCallables,

New version (lines 450–461):
        print("\nFunctions:")
        print(sPyCallables)
    # Result
    return {
        "graph_callables": sPyCallables,
        "graph_callablesJS": sJSCallables,
        "rules_graphs": str(dAllGraph),
        "rules_graphsJS": str(dAllGraph).replace("True", "true").replace("False", "false"),
        "rules_actions": str(dACTIONS),
        "rules_actionsJS": str(dACTIONS).replace("True", "true").replace("False", "false")
    }
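
In compile_rules_graph.make(), the graphs and actions are serialized for JavaScript simply by taking str() of the Python dictionaries and substituting the boolean literals, since the repr of a dict of lists and strings is already very close to a JavaScript object literal; only True/False differ. A small illustration follows (the dictionary content is invented for the example, and the plain substitution assumes the words True and False never occur inside the serialized strings).

```python
# Illustrative only: dActions stands in for dACTIONS / dAllGraph.
dActions = { "a1": ["-1", ":", True], "a2": ["-1", ":", False] }
sPy = str(dActions)
sJS = sPy.replace("True", "true").replace("False", "false")
print(sPy)  # {'a1': ['-1', ':', True], 'a2': ['-1', ':', False]}
print(sJS)  # {'a1': ['-1', ':', true], 'a2': ['-1', ':', false]}
```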
Modified make.py from [b3fb947153] to [d55bc7d114].

︙

Old version (lines 182–188):
        if not os.path.isdir("grammalecte/graphspell/_dictionaries/"+sf):
            hZip.write("grammalecte/graphspell/_dictionaries/"+sf, sAddPath+"grammalecte/graphspell/_dictionaries/"+sf)
    for sf in os.listdir(spLangPack):
        if not os.path.isdir(spLangPack+"/"+sf):
            hZip.write(spLangPack+"/"+sf, sAddPath+spLangPack+"/"+sf)

New version (lines 182–210):
        if not os.path.isdir("grammalecte/graphspell/_dictionaries/"+sf):
            hZip.write("grammalecte/graphspell/_dictionaries/"+sf, sAddPath+"grammalecte/graphspell/_dictionaries/"+sf)
    for sf in os.listdir(spLangPack):
        if not os.path.isdir(spLangPack+"/"+sf):
            hZip.write(spLangPack+"/"+sf, sAddPath+spLangPack+"/"+sf)
def create (sLang, xConfig, bInstallOXT, bJavaScript, bUseCache):
    "make Grammalecte for project <sLang>"
    oNow = datetime.datetime.now()
    print("============== MAKE GRAMMALECTE [{0}] at {1.hour:>2} h {1.minute:>2} min {1.second:>2} s ==============".format(sLang, oNow))
    #### READ CONFIGURATION
    print("> read configuration...")
    spLang = "gc_lang/" + sLang
    dVars = xConfig._sections['args']
    dVars['locales'] = dVars["locales"].replace("_", "-")
    dVars['loc'] = str(dict([ [s, [s[0:2], s[3:5], ""]] for s in dVars["locales"].split(" ") ]))
    ## COMPILE RULES
    dResult = compile_rules.make(spLang, dVars['lang'], bUseCache)
    dVars.update(dResult)
    ## READ GRAMMAR CHECKER PLUGINS
    print("PYTHON:")
    print("+ Plugins: ", end="")
    sCodePlugins = ""
    for sf in os.listdir(spLang+"/modules"):

︙

Old version (lines 371–384):
def main ():
    "build Grammalecte with requested options"
    print("Python: " + sys.version)
    xParser = argparse.ArgumentParser()
    xParser.add_argument("lang", type=str, nargs='+', help="lang project to generate (name of folder in /lang)")
    xParser.add_argument("-b", "--build_data", help="launch build_data.py (part 1 and 2)", action="store_true")
    xParser.add_argument("-bb", "--build_data_before", help="launch build_data.py (only part 1: before dictionary building)", action="store_true")
    xParser.add_argument("-ba", "--build_data_after", help="launch build_data.py (only part 2: before dictionary building)", action="store_true")
    xParser.add_argument("-d", "--dict", help="generate FSA dictionary", action="store_true")
    xParser.add_argument("-t", "--tests", help="run unit tests", action="store_true")
    xParser.add_argument("-p", "--perf", help="run performance tests", action="store_true")
    xParser.add_argument("-pm", "--perf_memo", help="run performance tests and store results in perf_memo.txt", action="store_true")

New version (lines 371–385):
def main ():
    "build Grammalecte with requested options"
    print("Python: " + sys.version)
    xParser = argparse.ArgumentParser()
    xParser.add_argument("lang", type=str, nargs='+', help="lang project to generate (name of folder in /lang)")
    xParser.add_argument("-uc", "--use_cache", help="use data cache instead of rebuilding rules", action="store_true")
    xParser.add_argument("-b", "--build_data", help="launch build_data.py (part 1 and 2)", action="store_true")
    xParser.add_argument("-bb", "--build_data_before", help="launch build_data.py (only part 1: before dictionary building)", action="store_true")
    xParser.add_argument("-ba", "--build_data_after", help="launch build_data.py (only part 2: before dictionary building)", action="store_true")
    xParser.add_argument("-d", "--dict", help="generate FSA dictionary", action="store_true")
    xParser.add_argument("-t", "--tests", help="run unit tests", action="store_true")
    xParser.add_argument("-p", "--perf", help="run performance tests", action="store_true")
    xParser.add_argument("-pm", "--perf_memo", help="run performance tests and store results in perf_memo.txt", action="store_true")

︙

Old version (lines 394–400):
    xParser.add_argument("-i", "--install", help="install the extension in Writer (path of unopkg must be set in config.ini)", action="store_true")
    xArgs = xParser.parse_args()
    if xArgs.build_data:
        xArgs.build_data_before = True
        xArgs.build_data_after = True

New version (lines 395–409):
    xParser.add_argument("-i", "--install", help="install the extension in Writer (path of unopkg must be set in config.ini)", action="store_true")
    xArgs = xParser.parse_args()
    if xArgs.build_data:
        xArgs.build_data_before = True
        xArgs.build_data_after = True
    dir_util.mkpath("_build/cache")
    dir_util.mkpath("grammalecte")
    if xArgs.javascript:
        dir_util.mkpath("grammalecte-js")
    copyGraphspellCore(xArgs.javascript)
    for sLang in xArgs.lang:

︙

Old version (lines 438–444):
            if databuild and xArgs.build_data_after:
                databuild.after('gc_lang/'+sLang, dVars, xArgs.javascript)
            # copy dictionaries from Graphspell
            copyGraphspellDictionaries(dVars, xArgs.javascript, xArgs.add_extended_dictionary, xArgs.add_community_dictionary, xArgs.add_personal_dictionary)
            # make

New version (lines 439–453):
            if databuild and xArgs.build_data_after:
                databuild.after('gc_lang/'+sLang, dVars, xArgs.javascript)
            # copy dictionaries from Graphspell
            copyGraphspellDictionaries(dVars, xArgs.javascript, xArgs.add_extended_dictionary, xArgs.add_community_dictionary, xArgs.add_personal_dictionary)
            # make
            sVersion = create(sLang, xConfig, xArgs.install, xArgs.javascript, xArgs.use_cache)
            # tests
            if xArgs.tests or xArgs.perf or xArgs.perf_memo:
                print("> Running tests")
                try:
                    tests = importlib.import_module("grammalecte."+sLang+".tests")
                    print(tests.__file__)

︙
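
End to end, the new switch travels from the command line to the compiler: main() registers -uc/--use_cache, create() receives it as bUseCache and forwards it to compile_rules.make(), which returns the cached dictionary when _build/data_cache.json exists. In practice that means something like python make.py fr -uc after one full build (fr is just an example language folder). The sketch below calls the compiler directly with the cache enabled; it assumes it runs from the repository root, that a previous full build has already written the cache file, and it skips everything else create() does (plugins, dictionaries, packaging).

```python
# Minimal sketch: reuse the cached build result without going through make.py.
# Assumes a prior full build has created _build/data_cache.json.
import compile_rules

sLang = "fr"                                    # example language folder under gc_lang/
dVars = compile_rules.make("gc_lang/" + sLang, sLang, bUseCache=True)
print(sorted(dVars.keys()))                     # e.g. callables, fBuildTime, paragraph_rules, rules_graphs, ...
```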