Grammalecte Check-in [9cd97ae91f]

Overview
Comment: [build] Apply change in build
SHA3-256: 9cd97ae91f94930d48aaef35708b4ccda48b7cb515973941488841131e058002
User & Date: IllusionPerdu on 2018-10-12 10:33:01
Context
2018-10-12
    12:03  [core][js] Fix textformater  check-in: dcba084a8d  user: IllusionPerdu  tags: core, nodejs
    10:33  [build] Apply change in build  check-in: 9cd97ae91f  user: IllusionPerdu  tags: build, nodejs
2018-10-11
    15:39  [js] Revert syntax change (oversight)  check-in: 977fd9b488  user: IllusionPerdu  tags: njs, nodejs
Changes

Modified compile_rules.py from [6c0309c95b] to [3c1cc360a0].

Before (lines 1-15):
"""
Grammalecte: compile rules
"""

import re

import traceback
import json
import colorsys


import compile_rules_js_convert as jsconv
import compile_rules_graph as crg


dDEF = {}
lFUNCTIONS = []

After (lines 1-17):
"""
Grammalecte: compile rules
"""

import re
import os
import traceback
import json
import colorsys
import time

import compile_rules_js_convert as jsconv
import compile_rules_graph as crg


dDEF = {}
lFUNCTIONS = []
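
The two added imports line up with the rest of this check-in: os.path.isfile() guards the new cache lookup in make(), and time.time() / time.gmtime() provide and display the fBuildTime stamp stored with the cached data.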

Before (lines 415-429):
    dColor = {}
    dOptLabel = {}
    dOptPriority = {}
    for sLine in lOptionLines:
        sLine = sLine.strip()
        if sLine.startswith("OPTGROUP/"):
            m = re.match("OPTGROUP/([a-z0-9]+):(.+)$", sLine)
            lStructOpt.append( (m.group(1), list(map(str.split, m.group(2).split(",")))) )
        elif sLine.startswith("OPTSOFTWARE:"):
            lOpt = [ [s, {}]  for s in sLine[12:].strip().split() ]  # don’t use tuples (s, {}), because unknown to JS
        elif sLine.startswith("OPT/"):
            m = re.match("OPT/([a-z0-9]+):(.+)$", sLine)
            for i, sOpt in enumerate(m.group(2).split()):
                lOpt[i][1][m.group(1)] = eval(sOpt)
        elif sLine.startswith("OPTCOLORTHEME:"):

After (lines 417-431):
    dColor = {}
    dOptLabel = {}
    dOptPriority = {}
    for sLine in lOptionLines:
        sLine = sLine.strip()
        if sLine.startswith("OPTGROUP/"):
            m = re.match("OPTGROUP/([a-z0-9]+):(.+)$", sLine)
            lStructOpt.append( [m.group(1), list(map(str.split, m.group(2).split(",")))] )
        elif sLine.startswith("OPTSOFTWARE:"):
            lOpt = [ [s, {}]  for s in sLine[12:].strip().split() ]  # don’t use tuples (s, {}), because unknown to JS
        elif sLine.startswith("OPT/"):
            m = re.match("OPT/([a-z0-9]+):(.+)$", sLine)
            for i, sOpt in enumerate(m.group(2).split()):
                lOpt[i][1][m.group(1)] = eval(sOpt)
        elif sLine.startswith("OPTCOLORTHEME:"):
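
The only functional change in this hunk is that lStructOpt now receives a list instead of a tuple. That is consistent with the existing comment about lOpt a few lines below (tuples have no direct JS counterpart) and with the JSON cache added later in this check-in: tuples do not survive a JSON round trip anyway, as this minimal sketch with made-up data shows.

import json

lStructOpt = [("typo", [["apos", "True"]])]          # the old tuple form, with made-up data
sJSON = json.dumps(lStructOpt, ensure_ascii=False)   # the tuple is written out as a JSON array…
print(json.loads(sJSON))                             # …and comes back as a plain list: [['typo', [['apos', 'True']]]]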

Before (lines 461-477):


def printBookmark (nLevel, sComment, nLine):
    "print bookmark within the rules file"
    print("  {:>6}:  {}".format(nLine, "  " * nLevel + sComment))


def make (spLang, sLang, bJavaScript):
    "compile rules, returns a dictionary of values"
    # for clarity purpose, don’t create any file here










    print("> read rules file...")
    try:
        lRules = open(spLang + "/rules.grx", 'r', encoding="utf-8").readlines()
    except:
        print("Error. Rules file in project [" + sLang + "] not found.")
        exit()

After (lines 463-488):


def printBookmark (nLevel, sComment, nLine):
    "print bookmark within the rules file"
    print("  {:>6}:  {}".format(nLine, "  " * nLevel + sComment))


def make (spLang, sLang, bUseCache=False):
    "compile rules, returns a dictionary of values"
    # for clarity purpose, don’t create any file here

    if bUseCache and os.path.isfile("_build/data_cache.json"):
        print("> don’t rebuild rules, use cache...")
        sJSON = open("_build/data_cache.json", "r", encoding="utf-8").read()
        dCacheVars = json.loads(sJSON)
        print("  build made at: " + time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(dCacheVars.get("fBuildTime", 0))))
        return dCacheVars

    fBuildTime = time.time()

    print("> read rules file...")
    try:
        lRules = open(spLang + "/rules.grx", 'r', encoding="utf-8").readlines()
    except:
        print("Error. Rules file in project [" + sLang + "] not found.")
        exit()
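
make() now takes bUseCache instead of bJavaScript: when the flag is set and _build/data_cache.json exists, the deserialized cache is returned immediately and rules.grx is never re-read; the stored fBuildTime is only used to print when that cached build was made. Seen from a caller (the project name below is purely an example), the two paths look like this:

import compile_rules

# first build: rules.grx is parsed and, at the end of make() (see below), _build/data_cache.json is written
dVars = compile_rules.make("gc_lang/fr", "fr", bUseCache=False)

# later builds: if the cache file exists, its content is returned as-is, without recompiling anything
dVars = compile_rules.make("gc_lang/fr", "fr", bUseCache=True)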

Before (lines 608-629):
        sJSCallables += "        return " + jsconv.py2js(sReturn) + ";\n"
        sJSCallables += "    },\n"

    displayStats(lParagraphRules, lSentenceRules)

    print("Unnamed rules: " + str(nRULEWITHOUTNAME))



    dVars = {   "callables": sPyCallables,
                "callablesJS": sJSCallables,
                "gctests": sGCTests,
                "gctestsJS": sGCTestsJS,
                "paragraph_rules": mergeRulesByOption(lParagraphRules),
                "sentence_rules": mergeRulesByOption(lSentenceRules),
                "paragraph_rules_JS": jsconv.writeRulesToJSArray(mergeRulesByOption(lParagraphRulesJS)),
                "sentence_rules_JS": jsconv.writeRulesToJSArray(mergeRulesByOption(lSentenceRulesJS)) }

    dVars.update(dOptions)

    # compile graph rules
    dVars2 = crg.make(lGraphRule, dDEF, sLang, dOptPriority, bJavaScript)
    dVars.update(dVars2)



    return dVars

After (lines 619-645):
        sJSCallables += "        return " + jsconv.py2js(sReturn) + ";\n"
        sJSCallables += "    },\n"

    displayStats(lParagraphRules, lSentenceRules)

    print("Unnamed rules: " + str(nRULEWITHOUTNAME))

    dVars = {
        "fBuildTime": fBuildTime,
        "callables": sPyCallables,
        "callablesJS": sJSCallables,
        "gctests": sGCTests,
        "gctestsJS": sGCTestsJS,
        "paragraph_rules": mergeRulesByOption(lParagraphRules),
        "sentence_rules": mergeRulesByOption(lSentenceRules),
        "paragraph_rules_JS": jsconv.writeRulesToJSArray(mergeRulesByOption(lParagraphRulesJS)),
        "sentence_rules_JS": jsconv.writeRulesToJSArray(mergeRulesByOption(lSentenceRulesJS))
    }
    dVars.update(dOptions)

    # compile graph rules
    dVars2 = crg.make(lGraphRule, dDEF, sLang, dOptPriority)
    dVars.update(dVars2)

    with open("_build/data_cache.json", "w", encoding="utf-8") as hDst:
        hDst.write(json.dumps(dVars, ensure_ascii=False))
    return dVars
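
The end of make() now timestamps the result (fBuildTime) and dumps the whole dVars dictionary to _build/data_cache.json with ensure_ascii=False, which is what the early-return branch above reads back. Everything placed in dVars therefore has to be JSON-serializable, which is presumably why rules_graphs and rules_actions become plain strings in compile_rules_graph.py below. A quick way to inspect the cache afterwards (only keys visible in this diff are named):

import json, time

with open("_build/data_cache.json", "r", encoding="utf-8") as hSrc:
    dCacheVars = json.load(hSrc)

print(sorted(dCacheVars))     # includes "callables", "fBuildTime", "paragraph_rules", … among the other entries
print(time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(dCacheVars.get("fBuildTime", 0))))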

Modified compile_rules_graph.py from [f9b11563d7] to [f9c246b8ef].

Before (lines 314-328):
        sAction = createFunction("da", sActionId, sAction)
        return [sOption, sCondition, cAction, sAction]
    else:
        print(" # Unknown action.", sActionId)
        return None


def make (lRule, dDef, sLang, dOptPriority, bJavaScript):
    "compile rules, returns a dictionary of values"
    # for clarity purpose, don’t create any file here

    # removing comments, zeroing empty lines, creating definitions, storing tests, merging rule lines
    print("  parsing rules...")
    lTokenLine = []
    sActions = ""

After (lines 314-328):
        sAction = createFunction("da", sActionId, sAction)
        return [sOption, sCondition, cAction, sAction]
    else:
        print(" # Unknown action.", sActionId)
        return None


def make (lRule, dDef, sLang, dOptPriority):
    "compile rules, returns a dictionary of values"
    # for clarity purpose, don’t create any file here

    # removing comments, zeroing empty lines, creating definitions, storing tests, merging rule lines
    print("  parsing rules...")
    lTokenLine = []
    sActions = ""

Before (lines 450-461):
        print("\nFunctions:")
        print(sPyCallables)

    # Result
    return {
        "graph_callables": sPyCallables,
        "graph_callablesJS": sJSCallables,
        "rules_graphs": dAllGraph,
        "rules_graphsJS": str(dAllGraph).replace("True", "true").replace("False", "false"),
        "rules_actions": dACTIONS,
        "rules_actionsJS": str(dACTIONS).replace("True", "true").replace("False", "false")
    }

After (lines 450-461):
        print("\nFunctions:")
        print(sPyCallables)

    # Result
    return {
        "graph_callables": sPyCallables,
        "graph_callablesJS": sJSCallables,
        "rules_graphs": str(dAllGraph),
        "rules_graphsJS": str(dAllGraph).replace("True", "true").replace("False", "false"),
        "rules_actions": str(dACTIONS),
        "rules_actionsJS": str(dACTIONS).replace("True", "true").replace("False", "false")
    }
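
rules_graphs and rules_actions are now returned as str(dAllGraph) and str(dACTIONS) rather than as the dictionaries themselves, matching what the *_JS variants already did; a plain string also passes through the new JSON cache unchanged, whereas nested dictionaries with non-string keys (if any) would not round-trip exactly. The True/False substitution used for the JS variants works because a Python dict literal differs from a JS object literal essentially only in its boolean spelling; a tiny illustration with made-up content:

dDemo = { "sActionId": {"bCaseSensitive": False, "nPriority": 4} }   # made-up content, not the real dACTIONS
sPy = str(dDemo)                                             # "{'sActionId': {'bCaseSensitive': False, 'nPriority': 4}}"
sJS = sPy.replace("True", "true").replace("False", "false")  # readable as a JS object literal (single-quoted keys are valid JS)
print(sPy)
print(sJS)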

Modified make.py from [b3fb947153] to [7a5afc5f1b].

Before (lines 182-210):
        if not os.path.isdir("grammalecte/graphspell/_dictionaries/"+sf):
            hZip.write("grammalecte/graphspell/_dictionaries/"+sf, sAddPath+"grammalecte/graphspell/_dictionaries/"+sf)
    for sf in os.listdir(spLangPack):
        if not os.path.isdir(spLangPack+"/"+sf):
            hZip.write(spLangPack+"/"+sf, sAddPath+spLangPack+"/"+sf)


def create (sLang, xConfig, bInstallOXT, bJavaScript):
    "make Grammalecte for project <sLang>"
    oNow = datetime.datetime.now()
    print("============== MAKE GRAMMALECTE [{0}] at {1.hour:>2} h {1.minute:>2} min {1.second:>2} s ==============".format(sLang, oNow))

    #### READ CONFIGURATION
    print("> read configuration...")
    spLang = "gc_lang/" + sLang

    dVars = xConfig._sections['args']
    dVars['locales'] = dVars["locales"].replace("_", "-")
    dVars['loc'] = str(dict([ [s, [s[0:2], s[3:5], ""]] for s in dVars["locales"].split(" ") ]))

    ## COMPILE RULES
    dResult = compile_rules.make(spLang, dVars['lang'], bJavaScript)
    dVars.update(dResult)

    ## READ GRAMMAR CHECKER PLUGINS
    print("PYTHON:")
    print("+ Plugins: ", end="")
    sCodePlugins = ""
    for sf in os.listdir(spLang+"/modules"):

After (lines 182-210):
        if not os.path.isdir("grammalecte/graphspell/_dictionaries/"+sf):
            hZip.write("grammalecte/graphspell/_dictionaries/"+sf, sAddPath+"grammalecte/graphspell/_dictionaries/"+sf)
    for sf in os.listdir(spLangPack):
        if not os.path.isdir(spLangPack+"/"+sf):
            hZip.write(spLangPack+"/"+sf, sAddPath+spLangPack+"/"+sf)


def create (sLang, xConfig, bInstallOXT, bJavaScript, bUseCache):
    "make Grammalecte for project <sLang>"
    oNow = datetime.datetime.now()
    print("============== MAKE GRAMMALECTE [{0}] at {1.hour:>2} h {1.minute:>2} min {1.second:>2} s ==============".format(sLang, oNow))

    #### READ CONFIGURATION
    print("> read configuration...")
    spLang = "gc_lang/" + sLang

    dVars = xConfig._sections['args']
    dVars['locales'] = dVars["locales"].replace("_", "-")
    dVars['loc'] = str(dict([ [s, [s[0:2], s[3:5], ""]] for s in dVars["locales"].split(" ") ]))

    ## COMPILE RULES
    dResult = compile_rules.make(spLang, dVars['lang'], bUseCache)
    dVars.update(dResult)

    ## READ GRAMMAR CHECKER PLUGINS
    print("PYTHON:")
    print("+ Plugins: ", end="")
    sCodePlugins = ""
    for sf in os.listdir(spLang+"/modules"):

Before (lines 371-384):


def main ():
    "build Grammalecte with requested options"
    print("Python: " + sys.version)
    xParser = argparse.ArgumentParser()
    xParser.add_argument("lang", type=str, nargs='+', help="lang project to generate (name of folder in /lang)")

    xParser.add_argument("-b", "--build_data", help="launch build_data.py (part 1 and 2)", action="store_true")
    xParser.add_argument("-bb", "--build_data_before", help="launch build_data.py (only part 1: before dictionary building)", action="store_true")
    xParser.add_argument("-ba", "--build_data_after", help="launch build_data.py (only part 2: before dictionary building)", action="store_true")
    xParser.add_argument("-d", "--dict", help="generate FSA dictionary", action="store_true")
    xParser.add_argument("-t", "--tests", help="run unit tests", action="store_true")
    xParser.add_argument("-p", "--perf", help="run performance tests", action="store_true")
    xParser.add_argument("-pm", "--perf_memo", help="run performance tests and store results in perf_memo.txt", action="store_true")

After (lines 371-385):


def main ():
    "build Grammalecte with requested options"
    print("Python: " + sys.version)
    xParser = argparse.ArgumentParser()
    xParser.add_argument("lang", type=str, nargs='+', help="lang project to generate (name of folder in /lang)")
    xParser.add_argument("-uc", "--use_cache", help="use data cache instead of rebuilding rules", action="store_true")
    xParser.add_argument("-b", "--build_data", help="launch build_data.py (part 1 and 2)", action="store_true")
    xParser.add_argument("-bb", "--build_data_before", help="launch build_data.py (only part 1: before dictionary building)", action="store_true")
    xParser.add_argument("-ba", "--build_data_after", help="launch build_data.py (only part 2: before dictionary building)", action="store_true")
    xParser.add_argument("-d", "--dict", help="generate FSA dictionary", action="store_true")
    xParser.add_argument("-t", "--tests", help="run unit tests", action="store_true")
    xParser.add_argument("-p", "--perf", help="run performance tests", action="store_true")
    xParser.add_argument("-pm", "--perf_memo", help="run performance tests and store results in perf_memo.txt", action="store_true")
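
The new -uc / --use_cache flag is threaded from main() through create() down to compile_rules.make(). Note that make() returns the cache whenever the file exists and the flag is set; nothing checks whether rules.grx has changed since, so drop the flag (or delete _build/data_cache.json) after editing the rules. Typical invocations, with fr as a purely illustrative project name:

python make.py fr        # full build: rules are compiled and _build/data_cache.json is (re)written
python make.py fr -uc    # later build: reuse the cached rule data instead of recompiling rules.grx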

Before (lines 438-452):
            if databuild and xArgs.build_data_after:
                databuild.after('gc_lang/'+sLang, dVars, xArgs.javascript)

            # copy dictionaries from Graphspell
            copyGraphspellDictionaries(dVars, xArgs.javascript, xArgs.add_extended_dictionary, xArgs.add_community_dictionary, xArgs.add_personal_dictionary)

            # make
            sVersion = create(sLang, xConfig, xArgs.install, xArgs.javascript, )

            # tests
            if xArgs.tests or xArgs.perf or xArgs.perf_memo:
                print("> Running tests")
                try:
                    tests = importlib.import_module("grammalecte."+sLang+".tests")
                    print(tests.__file__)

After (lines 439-453):
            if databuild and xArgs.build_data_after:
                databuild.after('gc_lang/'+sLang, dVars, xArgs.javascript)

            # copy dictionaries from Graphspell
            copyGraphspellDictionaries(dVars, xArgs.javascript, xArgs.add_extended_dictionary, xArgs.add_community_dictionary, xArgs.add_personal_dictionary)

            # make
            sVersion = create(sLang, xConfig, xArgs.install, xArgs.javascript, xArgs.use_cache)

            # tests
            if xArgs.tests or xArgs.perf or xArgs.perf_memo:
                print("> Running tests")
                try:
                    tests = importlib.import_module("grammalecte."+sLang+".tests")
                    print(tests.__file__)