Grammalecte Check-in [e31ec0bfe5]

Overview
Comment: [build] save hash of rules file in data cache, automatically use cache if rules file hasn’t been modified
SHA3-256: e31ec0bfe5d1a5ef8e5f6a21ab51fa83e2bf74c157bf7bc3593638f2a7359e61
User & Date: olr on 2020-04-18 13:30:52
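The change amounts to content-based cache invalidation: the SHA3-512 digest of rules.grx is stored in _build/data_cache.json together with the compiled data, and a later build reuses the cache whenever the digest of the current rules file matches. A minimal, self-contained sketch of that pattern (the helper name is illustrative and not part of the repository; file names follow the diff below):

import hashlib
import json
import os

def loadCacheIfUnchanged (spRules, spCache="_build/data_cache.json"):
    "return the cached build data if the rules file hash matches, else None (illustrative sketch)"
    sContent = open(spRules, "r", encoding="utf-8").read()
    sFileHash = hashlib.new("sha3_512", sContent.encode("utf-8")).hexdigest()
    if os.path.isfile(spCache):
        dCacheVars = json.loads(open(spCache, "r", encoding="utf-8").read())
        if dCacheVars.get("sFileHash", "") == sFileHash:
            return dCacheVars      # rules file unmodified since last build: reuse cache
    return None                    # no cache yet, or hash mismatch: caller must rebuild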
Context
2020-04-18
16:14  [fr] adjustments  check-in: 5ae7d109af user: olr tags: trunk, fr
13:30  [build] save hash of rules file in data cache, automatically use cache if rules file hasn’t been modified  check-in: e31ec0bfe5 user: olr tags: trunk, build
12:32  [fx] gc panel: tooltip > rework appearance and access to the db  check-in: b8188ff0e5 user: olr tags: trunk, fx
Changes

Modified compile_rules.py from [ed8f69534c] to [cec64f3971].

@@ -1,17 +1,18 @@
 """
 Grammalecte: compile rules
 """

 import re
 import os
 import traceback
 import json
 import colorsys
 import time
+import hashlib

 import compile_rules_js_convert as jsconv
 import compile_rules_graph as crg


 dDEFINITIONS = {}
 dDECLENSIONS = {}
@@ -467,39 +468,52 @@
     print("  {:>6}:  {}".format(nLine, "  " * nLevel + sComment))


 def make (spLang, sLang, bUseCache=False):
     "compile rules, returns a dictionary of values"
     # for clarity purpose, don’t create any file here

-    if bUseCache and os.path.isfile("_build/data_cache.json"):
-        print("> don’t rebuild rules, use cache...")
+    dCacheVars = None
+
+    if os.path.isfile("_build/data_cache.json"):
+        print("> data cache found")
         sJSON = open("_build/data_cache.json", "r", encoding="utf-8").read()
         dCacheVars = json.loads(sJSON)
-        print("  build made at: " + time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(dCacheVars.get("fBuildTime", 0))))
-        return dCacheVars
-
-    fBuildTime = time.time()
+        sBuildDate = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(dCacheVars.get("fBuildTime", 0)))
+        if bUseCache:
+            print("> use cache (no rebuild asked)")
+            print("  build made at: " + sBuildDate)
+            return dCacheVars

     print("> read rules file...")
     try:
-        lRules = open(spLang + "/rules.grx", 'r', encoding="utf-8").readlines()
+        sFileContent = open(spLang + "/rules.grx", 'r', encoding="utf-8").read()
     except OSError:
         print("Error. Rules file in project [" + sLang + "] not found.")
         exit()
+
+    xHasher = hashlib.new("sha3_512")
+    xHasher.update(sFileContent.encode("utf-8"))
+    sFileHash = xHasher.hexdigest()
+
+    if dCacheVars and sFileHash == dCacheVars.get("sFileHash", ""):
+        print("> cache hash identical to file hash, use cache")
+        print("  build made at: " + sBuildDate)
+        return dCacheVars

     # removing comments, zeroing empty lines, creating definitions, storing tests, merging rule lines
     print("  parsing rules...")
+    fBuildTime = time.time()
     lRuleLine = []
     lTest = []
     lOpt = []
     bGraph = False
     lGraphRule = []

-    for i, sLine in enumerate(lRules, 1):
+    for i, sLine in enumerate(sFileContent.split("\n"), 1):
         if sLine.startswith('#END'):
             # arbitrary end
             printBookmark(0, "BREAK BY #END", i)
             break
         elif sLine.startswith("#"):
             # comment
             pass

@@ -624,14 +638,15 @@
         sJSCallables += "        return " + jsconv.py2js(sReturn) + ";\n"
         sJSCallables += "    },\n"

     displayStats(lParagraphRules, lSentenceRules)

     dVars = {
         "fBuildTime": fBuildTime,
+        "sFileHash": sFileHash,
         "callables": sPyCallables,
         "callablesJS": sJSCallables,
         "gctests": sGCTests,
         "gctestsJS": sGCTestsJS,
         "paragraph_rules": mergeRulesByOption(lParagraphRules),
         "sentence_rules": mergeRulesByOption(lSentenceRules),
         "paragraph_rules_JS": jsconv.writeRulesToJSArray(mergeRulesByOption(lParagraphRulesJS)),

Modified compile_rules_graph.py from [b7d9668574] to [07d8ba5d53].

@@ -453,15 +453,15 @@


 def make (lRule, sLang, dDef, dDecl, dOptPriority):
     "compile rules, returns a dictionary of values"
     # for clarity purpose, don’t create any file here

     # removing comments, zeroing empty lines, creating definitions, storing tests, merging rule lines
-    print("  parsing rules...")
+    print("  parsing graph rules...")
     lTokenLine = []
     lActions = []
     bActionBlock = False
     nPriority = -1
     dAllGraph = {}
     dGraphCode = {}
     sGraphName = ""
@@ -541,15 +541,15 @@
             lActions.clear()
             iActionBlock += 1
         else:
             print("Unknown line at:", iLine)
             print(sLine)

     # processing rules
-    print("  processing rules...")
+    print("  processing graph rules...")
     initProcessPoolExecutor()
     fStartTimer = time.time()
     # build graph
     lResult = []
     nRule = 0
     for sGraphName, lRuleLine in dAllGraph.items():
         nRule += len(lRuleLine)