Grammalecte Diff

Differences From Artifact [03f1f86c56]:

To Artifact [a2a625b112]:


From (lines 13-30):
import configparser
import datetime
import argparse
import importlib
import unittest
import json
import platform

from distutils import dir_util, file_util

#import dialog_bundled
import compile_rules
import helpers
import lex_build


sWarningMessage = "The content of this folder is generated by code and replaced at each build.\n"

To (lines 13-29):
import configparser
import datetime
import argparse
import importlib
import unittest
import json
import platform
import shutil


#im2port dialog_bundled
import compile_rules
import helpers
import lex_build


sWarningMessage = "The content of this folder is generated by code and replaced at each build.\n"

From (lines 204-218):
    print()
    dVars["plugins"] = sCodePlugins

    ## COPY GC_CORE COMMON FILES
    for sf in os.listdir("gc_core/py"):
        if not os.path.isdir("gc_core/py/"+sf):
            helpers.copyAndFileTemplate("gc_core/py/"+sf, "grammalecte/"+sf, dVars)
    file_util.copy_file("3rd/bottle.py", "grammalecte/bottle.py")
    open("grammalecte/WARNING.txt", "w", encoding="utf-8", newline="\n").write(sWarningMessage)

    ## CREATE GRAMMAR CHECKER PACKAGE
    spLangPack = "grammalecte/"+sLang
    helpers.createCleanFolder(spLangPack)
    for sf in os.listdir("gc_core/py/lang_core"):
        if not os.path.isdir("gc_core/py/lang_core/"+sf):

To (lines 203-217):
    print()
    dVars["plugins"] = sCodePlugins

    ## COPY GC_CORE COMMON FILES
    for sf in os.listdir("gc_core/py"):
        if not os.path.isdir("gc_core/py/"+sf):
            helpers.copyAndFileTemplate("gc_core/py/"+sf, "grammalecte/"+sf, dVars)
    shutil.copy2("3rd/bottle.py", "grammalecte/bottle.py")
    open("grammalecte/WARNING.txt", "w", encoding="utf-8", newline="\n").write(sWarningMessage)

    ## CREATE GRAMMAR CHECKER PACKAGE
    spLangPack = "grammalecte/"+sLang
    helpers.createCleanFolder(spLangPack)
    for sf in os.listdir("gc_core/py/lang_core"):
        if not os.path.isdir("gc_core/py/lang_core/"+sf):

From (lines 282-308):
    return dVars['version']


def copyGraphspellCore (bJavaScript=False):
    "copy Graphspell package in Grammalecte package"
    print("> Copy Graphspell package in Grammalecte package")
    helpers.createCleanFolder("grammalecte/graphspell")
    dir_util.mkpath("grammalecte/graphspell/_dictionaries")
    for sf in os.listdir("graphspell"):
        if not os.path.isdir("graphspell/"+sf):
            file_util.copy_file("graphspell/"+sf, "grammalecte/graphspell")
    if bJavaScript:
        helpers.createCleanFolder("grammalecte-js/graphspell")
        dir_util.mkpath("grammalecte-js/graphspell/_dictionaries")
        dVars = {}
        for sf in os.listdir("js_extension"):
            dVars[sf[:-3]] = open("js_extension/"+sf, "r", encoding="utf-8").read()
        for sf in os.listdir("graphspell-js"):
            if not os.path.isdir("graphspell-js/"+sf):
                file_util.copy_file("graphspell-js/"+sf, "grammalecte-js/graphspell")
                helpers.copyAndFileTemplate("graphspell-js/"+sf, "grammalecte-js/graphspell/"+sf, dVars)


def copyGraphspellDictionaries (dVars, bJavaScript=False, bCommunityDict=False, bPersonalDict=False):
    "copy requested Graphspell dictionaries in Grammalecte package"
    print("> Copy requested Graphspell dictionaries in Grammalecte package")
    dVars["dic_main_filename_py"] = ""

To (lines 281-307):
    return dVars['version']


def copyGraphspellCore (bJavaScript=False):
    "copy Graphspell package in Grammalecte package"
    print("> Copy Graphspell package in Grammalecte package")
    helpers.createCleanFolder("grammalecte/graphspell")
    helpers.createFolder("grammalecte/graphspell/_dictionaries")
    for sf in os.listdir("graphspell"):
        if not os.path.isdir("graphspell/"+sf):
            shutil.copy2("graphspell/"+sf, "grammalecte/graphspell")
    if bJavaScript:
        helpers.createCleanFolder("grammalecte-js/graphspell")
        helpers.createFolder("grammalecte-js/graphspell/_dictionaries")
        dVars = {}
        for sf in os.listdir("js_extension"):
            dVars[sf[:-3]] = open("js_extension/"+sf, "r", encoding="utf-8").read()
        for sf in os.listdir("graphspell-js"):
            if not os.path.isdir("graphspell-js/"+sf):
                shutil.copy2("graphspell-js/"+sf, "grammalecte-js/graphspell")
                helpers.copyAndFileTemplate("graphspell-js/"+sf, "grammalecte-js/graphspell/"+sf, dVars)


def copyGraphspellDictionaries (dVars, bJavaScript=False, bCommunityDict=False, bPersonalDict=False):
    "copy requested Graphspell dictionaries in Grammalecte package"
    print("> Copy requested Graphspell dictionaries in Grammalecte package")
    dVars["dic_main_filename_py"] = ""

From (lines 318-336):
        lDict.append(("personal", dVars['dic_personal_filename']))
    for sType, sFileName in lDict:
        spfPyDic = f"graphspell/_dictionaries/{sFileName}.json"
        spfJSDic = f"graphspell-js/_dictionaries/{sFileName}.json"
        if not os.path.isfile(spfPyDic) or (bJavaScript and not os.path.isfile(spfJSDic)):
            buildDictionary(dVars, sType, bJavaScript)
        print("  +", spfPyDic)
        file_util.copy_file(spfPyDic, "grammalecte/graphspell/_dictionaries")
        dVars['dic_'+sType+'_filename_py'] = sFileName + '.json'
        if bJavaScript:
            print("  +", spfJSDic)
            file_util.copy_file(spfJSDic, "grammalecte-js/graphspell/_dictionaries")
            dVars['dic_'+sType+'_filename_js'] = sFileName + '.json'
    dVars['dic_main_filename_py'] = dVars['dic_default_filename_py'] + ".json"
    dVars['dic_main_filename_js'] = dVars['dic_default_filename_js'] + ".json"


def buildDictionary (dVars, sType, bJavaScript=False):
    "build binary dictionary for Graphspell from lexicons"

To (lines 317-335):
        lDict.append(("personal", dVars['dic_personal_filename']))
    for sType, sFileName in lDict:
        spfPyDic = f"graphspell/_dictionaries/{sFileName}.json"
        spfJSDic = f"graphspell-js/_dictionaries/{sFileName}.json"
        if not os.path.isfile(spfPyDic) or (bJavaScript and not os.path.isfile(spfJSDic)):
            buildDictionary(dVars, sType, bJavaScript)
        print("  +", spfPyDic)
        shutil.copy2(spfPyDic, "grammalecte/graphspell/_dictionaries")
        dVars['dic_'+sType+'_filename_py'] = sFileName + '.json'
        if bJavaScript:
            print("  +", spfJSDic)
            shutil.copy2(spfJSDic, "grammalecte-js/graphspell/_dictionaries")
            dVars['dic_'+sType+'_filename_js'] = sFileName + '.json'
    dVars['dic_main_filename_py'] = dVars['dic_default_filename_py'] + ".json"
    dVars['dic_main_filename_js'] = dVars['dic_default_filename_js'] + ".json"


def buildDictionary (dVars, sType, bJavaScript=False):
    "build binary dictionary for Graphspell from lexicons"

From (lines 393-407):
        return
    xParser = argparse.ArgumentParser()
    xParser.add_argument("lang", type=str, nargs='+', help="lang project to generate (name of folder in /lang)")
    xParser.add_argument("-uc", "--use_cache", help="use data cache instead of rebuilding rules", action="store_true")
    xParser.add_argument("-frb", "--force_rebuild", help="force rebuilding rules", action="store_true")
    xParser.add_argument("-b", "--build_data", help="launch build_data.py (part 1 and 2)", action="store_true")
    xParser.add_argument("-bb", "--build_data_before", help="launch build_data.py (only part 1: before dictionary building)", action="store_true")
    xParser.add_argument("-ba", "--build_data_after", help="launch build_data.py (only part 2: before dictionary building)", action="store_true")
    xParser.add_argument("-d", "--dict", help="generate FSA dictionary", action="store_true")
    xParser.add_argument("-t", "--tests", help="run unit tests", action="store_true")
    xParser.add_argument("-tt", "--test_texts", help="perform gc tests on texts", action="store_true")
    xParser.add_argument("-p", "--perf", help="run performance tests", action="store_true")
    xParser.add_argument("-pm", "--perf_memo", help="run performance tests and store results in perf_memo.txt", action="store_true")
    xParser.add_argument("-js", "--javascript", help="JavaScript build for Firefox", action="store_true")
    xParser.add_argument("-acd", "--add_community_dictionary", help="add community dictionary to the build", action="store_true")

To (lines 392-406):
        return
    xParser = argparse.ArgumentParser()
    xParser.add_argument("lang", type=str, nargs='+', help="lang project to generate (name of folder in /lang)")
    xParser.add_argument("-uc", "--use_cache", help="use data cache instead of rebuilding rules", action="store_true")
    xParser.add_argument("-frb", "--force_rebuild", help="force rebuilding rules", action="store_true")
    xParser.add_argument("-b", "--build_data", help="launch build_data.py (part 1 and 2)", action="store_true")
    xParser.add_argument("-bb", "--build_data_before", help="launch build_data.py (only part 1: before dictionary building)", action="store_true")
    xParser.add_argument("-ba", "--build_data_after", help="launch build_data.py (only part 2: after dictionary building)", action="store_true")
    xParser.add_argument("-d", "--dict", help="generate FSA dictionary", action="store_true")
    xParser.add_argument("-t", "--tests", help="run unit tests", action="store_true")
    xParser.add_argument("-tt", "--test_texts", help="perform gc tests on texts", action="store_true")
    xParser.add_argument("-p", "--perf", help="run performance tests", action="store_true")
    xParser.add_argument("-pm", "--perf_memo", help="run performance tests and store results in perf_memo.txt", action="store_true")
    xParser.add_argument("-js", "--javascript", help="JavaScript build for Firefox", action="store_true")
    xParser.add_argument("-acd", "--add_community_dictionary", help="add community dictionary to the build", action="store_true")

From (lines 418-435):
    oNow = datetime.datetime.now()
    print("============== MAKE GRAMMALECTE at {0.hour:>2} h {0.minute:>2} min {0.second:>2} s ==============".format(oNow))

    if xArgs.build_data:
        xArgs.build_data_before = True
        xArgs.build_data_after = True

    dir_util.mkpath("_build")
    dir_util.mkpath("grammalecte")
    if xArgs.javascript:
        dir_util.mkpath("grammalecte-js")

    copyGraphspellCore(xArgs.javascript)

    for sLang in xArgs.lang:
        if os.path.exists("gc_lang/"+sLang) and os.path.isdir("gc_lang/"+sLang):
            xConfig = getConfig(sLang)
            dVars = xConfig._sections['args']

To (lines 417-434):
    oNow = datetime.datetime.now()
    print("============== MAKE GRAMMALECTE at {0.hour:>2} h {0.minute:>2} min {0.second:>2} s ==============".format(oNow))

    if xArgs.build_data:
        xArgs.build_data_before = True
        xArgs.build_data_after = True

    helpers.createFolder("_build")
    helpers.createFolder("grammalecte")
    if xArgs.javascript:
        helpers.createFolder("grammalecte-js")

    copyGraphspellCore(xArgs.javascript)

    for sLang in xArgs.lang:
        if os.path.exists("gc_lang/"+sLang) and os.path.isdir("gc_lang/"+sLang):
            xConfig = getConfig(sLang)
            dVars = xConfig._sections['args']