    elif sType == "personal":
        spfLexSrc = dVars['lexicon_personal_src']
        sfDictDst = dVars['dic_personal_filename']
        sDicName = dVars['dic_personal_name']
        sDescription = dVars['dic_personal_description']
        lex_build.build(spfLexSrc, dVars['lang'], dVars['lang_name'], sfDictDst, bJavaScript, sDicName, sDescription, "", dVars['stemming_method'], int(dVars['fsa_method']))
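        # Annotation (inferred from the variable names only, not verified against lex_build's signature):
        # the positional arguments above appear to be: source lexicon path, language code,
        # language name, destination dictionary filename, JavaScript flag, dictionary name,
        # description, filter (empty here), stemming method, FSA method.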


def extraTest (sLang):
    "test grammar checker with files in <gc_lang/xx/tests>"
    if os.path.isdir(f"gc_lang/{sLang}/tests"):
        grammalecte = importlib.import_module("grammalecte")
        oGrammarChecker = grammalecte.GrammarChecker(sLang)
        for sf in os.listdir(f"gc_lang/{sLang}/tests"):
            if sf.startswith("test_") and sf.endswith(".txt"):
                spf = f"gc_lang/{sLang}/tests/" + sf
                with open(spf, "r", encoding="utf-8") as hSrc:
                    print(f"> Test text: {spf}", end="")
                    nLine = sum(1 for _ in hSrc)    # count lines to size the progress step
                    nPercent = max(nLine // 100, 1)
                    hSrc.seek(0)                    # rewind to start of file
                    for i, sLine in enumerate(hSrc, 1):
                        if i % nPercent == 0:
                            print(f"\r> Test text: {spf} ({i // nPercent} %)", end="")
                        aGrammErrs, aSpellErrs = oGrammarChecker.getParagraphErrors(sLine)
                        if aGrammErrs:
                            sText, _ = grammalecte.text.generateParagraph(sLine, aGrammErrs, aSpellErrs, 160)
                            print(f"\n# Line {i}")
                            print(sText)
                    print(f"\r> Test text: {spf} ({i // nPercent} %): {i} lines.")
    else:
        print(f"# Error. No folder <gc_lang/{sLang}/tests>. With option -tt, all texts named <test_*.txt> in this folder will be parsed by the grammar checker.")


def main ():
    "build Grammalecte with requested options"
    print("Python: " + sys.version)
    if sys.version_info < (3, 7):   # compare the version tuple; comparing sys.version as a string misorders 3.10+
        print("Python 3.7+ required")
        return
    xParser = argparse.ArgumentParser()
    xParser.add_argument("lang", type=str, nargs='+', help="lang project to generate (name of folder in /lang)")
    xParser.add_argument("-uc", "--use_cache", help="use data cache instead of rebuilding rules", action="store_true")
    xParser.add_argument("-frb", "--force_rebuild", help="force rebuilding rules", action="store_true")
    xParser.add_argument("-b", "--build_data", help="launch build_data.py (parts 1 and 2)", action="store_true")
    xParser.add_argument("-bb", "--build_data_before", help="launch build_data.py (only part 1: before dictionary building)", action="store_true")
    xParser.add_argument("-ba", "--build_data_after", help="launch build_data.py (only part 2: after dictionary building)", action="store_true")
    xParser.add_argument("-d", "--dict", help="generate FSA dictionary", action="store_true")
    xParser.add_argument("-t", "--tests", help="run unit tests", action="store_true")
    xParser.add_argument("-tt", "--test_texts", help="perform gc tests on texts", action="store_true")
    xParser.add_argument("-p", "--perf", help="run performance tests", action="store_true")
    xParser.add_argument("-pm", "--perf_memo", help="run performance tests and store results in perf_memo.txt", action="store_true")
    xParser.add_argument("-js", "--javascript", help="JavaScript build for Firefox", action="store_true")
    xParser.add_argument("-acd", "--add_community_dictionary", help="add community dictionary to the build", action="store_true")
    xParser.add_argument("-apd", "--add_personal_dictionary", help="add personal dictionary to the build", action="store_true")
    xParser.add_argument("-fx", "--firefox", help="launch Firefox for WebExtension testing", action="store_true")
    xParser.add_argument("-fxd", "--firefox_dev", help="launch Firefox Developer Edition for WebExtension testing", action="store_true")