Grammalecte  Check-in [974fc74edb]

Overview
Comment:[build] extra-tests
Downloads: Tarball | ZIP archive | SQL archive
Timelines: family | ancestors | descendants | both | trunk | build
Files: files | file ages | folders
SHA3-256: 974fc74edb3ab373e8cbeef8a386a7edce790323789a7e22a1cd32ec67b7eab0
User & Date: olr on 2021-02-18 17:07:47
Other Links: manifest | tags
Context
2021-02-20
11:38
[fr] faux positifs et ajustements check-in: 12eb1d89a3 user: olr tags: trunk, fr
2021-02-18
17:07
[build] extra-tests check-in: 974fc74edb user: olr tags: trunk, build
12:33
[core][fr] update suggestion tests check-in: 475e6bc674 user: olr tags: trunk, fr, core
Changes

Modified make.py from [a76be310e9] to [b370bc0e26].

351
352
353
354
355
356
357


























358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373

374
375
376
377
378
379
380
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407







+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
















+







        elif sType == "personal":
            spfLexSrc = dVars['lexicon_personal_src']
            sfDictDst = dVars['dic_personal_filename']
            sDicName = dVars['dic_personal_name']
            sDescription = dVars['dic_personal_description']
        lex_build.build(spfLexSrc, dVars['lang'], dVars['lang_name'], sfDictDst, bJavaScript, sDicName, sDescription, "", dVars['stemming_method'], int(dVars['fsa_method']))


def extraTest (sLang):
    "test grammar checker with files in <gc_lang/xx/tests>: parse every <test_*.txt> and print paragraphs with grammar errors"
    if os.path.isdir(f"gc_lang/{sLang}/tests"):
        grammalecte = importlib.import_module("grammalecte")
        oGrammarChecker = grammalecte.GrammarChecker(sLang)
        for sf in os.listdir(f"gc_lang/{sLang}/tests"):
            if sf.startswith("test_") and sf.endswith(".txt"):
                spf = f"gc_lang/{sLang}/tests/" + sf
                with open(spf, "r", encoding="utf-8") as hSrc:
                    print(f"> Test text: {spf}", end="")
                    # first pass: count lines to compute the progress step
                    nLine = sum(1 for _ in hSrc)
                    if nLine == 0:
                        # empty file: skip, otherwise <i> below would be unbound
                        print(f"\r> Test text: {spf} (empty file)")
                        continue
                    nPercent = max(nLine // 100, 1)
                    hSrc.seek(0) # rewind to start of file
                    for i, sLine in enumerate(hSrc, 1):
                        if (i % nPercent == 0):
                            print(f"\r> Test text: {spf} ({i // nPercent} %)", end="")
                        aGrammErrs, aSpellErrs = oGrammarChecker.getParagraphErrors(sLine)
                        if aGrammErrs:
                            # show the offending paragraph with errors underlined (160 chars wide)
                            sText, _ = grammalecte.text.generateParagraph(sLine, aGrammErrs, aSpellErrs, 160)
                            print(f"\n# Line {i}")
                            print(sText)
                    print(f"\r> Test text: {spf} ({i // nPercent} %): {i} lines.")
    else:
        print(f"# Error. No folder <gc_lang/{sLang}/tests>. With option -tt, all texts named <test_*.txt> in this folder will be parsed by the grammar checker.")


def main ():
    "build Grammalecte with requested options"
    print("Python: " + sys.version)
    if sys.version < "3.7":
        print("Python 3.7+ required")
        return
    xParser = argparse.ArgumentParser()
    xParser.add_argument("lang", type=str, nargs='+', help="lang project to generate (name of folder in /lang)")
    xParser.add_argument("-uc", "--use_cache", help="use data cache instead of rebuilding rules", action="store_true")
    xParser.add_argument("-frb", "--force_rebuild", help="force rebuilding rules", action="store_true")
    xParser.add_argument("-b", "--build_data", help="launch build_data.py (part 1 and 2)", action="store_true")
    xParser.add_argument("-bb", "--build_data_before", help="launch build_data.py (only part 1: before dictionary building)", action="store_true")
    xParser.add_argument("-ba", "--build_data_after", help="launch build_data.py (only part 2: before dictionary building)", action="store_true")
    xParser.add_argument("-d", "--dict", help="generate FSA dictionary", action="store_true")
    xParser.add_argument("-t", "--tests", help="run unit tests", action="store_true")
    xParser.add_argument("-tt", "--test_texts", help="perform gc tests on texts", action="store_true")
    xParser.add_argument("-p", "--perf", help="run performance tests", action="store_true")
    xParser.add_argument("-pm", "--perf_memo", help="run performance tests and store results in perf_memo.txt", action="store_true")
    xParser.add_argument("-js", "--javascript", help="JavaScript build for Firefox", action="store_true")
    xParser.add_argument("-acd", "--add_community_dictionary", help="add community dictionary to the build", action="store_true")
    xParser.add_argument("-apd", "--add_personal_dictionary", help="add personal dictionary to the build", action="store_true")
    xParser.add_argument("-fx", "--firefox", help="Launch Firefox for WebExtension testing", action="store_true")
    xParser.add_argument("-fxd", "--firefox_dev", help="Launch Firefox Developper for WebExtension testing", action="store_true")
451
452
453
454
455
456
457



458
459
460
461
462
463
464
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494







+
+
+







                try:
                    tests = importlib.import_module(f"grammalecte.{sLang}.tests_core")
                except ImportError:
                    print(f"# Error. Import failed: grammalecte.{sLang}.tests_core")
                else:
                    sResultFile = f"gc_lang/{sLang}/perf_memo.txt"  if xArgs.perf_memo  else ""
                    tests.perf(sVersion, sResultFile)

            if xArgs.test_texts:
                extraTest(sLang)

            # JavaScript linter
            if xArgs.lint_web_ext:
                with helpers.CD("_build/webext/"+sLang):
                    os.system(r'web-ext lint -o text')

            # Firefox