Overview
| Comment: | [build][fr] drop the obsolete distutils library and fix the thesaurus builder |
|---|---|
| Downloads: | Tarball, ZIP archive, SQL archive |
| Timelines: | family, ancestors, trunk, fr, build |
| Files: | files, file ages, folders |
| SHA3-256: | 652870dcf1f26dbc9ed9ce20f6d56930 |
| User & Date: | olr on 2025-11-24 16:56:04 |
| Other Links: | manifest, tags |
Context
2025-11-24
| 16:56 | [build][fr] drop the obsolete distutils library and fix the thesaurus builder (Leaf check-in: 652870dcf1, user: olr, tags: trunk, fr, build) |
| 15:13 | [fr] Thésaurus: corrections diverses (check-in: e5e387416d, user: olr, tags: trunk, fr) |
Changes
Modified gc_lang/fr/build.py from [fab4b52b52] to [b75fae61f7].
# Builder for French language
import os
import platform
import zipfile
import shutil
import json
import traceback
import helpers
def build (sLang, dVars):
"complementary build launched from make.py"
dVars['webextOptionsHTML'] = _createOptionsForWebExtension(dVars)
createWebExtension(sLang, dVars)
convertWebExtensionForChrome(sLang, dVars)
createMailExtension(sLang, dVars)
createNodeJSPackage(sLang)
def createWebExtension (sLang, dVars):
"create Web-extension"
print("> Building WebExtension for Firefox")
helpers.createCleanFolder("_build/webext/"+sLang)
shutil.copytree("gc_lang/"+sLang+"/webext/", "_build/webext/"+sLang, dirs_exist_ok=True)
shutil.copytree("grammalecte-js", "_build/webext/"+sLang+"/grammalecte", dirs_exist_ok=True)
helpers.copyAndFileTemplate("_build/webext/"+sLang+"/manifest.json", "_build/webext/"+sLang+"/manifest.json", dVars)
helpers.copyAndFileTemplate("_build/webext/"+sLang+"/panel/main.html", "_build/webext/"+sLang+"/panel/main.html", dVars)
with helpers.CD("_build/webext/"+sLang):
os.system("web-ext build")
# Copy Firefox zip extension to _build
helpers.moveFolderContent("_build/webext/"+sLang+"/web-ext-artifacts", "_build", "firefox-", True)
︙
helpers.addFolderToZipAndFileFile(hZip, f"gc_lang/{sLang}/webext/panel", "panel", dVars, True)
hZip.close()
# Note about copying Thunderbird extension directly into the profile:
# In Options > Configuration editor (about:config), deactivate option <xpinstall.whitelist.required>
# If <manifest.json> is changed, you must reinstall the extension manually
spExtension = dVars['win_tb_debug_extension_path'] if platform.system() == "Windows" else dVars['linux_tb_debug_extension_path']
if os.path.isdir(spExtension):
shutil.copy2(spfZip, f"{spExtension}/{dVars['tb_identifier']}.xpi") # Filename for TB is just <identifier.xpi>
print(f"Thunderbird extension copied in <{spExtension}>")
spExtension = dVars['win_tb_beta_extension_path'] if platform.system() == "Windows" else dVars['linux_tb_beta_extension_path']
if os.path.isdir(spExtension):
shutil.copy2(spfZip, f"{spExtension}/{dVars['tb_identifier']}.xpi") # Filename for TB is just <identifier.xpi>
print(f"Thunderbird extension copied in <{spExtension}>")
def _copyGrammalecteJSPackageInZipFile (hZip, sLang, sAddPath=""):
for sf in os.listdir("grammalecte-js"):
if not os.path.isdir("grammalecte-js/"+sf):
hZip.write("grammalecte-js/"+sf, sAddPath+"grammalecte/"+sf)
for sf in os.listdir("grammalecte-js/graphspell"):
if not os.path.isdir("grammalecte-js/graphspell/"+sf):
hZip.write("grammalecte-js/graphspell/"+sf, sAddPath+"grammalecte/graphspell/"+sf)
for sf in os.listdir("grammalecte-js/graphspell/_dictionaries"):
if not os.path.isdir("grammalecte-js/graphspell/_dictionaries/"+sf):
hZip.write("grammalecte-js/graphspell/_dictionaries/"+sf, sAddPath+"grammalecte/graphspell/_dictionaries/"+sf)
for sf in os.listdir("grammalecte-js/"+sLang):
if not os.path.isdir("grammalecte-js/"+sLang+"/"+sf):
hZip.write("grammalecte-js/"+sLang+"/"+sf, sAddPath+"grammalecte/"+sLang+"/"+sf)
def createNodeJSPackage (sLang):
helpers.createCleanFolder("_build/nodejs/"+sLang)
shutil.copytree("gc_lang/"+sLang+"/nodejs/", "_build/nodejs/"+sLang, dirs_exist_ok=True)
shutil.copytree("grammalecte-js", "_build/nodejs/"+sLang+"/core/grammalecte", dirs_exist_ok=True)
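The build.py hunks above all follow the same pattern: folder and file copies now go through shutil, which the standard library keeps, rather than distutils, which was removed in Python 3.12. A minimal sketch of the mapping, assuming the previous code relied on distutils.dir_util.copy_tree and distutils.file_util.copy_file as the check-in comment suggests:

```python
import shutil

def copy_tree_like(spSrc, spDst):
    # distutils.dir_util.copy_tree(spSrc, spDst) merged into an existing target folder;
    # shutil.copytree only accepts that with dirs_exist_ok=True (Python 3.8+).
    shutil.copytree(spSrc, spDst, dirs_exist_ok=True)

def copy_file_like(spfSrc, spfDst):
    # distutils.file_util.copy_file -> shutil.copy2, which also preserves file metadata.
    shutil.copy2(spfSrc, spfDst)
```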
Modified gc_lang/fr/dictionnaire/genfrdic.py from [23c6a753b0] to [7c1521954d].
︙
import sys
import re
import collections
import zipfile
import math
import argparse
import tags
import shutil
from enum import Enum
from string import Template
import tags
import metagraphe
import metaphone2
import thes_build
︙
if os.path.isfile(spf):
with open(spf, "r", encoding="utf-8") as hSrc:
for sLine in hSrc:
yield sLine
else:
print("# Error: file not found.")
def createFolder (sp):
"make a folder if it doesn’t exist; don’t change anything if it exists"
if not os.path.exists(sp):
os.mkdir(sp)
class Dictionnaire:
def __init__ (self, version, name):
# Dictionary
self.sName = name
self.lEntry = []
︙
hDst.write(Flexion.simpleHeader())
for oFlex in self.lFlexions:
hDst.write(oFlex.getGrammarCheckerRepr())
def createFiles (self, spDst, lDictVars, nMode, bSimplified):
sDicName = PREFIX_DICT_PATH + self.sVersion
spDic = spDst + '/' + sDicName
createFolder(spDic)
for dVars in lDictVars:
# template vars
dVars['version'] = self.sVersion
# Dictionaries files (.dic) (.aff)
self.writeAffixes(spDic, dVars, nMode, bSimplified)
self.writeDictionary(spDic, dVars, nMode, bSimplified)
copyTemplate('orthographe', spDic, 'README_dict_fr.txt', dVars)
createZipFiles(spDic, spDst, sDicName + '.zip')
def createLibreOfficeExtension (self, spBuild, dTplVars, lDictVars, sThesVer, spDestGL=""):
# LibreOffice extension
echo(" * Dictionnaire >> extension pour LibreOffice")
dTplVars['version'] = self.sVersion
sExtensionName = EXT_PREFIX_OOO + self.sVersion
spExt = spBuild + '/' + sExtensionName
createFolder(spExt+'/META-INF')
createFolder(spExt+'/ui')
createFolder(spExt+'/dictionaries')
createFolder(spExt+'/pythonpath')
shutil.copy2('_templates/ooo/manifest.xml', spExt+'/META-INF')
shutil.copy2('_templates/ooo/DictionarySwitcher.py', spExt)
shutil.copy2('_templates/ooo/ds_strings.py', spExt+'/pythonpath')
shutil.copy2('_templates/ooo/addons.xcu', spExt+'/ui')
shutil.copy2('_templates/ooo/french_flag.png', spExt)
shutil.copy2('_templates/ooo/french_flag_16.bmp', spExt+'/ui')
copyTemplate('_templates/ooo', spExt, 'description.xml', dTplVars)
copyTemplate('_templates/ooo', spExt, 'dictionaries.xcu', dTplVars)
#shutil.copy2('_templates/ooo/dictionaries.xcu.tpl.xml', spExt)
copyTemplate('_templates/ooo', spExt, 'package-description.txt', dTplVars)
for dVars in lDictVars:
dicPath = spBuild + '/' + PREFIX_DICT_PATH + self.sVersion
shutil.copy2(dicPath+'/'+dVars['asciiName']+'.dic', spExt+'/dictionaries/'+dVars['asciiName']+'.dic')
shutil.copy2(dicPath+'/'+dVars['asciiName']+'.aff', spExt+'/dictionaries/'+dVars['asciiName']+'.aff')
copyTemplate('orthographe', spExt+'/dictionaries', 'README_dict_fr.txt', dTplVars)
# hyphenation
shutil.copy2('césures/hyph_fr.dic', spExt+'/dictionaries')
shutil.copy2('césures/hyph_fr.iso8859-1.dic', spExt+'/dictionaries')
shutil.copy2('césures/frhyph.tex', spExt+'/dictionaries')
shutil.copy2('césures/hyph-fr.tex', spExt+'/dictionaries')
shutil.copy2('césures/README_hyph_fr-3.0.txt', spExt+'/dictionaries')
shutil.copy2('césures/README_hyph_fr-2.9.txt', spExt+'/dictionaries')
# thesaurus
shutil.copy2(spBuild+"/thesaurus-v"+sThesVer+'/thes_fr.dat', spExt+"/dictionaries")
shutil.copy2(spBuild+"/thesaurus-v"+sThesVer+'/thes_fr.idx', spExt+"/dictionaries")
shutil.copy2(spBuild+"/thesaurus-v"+sThesVer+'/README_thes_fr.txt', spExt+"/dictionaries")
# zip
createZipFiles(spExt, spBuild, sExtensionName + '.oxt')
# copy to Grammalecte Project
if spDestGL:
echo(" Dictionnaires Hunspell copiés dans Grammalecte pour LibreOffice...")
shutil.copytree(spExt+'/dictionaries', spDestGL, dirs_exist_ok=True)
def createMozillaExtensions (self, spBuild, dTplVars, lDictVars, spDestGL=""):
# Mozilla extension 1
echo(" * Dictionnaire >> extension pour Mozilla")
dTplVars['version'] = self.sVersion
sExtensionName = EXT_PREFIX_MOZ + self.sVersion
spExt = spBuild + '/' + sExtensionName
createFolder(spExt+'/dictionaries')
copyTemplate('_templates/moz', spExt, 'manifest.json', dTplVars)
spDict = spBuild + '/' + PREFIX_DICT_PATH + self.sVersion
shutil.copy2(spDict+'/fr-classique.dic', spExt+'/dictionaries/fr-classic.dic')
shutil.copy2(spDict+'/fr-classique.aff', spExt+'/dictionaries/fr-classic.aff')
copyTemplate('orthographe', spExt, 'README_dict_fr.txt', dTplVars)
createZipFiles(spExt, spBuild, sExtensionName + '.xpi')
# Grammalecte
if spDestGL:
echo(" Dictionnaires Hunspell copiés dans Grammalecte pour Mozilla")
for dVars in lDictVars:
shutil.copy2(spDict+'/'+dVars['asciiName']+'.dic', spDestGL+'/'+dVars['mozAsciiName']+"/"+dVars['mozAsciiName']+'.dic')
shutil.copy2(spDict+'/'+dVars['asciiName']+'.aff', spDestGL+'/'+dVars['mozAsciiName']+"/"+dVars['mozAsciiName']+'.aff')
def createFileIfqForDB (self, spBuild):
echo(" * Dictionnaire >> indices de fréquence pour la DB...")
with open(spBuild+'/dictIdxIfq-'+self.sVersion+'.diff.txt', 'w', encoding='utf-8', newline="\n") as hDiff, \
open(spBuild+'/dictIdxIfq-'+self.sVersion+'.notes.txt', 'w', encoding='utf-8', newline="\n") as hNotes:
for oEntry in self.lEntry:
if oEntry.fq != oEntry.oldFq:
hDiff.write("{0.iD}\t{0.fq}\n".format(oEntry))
hNotes.write("{0.lemma}/{0.flags}\t{0.oldFq} > {0.fq}\n".format(oEntry))
def createLexiconPackages (self, spBuild, version, oStatsLex, spDestGL=""):
sLexName = LEX_PREFIX + version
spLex = spBuild + '/' + sLexName
createFolder(spLex)
# write lexicon
self.sortLexiconByFreq()
self.writeLexicon(spLex + '/' + sLexName + '.txt', version, oStatsLex)
self.writeGrammarCheckerLexicon(spBuild + '/' + sLexName + '.lex', version)
copyTemplate('lexique', spLex, 'README_lexique.txt', {'version': version})
# zip
createZipFiles(spLex, spBuild, sLexName + '.zip')
# copy GC lexicon to Grammalecte
if spDestGL:
shutil.copy2(spBuild + '/' + sLexName + '.lex', spDestGL + '/French.lex')
shutil.copy2('lexique/French.tagset.txt', spDestGL)
def createDictConj (self, spBuild, spDestGL=""):
echo(" * Dictionnaire >> fichier de conjugaison...")
with open(spBuild+'/dictConj.txt', 'w', encoding='utf-8', newline="\n") as hDst:
for oEntry in self.lEntry:
if oEntry.po.startswith("v"):
hDst.write(oEntry.getConjugation())
if spDestGL:
echo(" Fichier de conjugaison copié dans Grammalecte...")
shutil.copy2(spBuild+'/dictConj.txt', spDestGL)
def createDictDecl (self, spBuild, spDestGL=""):
echo(" * Dictionnaire >> fichier de déclinaison...")
with open(spBuild+'/dictDecl.txt', 'w', encoding='utf-8', newline="\n") as hDst:
for oEntry in self.lEntry:
if re.match("[SXFWIA]", oEntry.flags) and (oEntry.po.startswith("nom") or oEntry.po.startswith("adj")):
hDst.write(oEntry.getDeclination())
if spDestGL:
echo(" Fichier de déclinaison copié dans Grammalecte...")
shutil.copy2(spBuild+'/dictDecl.txt', spDestGL)
class Entree:
def __init__ (self, sLine):
self.lemma = ''
self.flags = ''
# champs morphologiques Hunspell
︙
for e in self.dFlexions.items():
hDst.write("{} - {}\n".format(e[0], e[1]))
def createThesaurusPackage (spBuild, sVersion, spCopy="", spDataDestGL=""):
print(" * Création du thésaurus")
spThesaurus = spBuild+"/thesaurus-v"+sVersion
createFolder(spThesaurus)
thes_build.build("thesaurus/thes_fr.dat", "thesaurus/synsets_fr.dat", spThesaurus)
shutil.copy2('thesaurus/README_thes_fr.txt', spThesaurus)
if spCopy:
# copy in libreoffice extension package
print(" Copie du thésaurus dans:", spCopy)
shutil.copy2(spThesaurus+'/thes_fr.dat', spCopy)
shutil.copy2(spThesaurus+'/thes_fr.idx', spCopy)
shutil.copy2(spThesaurus+'/README_thes_fr.txt', spCopy)
if spDataDestGL:
# copy in data source folder of Grammalecte
shutil.copy2(spThesaurus+'/thes_fr.json', spDataDestGL)
def main ():
xParser = argparse.ArgumentParser()
xParser.add_argument("-v", "--verdic", help="set dictionary version, i.e. 5.4", type=str, default="X.Y.z")
xParser.add_argument("-m", "--mode", help="0: no tags, 1: Hunspell tags (default), 2: All tags", type=int, choices=[0, 1, 2], default=1)
xParser.add_argument("-u", "--uncompress", help="do not use Hunspell compression", action="store_true")
︙
echo("Version: " + xArgs.verdic)
echo("Simplify: " + str(xArgs.simplify))
echo("Mode: " + str(xArgs.mode))
echo("Compression: " + str(not(xArgs.uncompress)))
### création du répertoire
spBuild = BUILD_PATH + '/' + xArgs.verdic
createFolder(spBuild)
### Lecture des fichiers et création du dictionnaire
oFrenchDict = Dictionnaire(xArgs.verdic, "French dictionary")
for sFile in ['orthographe/FRANCAIS.dic']:
oFrenchDict.readDictionary(sFile)
oFrenchDict.readAffixes('orthographe/FRANCAIS_7.aff')
︙
oStatsLex.addLexFromFile('lexique/corpus_data/stats_litterature.txt', 'L', 'Littérature')
oStatsLex.write(spBuild+'/test_lex.txt')
oFrenchDict.calculateStats(oStatsLex, spfStats)
### Écriture des paquets
echo("Création des paquets...")
sThesaurusVersion = "3.0"
spLexiconDestGL = "../../../lexicons" if xArgs.grammalecte else ""
spLibreOfficeExtDestGL = "../oxt/Dictionnaires/dictionaries" if xArgs.grammalecte else ""
spMozillaExtDestGL = "" if xArgs.grammalecte else "" # no more Hunspell dictionaries in Mozilla extensions for now
spDataDestGL = "../data" if xArgs.grammalecte else ""
### dictionnaires
if not xArgs.uncompress:
oFrenchDict.defineAbreviatedTags(xArgs.mode, spfStats)
oFrenchDict.createFiles(spBuild, [dTOUTESVAR, dCLASSIQUE, dREFORME1990], xArgs.mode, xArgs.simplify)
oFrenchDict.createLexiconPackages(spBuild, xArgs.verdic, oStatsLex, spLexiconDestGL)
oFrenchDict.createFileIfqForDB(spBuild)
createThesaurusPackage(spBuild, sThesaurusVersion, spLibreOfficeExtDestGL, spDataDestGL)
oFrenchDict.createLibreOfficeExtension(spBuild, dMOZEXT, [dTOUTESVAR, dCLASSIQUE, dREFORME1990], sThesaurusVersion, spLibreOfficeExtDestGL)
oFrenchDict.createMozillaExtensions(spBuild, dMOZEXT, [dTOUTESVAR, dCLASSIQUE, dREFORME1990], spMozillaExtDestGL)
oFrenchDict.createDictConj(spBuild, spDataDestGL)
oFrenchDict.createDictDecl(spBuild, spDataDestGL)
if __name__ == '__main__':
main()
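One detail worth noting in the genfrdic.py changes: main() now fixes sThesaurusVersion = "3.0" and passes it both to createThesaurusPackage and to createLibreOfficeExtension, because the latter copies thes_fr.dat, thes_fr.idx and README_thes_fr.txt out of spBuild + "/thesaurus-v" + version. The thesaurus package therefore has to be built first, with the same version string. A self-contained toy sketch of that dependency (hypothetical names and placeholder files, not project code):

```python
import os
import shutil
import tempfile

def build_thesaurus(spBuild, sVer):
    # Stand-in for createThesaurusPackage: create thesaurus-v<ver> with its data files.
    spThes = os.path.join(spBuild, "thesaurus-v" + sVer)
    os.makedirs(spThes, exist_ok=True)
    for sName in ("thes_fr.dat", "thes_fr.idx"):
        open(os.path.join(spThes, sName), "w", encoding="utf-8").close()  # placeholder files
    return spThes

def build_oxt_dictionaries(spBuild, sVer, spDicFolder):
    # Stand-in for the thesaurus part of createLibreOfficeExtension: it reads from
    # thesaurus-v<ver>, so build_thesaurus must have run first with the same version.
    spThes = os.path.join(spBuild, "thesaurus-v" + sVer)
    for sName in ("thes_fr.dat", "thes_fr.idx"):
        shutil.copy2(os.path.join(spThes, sName), spDicFolder)

spBuild = tempfile.mkdtemp()
spDicFolder = os.path.join(spBuild, "dictionaries")
os.makedirs(spDicFolder)
build_thesaurus(spBuild, "3.0")
build_oxt_dictionaries(spBuild, "3.0", spDicFolder)
```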
Modified helpers.py from [3b783d8a38] to [864faa54a3].
︙
def createCleanFolder (sp):
"make an empty folder or erase its content if not empty"
if not os.path.exists(sp):
os.makedirs(sp, exist_ok=True)
else:
eraseFolderContent(sp)
def createFolder (sp):
"make a folder if it doesn’t exist; don’t change anything if it exists"
if not os.path.exists(sp):
os.mkdir(sp)
def copyFolder (spSrc, spDst):
"copy folder content from src to dst"
try:
shutil.copytree(spSrc, spDst)
except OSError as e:
if e.errno == errno.ENOTDIR:
︙
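The same createFolder helper is added here in helpers.py and, as a local copy, in genfrdic.py (which, judging by the imports shown above, does not import helpers). It uses os.mkdir, so it only creates the last path component. If missing parent folders ever needed creating too, the usual idiom would be os.makedirs with exist_ok; a minimal sketch of that alternative, not what the patch does:

```python
import os

def createFolderDeep(sp):
    "make a folder (and any missing parents) if it doesn't exist; leave it alone otherwise"
    # Alternative to the patch's os.mkdir-based helper; exist_ok=True also avoids
    # the race-prone "check then create" pattern.
    os.makedirs(sp, exist_ok=True)
```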
Modified lex_build.py from [c6d9c2afcf] to [f032803b08].
#!python3
"""
Lexicon builder
"""
import argparse
import helpers
import graphspell.dawg as fsa
def build (spfSrc, sLangCode, sLangName, sfDict, bJavaScript=False, sDicName="", sDescription="", sFilter="", cStemmingMethod="S", nCompressMethod=1):
"transform a text lexicon as a binary indexable dictionary"
oDAWG = fsa.DAWG(spfSrc, cStemmingMethod, sLangCode, sLangName, sDicName, sDescription, sFilter)
helpers.createFolder("graphspell/_dictionaries")
oDAWG.writeAsJSObject("graphspell/_dictionaries/" + sfDict + ".json")
if bJavaScript:
helpers.createFolder("graphspell-js/_dictionaries")
oDAWG.writeAsJSObject("graphspell-js/_dictionaries/" + sfDict + ".json")
def main ():
"parse args from CLI"
xParser = argparse.ArgumentParser()
xParser.add_argument("src_lexicon", type=str, help="path and file name of the source lexicon")
︙
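lex_build.build() now relies on helpers.createFolder for the two _dictionaries output folders. Its signature is shown above; a hypothetical invocation for reference (the source lexicon path, dictionary name and description are illustrative only, the real values come from the project configuration read by make.py):

```python
import lex_build

# Hypothetical call: arguments follow the signature shown in the diff above,
# but the paths and names here are made up for illustration.
lex_build.build("lexicons/French.lex", "fr", "French", "fr-allvars",
                bJavaScript=True, sDicName="fr-allvars",
                sDescription="Dictionnaire français",
                cStemmingMethod="S", nCompressMethod=1)
```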
Modified make.py from [03f1f86c56] to [a2a625b112].
︙
import configparser
import datetime
import argparse
import importlib
import unittest
import json
import platform
import shutil
#import dialog_bundled
import compile_rules
import helpers
import lex_build
sWarningMessage = "The content of this folder is generated by code and replaced at each build.\n"
︙
print()
dVars["plugins"] = sCodePlugins
## COPY GC_CORE COMMON FILES
for sf in os.listdir("gc_core/py"):
if not os.path.isdir("gc_core/py/"+sf):
helpers.copyAndFileTemplate("gc_core/py/"+sf, "grammalecte/"+sf, dVars)
shutil.copy2("3rd/bottle.py", "grammalecte/bottle.py")
open("grammalecte/WARNING.txt", "w", encoding="utf-8", newline="\n").write(sWarningMessage)
## CREATE GRAMMAR CHECKER PACKAGE
spLangPack = "grammalecte/"+sLang
helpers.createCleanFolder(spLangPack)
for sf in os.listdir("gc_core/py/lang_core"):
if not os.path.isdir("gc_core/py/lang_core/"+sf):
︙
return dVars['version']
def copyGraphspellCore (bJavaScript=False):
"copy Graphspell package in Grammalecte package"
print("> Copy Graphspell package in Grammalecte package")
helpers.createCleanFolder("grammalecte/graphspell")
helpers.createFolder("grammalecte/graphspell/_dictionaries")
for sf in os.listdir("graphspell"):
if not os.path.isdir("graphspell/"+sf):
shutil.copy2("graphspell/"+sf, "grammalecte/graphspell")
if bJavaScript:
helpers.createCleanFolder("grammalecte-js/graphspell")
helpers.createFolder("grammalecte-js/graphspell/_dictionaries")
dVars = {}
for sf in os.listdir("js_extension"):
dVars[sf[:-3]] = open("js_extension/"+sf, "r", encoding="utf-8").read()
for sf in os.listdir("graphspell-js"):
if not os.path.isdir("graphspell-js/"+sf):
shutil.copy2("graphspell-js/"+sf, "grammalecte-js/graphspell")
helpers.copyAndFileTemplate("graphspell-js/"+sf, "grammalecte-js/graphspell/"+sf, dVars)
def copyGraphspellDictionaries (dVars, bJavaScript=False, bCommunityDict=False, bPersonalDict=False):
"copy requested Graphspell dictionaries in Grammalecte package"
print("> Copy requested Graphspell dictionaries in Grammalecte package")
dVars["dic_main_filename_py"] = ""
︙
lDict.append(("personal", dVars['dic_personal_filename']))
for sType, sFileName in lDict:
spfPyDic = f"graphspell/_dictionaries/{sFileName}.json"
spfJSDic = f"graphspell-js/_dictionaries/{sFileName}.json"
if not os.path.isfile(spfPyDic) or (bJavaScript and not os.path.isfile(spfJSDic)):
buildDictionary(dVars, sType, bJavaScript)
print(" +", spfPyDic)
shutil.copy2(spfPyDic, "grammalecte/graphspell/_dictionaries")
dVars['dic_'+sType+'_filename_py'] = sFileName + '.json'
if bJavaScript:
print(" +", spfJSDic)
shutil.copy2(spfJSDic, "grammalecte-js/graphspell/_dictionaries")
dVars['dic_'+sType+'_filename_js'] = sFileName + '.json'
dVars['dic_main_filename_py'] = dVars['dic_default_filename_py'] + ".json"
dVars['dic_main_filename_js'] = dVars['dic_default_filename_js'] + ".json"
def buildDictionary (dVars, sType, bJavaScript=False):
"build binary dictionary for Graphspell from lexicons"
︙
return
xParser = argparse.ArgumentParser()
xParser.add_argument("lang", type=str, nargs='+', help="lang project to generate (name of folder in /lang)")
xParser.add_argument("-uc", "--use_cache", help="use data cache instead of rebuilding rules", action="store_true")
xParser.add_argument("-frb", "--force_rebuild", help="force rebuilding rules", action="store_true")
xParser.add_argument("-b", "--build_data", help="launch build_data.py (part 1 and 2)", action="store_true")
xParser.add_argument("-bb", "--build_data_before", help="launch build_data.py (only part 1: before dictionary building)", action="store_true")
xParser.add_argument("-ba", "--build_data_after", help="launch build_data.py (only part 2: after dictionary building)", action="store_true")
xParser.add_argument("-d", "--dict", help="generate FSA dictionary", action="store_true")
xParser.add_argument("-t", "--tests", help="run unit tests", action="store_true")
xParser.add_argument("-tt", "--test_texts", help="perform gc tests on texts", action="store_true")
xParser.add_argument("-p", "--perf", help="run performance tests", action="store_true")
xParser.add_argument("-pm", "--perf_memo", help="run performance tests and store results in perf_memo.txt", action="store_true")
xParser.add_argument("-js", "--javascript", help="JavaScript build for Firefox", action="store_true")
xParser.add_argument("-acd", "--add_community_dictionary", help="add community dictionary to the build", action="store_true")
︙
oNow = datetime.datetime.now()
print("============== MAKE GRAMMALECTE at {0.hour:>2} h {0.minute:>2} min {0.second:>2} s ==============".format(oNow))
if xArgs.build_data:
xArgs.build_data_before = True
xArgs.build_data_after = True
helpers.createFolder("_build")
helpers.createFolder("grammalecte")
if xArgs.javascript:
helpers.createFolder("grammalecte-js")
copyGraphspellCore(xArgs.javascript)
for sLang in xArgs.lang:
if os.path.exists("gc_lang/"+sLang) and os.path.isdir("gc_lang/"+sLang):
xConfig = getConfig(sLang)
dVars = xConfig._sections['args']
︙