Before (original lines 566-580):
        file_util.copy_file('césures/hyph_fr.dic', spExt+'/dictionaries')
        file_util.copy_file('césures/hyph_fr.iso8859-1.dic', spExt+'/dictionaries')
        file_util.copy_file('césures/frhyph.tex', spExt+'/dictionaries')
        file_util.copy_file('césures/hyph-fr.tex', spExt+'/dictionaries')
        file_util.copy_file('césures/README_hyph_fr-3.0.txt', spExt+'/dictionaries')
        file_util.copy_file('césures/README_hyph_fr-2.9.txt', spExt+'/dictionaries')
        # thesaurus
        dir_util.copy_tree(spBuild + "/thesaurus-v" + sThesVer, spExt + "/dictionaries")
        # zip
        createZipFiles(spExt, spBuild, sExtensionName + '.oxt')
        # copy to Grammalecte Project
        if spDestGL:
            echo(" Dictionnaires Hunspell copiés dans Grammalecte pour LibreOffice...")
            dir_util.copy_tree(spExt+'/dictionaries', spDestGL)
After (modified lines 566-582):
        file_util.copy_file('césures/hyph_fr.dic', spExt+'/dictionaries')
        file_util.copy_file('césures/hyph_fr.iso8859-1.dic', spExt+'/dictionaries')
        file_util.copy_file('césures/frhyph.tex', spExt+'/dictionaries')
        file_util.copy_file('césures/hyph-fr.tex', spExt+'/dictionaries')
        file_util.copy_file('césures/README_hyph_fr-3.0.txt', spExt+'/dictionaries')
        file_util.copy_file('césures/README_hyph_fr-2.9.txt', spExt+'/dictionaries')
        # thesaurus
        file_util.copy_file(spBuild+"/thesaurus-v"+sThesVer+'/thes_fr.dat', spExt+"/dictionaries")
        file_util.copy_file(spBuild+"/thesaurus-v"+sThesVer+'/thes_fr.idx', spExt+"/dictionaries")
        file_util.copy_file(spBuild+"/thesaurus-v"+sThesVer+'/README_thes_fr.txt', spExt+"/dictionaries")
        # zip
        createZipFiles(spExt, spBuild, sExtensionName + '.oxt')
        # copy to Grammalecte Project
        if spDestGL:
            echo(" Dictionnaires Hunspell copiés dans Grammalecte pour LibreOffice...")
            dir_util.copy_tree(spExt+'/dictionaries', spDestGL)
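The change above stops mirroring the whole thesaurus build folder into the extension (dir_util.copy_tree) and copies only the three files the .oxt actually ships. As an illustration only, the same selective copy could be written as a small loop; the helper name below is hypothetical, while the file list and the distutils call are taken from the lines above:

    from distutils import file_util

    def copyThesaurusFilesForOxt (spThesaurusBuild, spDictFolder):
        # copy only the files shipped in the extension; any other build
        # artefacts (for instance a JSON export) stay behind
        for sFile in ("thes_fr.dat", "thes_fr.idx", "README_thes_fr.txt"):
            file_util.copy_file(spThesaurusBuild + "/" + sFile, spDictFolder)

    # e.g. copyThesaurusFilesForOxt(spBuild+"/thesaurus-v"+sThesVer, spExt+"/dictionaries")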
Before (original lines 1404-1429):
        with open(sPathFile, 'w', encoding='utf-8', newline="\n") as hDst:
            for t in self.lLex:
                hDst.write(str(t)+"\n")
            for e in self.dFlexions.items():
                hDst.write("{} - {}\n".format(e[0], e[1]))


def createThesaurusPackage (spBuild, sVersion, spCopy=""):
    print(" * Création du thésaurus")
    spThesaurus = spBuild+"/thesaurus-v"+sVersion
    dir_util.mkpath(spThesaurus)
    thes_build.build("thesaurus/thes_fr.dat", "thesaurus/synsets_fr.dat", spThesaurus)
    file_util.copy_file('thesaurus/README_thes_fr.txt', spThesaurus)
    if spCopy:
        # copy in libreoffice extension package
        print(" Copie du thésaurus dans:", spCopy)
        file_util.copy_file(spThesaurus+'/thes_fr.dat', spCopy)
        file_util.copy_file(spThesaurus+'/thes_fr.idx', spCopy)
        file_util.copy_file(spThesaurus+'/README_thes_fr.txt', spCopy)


def main ():
    xParser = argparse.ArgumentParser()
    xParser.add_argument("-v", "--verdic", help="set dictionary version, i.e. 5.4", type=str, default="X.Y.z")
    xParser.add_argument("-m", "--mode", help="0: no tags, 1: Hunspell tags (default), 2: All tags", type=int, choices=[0, 1, 2], default=1)
    xParser.add_argument("-u", "--uncompress", help="do not use Hunspell compression", action="store_true")
After (modified lines 1406-1434):
        with open(sPathFile, 'w', encoding='utf-8', newline="\n") as hDst:
            for t in self.lLex:
                hDst.write(str(t)+"\n")
            for e in self.dFlexions.items():
                hDst.write("{} - {}\n".format(e[0], e[1]))


def createThesaurusPackage (spBuild, sVersion, spCopy="", spDataDestGL=""):
    print(" * Création du thésaurus")
    spThesaurus = spBuild+"/thesaurus-v"+sVersion
    dir_util.mkpath(spThesaurus)
    thes_build.build("thesaurus/thes_fr.dat", "thesaurus/synsets_fr.dat", spThesaurus)
    file_util.copy_file('thesaurus/README_thes_fr.txt', spThesaurus)
    if spCopy:
        # copy in libreoffice extension package
        print(" Copie du thésaurus dans:", spCopy)
        file_util.copy_file(spThesaurus+'/thes_fr.dat', spCopy)
        file_util.copy_file(spThesaurus+'/thes_fr.idx', spCopy)
        file_util.copy_file(spThesaurus+'/README_thes_fr.txt', spCopy)
    if spDataDestGL:
        # copy in data source folder of Grammalecte
        file_util.copy_file(spThesaurus+'/thes_fr.json', spDataDestGL)


def main ():
    xParser = argparse.ArgumentParser()
    xParser.add_argument("-v", "--verdic", help="set dictionary version, i.e. 5.4", type=str, default="X.Y.z")
    xParser.add_argument("-m", "--mode", help="0: no tags, 1: Hunspell tags (default), 2: All tags", type=int, choices=[0, 1, 2], default=1)
    xParser.add_argument("-u", "--uncompress", help="do not use Hunspell compression", action="store_true")
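With the new optional spDataDestGL parameter, one call can now populate both the LibreOffice extension and Grammalecte's data folder (thes_fr.json is copied only when that destination is given). A brief usage sketch; only the signature comes from the code above, the directory arguments are placeholders and the call assumes the build script's module context (dir_util, file_util, thes_build):

    createThesaurusPackage("_build", "2.4")                                           # build only
    createThesaurusPackage("_build", "2.4", spCopy="_build/ext/dictionaries")          # also copy into the .oxt tree
    createThesaurusPackage("_build", "2.4", spCopy="_build/ext/dictionaries", spDataDestGL="../data")  # also export thes_fr.json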
Before (original lines 1471-1497):
    oStatsLex.addLexFromFile('lexique/corpus_data/stats_litterature.txt', 'L', 'Littérature')
    oStatsLex.write(spBuild+'/test_lex.txt')
    oFrenchDict.calculateStats(oStatsLex, spfStats)

    ### Écriture des paquets
    echo("Création des paquets...")
    spLexiconDestGL = "../../../lexicons" if xArgs.grammalecte else ""
    spLibreOfficeExtDestGL = "../oxt/Dictionnaires/dictionaries" if xArgs.grammalecte else ""
    spMozillaExtDestGL = "" if xArgs.grammalecte else "" # no more Hunspell dictionaries in Mozilla extensions for now
    spDataDestGL = "../data" if xArgs.grammalecte else ""

    ### dictionnaires
    if not xArgs.uncompress:
        oFrenchDict.defineAbreviatedTags(xArgs.mode, spfStats)
    oFrenchDict.createFiles(spBuild, [dTOUTESVAR, dCLASSIQUE, dREFORME1990], xArgs.mode, xArgs.simplify)
    oFrenchDict.createLexiconPackages(spBuild, xArgs.verdic, oStatsLex, spLexiconDestGL)
    oFrenchDict.createFileIfqForDB(spBuild)
    createThesaurusPackage(spBuild, "2.4", spLibreOfficeExtDestGL)
    oFrenchDict.createLibreOfficeExtension(spBuild, dMOZEXT, [dTOUTESVAR, dCLASSIQUE, dREFORME1990], "2.4", spLibreOfficeExtDestGL)
    oFrenchDict.createMozillaExtensions(spBuild, dMOZEXT, [dTOUTESVAR, dCLASSIQUE, dREFORME1990], spMozillaExtDestGL)
    oFrenchDict.createDictConj(spBuild, spDataDestGL)
    oFrenchDict.createDictDecl(spBuild, spDataDestGL)


if __name__ == '__main__':
    main()
After (modified lines 1476-1503):
    oStatsLex.addLexFromFile('lexique/corpus_data/stats_litterature.txt', 'L', 'Littérature')
    oStatsLex.write(spBuild+'/test_lex.txt')
    oFrenchDict.calculateStats(oStatsLex, spfStats)

    ### Écriture des paquets
    echo("Création des paquets...")
    sThesaurusVersion = "2.4"  # version string, concatenated into the thesaurus folder path
    spLexiconDestGL = "../../../lexicons" if xArgs.grammalecte else ""
    spLibreOfficeExtDestGL = "../oxt/Dictionnaires/dictionaries" if xArgs.grammalecte else ""
    spMozillaExtDestGL = "" if xArgs.grammalecte else "" # no more Hunspell dictionaries in Mozilla extensions for now
    spDataDestGL = "../data" if xArgs.grammalecte else ""

    ### dictionnaires
    if not xArgs.uncompress:
        oFrenchDict.defineAbreviatedTags(xArgs.mode, spfStats)
    oFrenchDict.createFiles(spBuild, [dTOUTESVAR, dCLASSIQUE, dREFORME1990], xArgs.mode, xArgs.simplify)
    oFrenchDict.createLexiconPackages(spBuild, xArgs.verdic, oStatsLex, spLexiconDestGL)
    oFrenchDict.createFileIfqForDB(spBuild)
    createThesaurusPackage(spBuild, sThesaurusVersion, spLibreOfficeExtDestGL, spDataDestGL)
    oFrenchDict.createLibreOfficeExtension(spBuild, dMOZEXT, [dTOUTESVAR, dCLASSIQUE, dREFORME1990], sThesaurusVersion, spLibreOfficeExtDestGL)
    oFrenchDict.createMozillaExtensions(spBuild, dMOZEXT, [dTOUTESVAR, dCLASSIQUE, dREFORME1990], spMozillaExtDestGL)
    oFrenchDict.createDictConj(spBuild, spDataDestGL)
    oFrenchDict.createDictDecl(spBuild, spDataDestGL)


if __name__ == '__main__':
    main()
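The *DestGL variables set up here follow the script's existing convention: each destination is an empty string unless the data should also be copied into the Grammalecte source tree, and the packaging functions simply skip the copy when the string is empty (as in the "if spDataDestGL:" branch added above). A minimal, self-contained sketch of that pattern; the add_argument line for --grammalecte is an assumption, since only xArgs.grammalecte is visible in this hunk:

    import argparse

    xParser = argparse.ArgumentParser()
    # assumed declaration; the real script defines this option elsewhere
    xParser.add_argument("--grammalecte", action="store_true")
    xArgs = xParser.parse_args(["--grammalecte"])

    # empty string means "do not copy into the Grammalecte source tree"
    spDataDestGL = "../data" if xArgs.grammalecte else ""
    if spDataDestGL:
        print("thes_fr.json would be copied to", spDataDestGL)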