Overview
Comment: | [graphspell][build][core] add community dictionary |
---|---|
Downloads: | Tarball | ZIP archive | SQL archive |
Timelines: | family | ancestors | descendants | both | core | build | graphspell | multid |
Files: | files | file ages | folders |
SHA3-256: |
6fe239d96648197a4d4aa0ff002a95d7 |
User & Date: | olr on 2018-03-20 12:32:08 |
Other Links: | branch diff | manifest | tags |
Context
2018-03-20
| ||
12:43 | [build][fr][bug] config.ini: fix build check-in: 5d03787a55 user: olr tags: fr, build, multid | |
12:32 | [graphspell][build][core] add community dictionary check-in: 6fe239d966 user: olr tags: core, build, graphspell, multid | |
12:04 | [tb] comment: spelling mistake check-in: 0726b05211 user: olr tags: tb, multid | |
Changes
Modified gc_core/js/lang_core/gc_engine.js from [2ba338046d] to [7ee1350cd7].
︙ | ︙ | |||
319 320 321 322 323 324 325 | //// Initialization load: function (sContext="JavaScript", sPath="") { try { if (typeof(require) !== 'undefined') { var spellchecker = require("resource://grammalecte/graphspell/spellchecker.js"); | | | | 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 | //// Initialization load: function (sContext="JavaScript", sPath="") { try { if (typeof(require) !== 'undefined') { var spellchecker = require("resource://grammalecte/graphspell/spellchecker.js"); _oSpellChecker = new spellchecker.SpellChecker("${lang}", "", "${dic_main_filename_js}", "${dic_extended_filename_js}", "${dic_community_filename_js}", "${dic_personal_filename_js}"); } else { _oSpellChecker = new SpellChecker("${lang}", sPath, "${dic_main_filename_js}", "${dic_extended_filename_js}", "${dic_community_filename_js}", "${dic_personal_filename_js}"); } _sAppContext = sContext; _dOptions = gc_options.getOptions(sContext).gl_shallowCopy(); // duplication necessary, to be able to reset to default } catch (e) { helpers.logerror(e); } |
︙ | ︙ |
Modified gc_core/py/lang_core/gc_engine.py from [b15edd00ca] to [72ecd7c680].
︙ | ︙ | |||
288 289 290 291 292 293 294 | def load (sContext="Python"): global _oSpellChecker global _sAppContext global _dOptions try: | | | 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 | def load (sContext="Python"): global _oSpellChecker global _sAppContext global _dOptions try: _oSpellChecker = SpellChecker("${lang}", "${dic_main_filename_py}", "${dic_extended_filename_py}", "${dic_community_filename_py}", "${dic_personal_filename_py}") _sAppContext = sContext _dOptions = dict(gc_options.getOptions(sContext)) # duplication necessary, to be able to reset to default except: traceback.print_exc() def setOption (sOpt, bVal): |
︙ | ︙ |
Modified gc_lang/fr/config.ini from [5898e7ef09] to [25d4196521].
︙ | ︙ | |||
18 19 20 21 22 23 24 25 26 27 28 29 30 31 | lexicon_src = lexicons/French.lex dic_filename = fr dic_name = French # extended dictionary lexicon_extended_src = lexicons/French.extended.lex dic_extended_filename = fr.extended dic_extended_name = Français - dictionnaire étendu # personal dictionary lexicon_personal_src = lexicons/French.personal.lex dic_personal_filename = fr.personal dic_personal_name = Français - dictionnaire personnel # Finite state automaton compression: 1, 2 (experimental) or 3 (experimental) fsa_method = 1 # stemming method: S for suffixes only, A for prefixes and suffixes | > > > > | 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 | lexicon_src = lexicons/French.lex dic_filename = fr dic_name = French # extended dictionary lexicon_extended_src = lexicons/French.extended.lex dic_extended_filename = fr.extended dic_extended_name = Français - dictionnaire étendu # community dictionary lexicon_community_src = lexicons/French.community.lex dic_community_filename = fr.community dic_community_name = Français - dictionnaire communautaire # personal dictionary lexicon_personal_src = lexicons/French.personal.lex dic_personal_filename = fr.personal dic_personal_name = Français - dictionnaire personnel # Finite state automaton compression: 1, 2 (experimental) or 3 (experimental) fsa_method = 1 # stemming method: S for suffixes only, A for prefixes and suffixes |
︙ | ︙ |
Modified graphspell-js/spellchecker.js from [a4e4ee731d] to [a6a425b5cc].
︙ | ︙ | |||
24 25 26 27 28 29 30 | ["fr", "fr.json"], ["en", "en.json"] ]); class SpellChecker { | | > | 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 | ["fr", "fr.json"], ["en", "en.json"] ]); class SpellChecker { constructor (sLangCode, sPath="", mainDic="", extentedDic="", communityDic="", personalDic="") { // returns true if the main dictionary is loaded this.sLangCode = sLangCode; if (!mainDic) { mainDic = dDefaultDictionaries.gl_get(sLangCode, ""); } this.oMainDic = this._loadDictionary(mainDic, sPath, true); this.oExtendedDic = this._loadDictionary(extentedDic, sPath); this.oCommunityDic = this._loadDictionary(communityDic, sPath); this.oPersonalDic = this._loadDictionary(personalDic, sPath); this.oTokenizer = null; } _loadDictionary (dictionary, sPath, bNecessary=false) { // returns an IBDAWG object if (!dictionary) { |
︙ | ︙ | |||
85 86 87 88 89 90 91 92 93 94 95 96 97 98 | } setExtendedDictionary (dictionary) { // returns true if the dictionary is loaded this.oExtendedDic = this._loadDictionary(dictionary); return Boolean(this.oExtendedDic); } setPersonalDictionary (dictionary) { // returns true if the dictionary is loaded this.oPersonalDic = this._loadDictionary(dictionary); return Boolean(this.oPersonalDic); } | > > > > > > | 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 | } setExtendedDictionary (dictionary) { // returns true if the dictionary is loaded this.oExtendedDic = this._loadDictionary(dictionary); return Boolean(this.oExtendedDic); } setCommunityDictionary (dictionary) { // returns true if the dictionary is loaded this.oCommunityDic = this._loadDictionary(dictionary); return Boolean(this.oCommunityDic); } setPersonalDictionary (dictionary) { // returns true if the dictionary is loaded this.oPersonalDic = this._loadDictionary(dictionary); return Boolean(this.oPersonalDic); } |
︙ | ︙ | |||
116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 | isValidToken (sToken) { // checks if sToken is valid (if there is hyphens in sToken, sToken is split, each part is checked) if (this.oMainDic.isValidToken(sToken)) { return true; } if (this.oExtendedDic && this.oExtendedDic.isValidToken(sToken)) { return true; } if (this.oPersonalDic && this.oPersonalDic.isValidToken(sToken)) { return true; } return false; } isValid (sWord) { // checks if sWord is valid (different casing tested if the first letter is a capital) if (this.oMainDic.isValid(sWord)) { return true; } if (this.oExtendedDic && this.oExtendedDic.isValid(sWord)) { return true; } if (this.oPersonalDic && this.oPersonalDic.isValid(sWord)) { return true; } return false; } lookup (sWord) { // checks if sWord is in dictionary as is (strict verification) if (this.oMainDic.lookup(sWord)) { return true; } if (this.oExtendedDic && this.oExtendedDic.lookup(sWord)) { return true; } if (this.oPersonalDic && this.oPersonalDic.lookup(sWord)) { return true; } return false; } getMorph (sWord) { // retrieves morphologies list, different casing allowed let lResult = this.oMainDic.getMorph(sWord); if (this.oExtendedDic) { lResult.push(...this.oExtendedDic.getMorph(sWord)); } if (this.oPersonalDic) { lResult.push(...this.oPersonalDic.getMorph(sWord)); } return lResult; } * suggest (sWord, nSuggLimit=10) { // generator: returns 1, 2 or 3 lists of suggestions yield this.oMainDic.suggest(sWord, nSuggLimit); if (this.oExtendedDic) { yield this.oExtendedDic.suggest(sWord, nSuggLimit); } if (this.oPersonalDic) { yield this.oPersonalDic.suggest(sWord, nSuggLimit); } } * select (sPattern="") { // generator: returns all entries which morphology fits <sPattern> yield* 
this.oMainDic.select(sPattern) if (this.oExtendedDic) { yield* this.oExtendedDic.select(sPattern); } if (this.oPersonalDic) { yield* this.oPersonalDic.select(sPattern); } } } if (typeof(exports) !== 'undefined') { exports.SpellChecker = SpellChecker; } | > > > > > > > > > > > > > > > > > > | 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 | isValidToken (sToken) { // checks if sToken is valid (if there is hyphens in sToken, sToken is split, each part is checked) if (this.oMainDic.isValidToken(sToken)) { return true; } if (this.oExtendedDic && this.oExtendedDic.isValidToken(sToken)) { return true; } if (this.oCommunityDic && this.oCommunityDic.isValidToken(sToken)) { return true; } if (this.oPersonalDic && this.oPersonalDic.isValidToken(sToken)) { return true; } return false; } isValid (sWord) { // checks if sWord is valid (different casing tested if the first letter is a capital) if (this.oMainDic.isValid(sWord)) { return true; } if (this.oExtendedDic && this.oExtendedDic.isValid(sWord)) { return true; } if (this.oCommunityDic && this.oCommunityDic.isValid(sWord)) { return true; } if (this.oPersonalDic && this.oPersonalDic.isValid(sWord)) { return true; } return false; } lookup (sWord) { // checks if sWord is in dictionary as is (strict verification) if (this.oMainDic.lookup(sWord)) { return true; } if (this.oExtendedDic && this.oExtendedDic.lookup(sWord)) { return true; } if (this.oCommunityDic && this.oCommunityDic.lookup(sWord)) { return true; } if (this.oPersonalDic && this.oPersonalDic.lookup(sWord)) { return true; } return false; } getMorph (sWord) { // retrieves morphologies list, different casing allowed let lResult = 
this.oMainDic.getMorph(sWord); if (this.oExtendedDic) { lResult.push(...this.oExtendedDic.getMorph(sWord)); } if (this.oCommunityDic) { lResult.push(...this.oCommunityDic.getMorph(sWord)); } if (this.oPersonalDic) { lResult.push(...this.oPersonalDic.getMorph(sWord)); } return lResult; } * suggest (sWord, nSuggLimit=10) { // generator: returns 1, 2 or 3 lists of suggestions yield this.oMainDic.suggest(sWord, nSuggLimit); if (this.oExtendedDic) { yield this.oExtendedDic.suggest(sWord, nSuggLimit); } if (this.oCommunityDic) { yield this.oCommunityDic.suggest(sWord, nSuggLimit); } if (this.oPersonalDic) { yield this.oPersonalDic.suggest(sWord, nSuggLimit); } } * select (sPattern="") { // generator: returns all entries which morphology fits <sPattern> yield* this.oMainDic.select(sPattern) if (this.oExtendedDic) { yield* this.oExtendedDic.select(sPattern); } if (this.oCommunityDic) { yield* this.oCommunityDic.select(sPattern); } if (this.oPersonalDic) { yield* this.oPersonalDic.select(sPattern); } } } if (typeof(exports) !== 'undefined') { exports.SpellChecker = SpellChecker; } |
Modified graphspell/spellchecker.py from [aaa0229431] to [4d329ce1f7].
︙ | ︙ | |||
18 19 20 21 22 23 24 | "fr": "fr.bdic", "en": "en.bdic" } class SpellChecker (): | | > | 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 | "fr": "fr.bdic", "en": "en.bdic" } class SpellChecker (): def __init__ (self, sLangCode, sfMainDic="", sfExtendedDic="", sfCommunityDic="", sfPersonalDic=""): "returns True if the main dictionary is loaded" self.sLangCode = sLangCode if not sfMainDic: sfMainDic = dDefaultDictionaries.get(sLangCode, "") self.oMainDic = self._loadDictionary(sfMainDic, True) self.oExtendedDic = self._loadDictionary(sfExtendedDic) self.oCommunityDic = self._loadDictionary(sfCommunityDic) self.oPersonalDic = self._loadDictionary(sfPersonalDic) self.oTokenizer = None def _loadDictionary (self, source, bNecessary=False): "returns an IBDAWG object" if not source: return None |
︙ | ︙ | |||
58 59 60 61 62 63 64 65 66 67 68 69 70 71 | self.oMainDic = self._loadDictionary(source) return bool(self.oMainDic) def setExtendedDictionary (self, source): "returns True if the dictionary is loaded" self.oExtendedDic = self._loadDictionary(source) return bool(self.oExtendedDic) def setPersonalDictionary (self, source): "returns True if the dictionary is loaded" self.oPersonalDic = self._loadDictionary(source) return bool(self.oPersonalDic) # parse text functions | > > > > > | 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 | self.oMainDic = self._loadDictionary(source) return bool(self.oMainDic) def setExtendedDictionary (self, source): "returns True if the dictionary is loaded" self.oExtendedDic = self._loadDictionary(source) return bool(self.oExtendedDic) def setCommunityDictionary (self, source): "returns True if the dictionary is loaded" self.oCommunityDic = self._loadDictionary(source) return bool(self.oCommunityDic) def setPersonalDictionary (self, source): "returns True if the dictionary is loaded" self.oPersonalDic = self._loadDictionary(source) return bool(self.oPersonalDic) # parse text functions |
︙ | ︙ | |||
103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 | def isValidToken (self, sToken): "checks if sToken is valid (if there is hyphens in sToken, sToken is split, each part is checked)" if self.oMainDic.isValidToken(sToken): return True if self.oExtendedDic and self.oExtendedDic.isValidToken(sToken): return True if self.oPersonalDic and self.oPersonalDic.isValidToken(sToken): return True return False def isValid (self, sWord): "checks if sWord is valid (different casing tested if the first letter is a capital)" if self.oMainDic.isValid(sWord): return True if self.oExtendedDic and self.oExtendedDic.isValid(sWord): return True if self.oPersonalDic and self.oPersonalDic.isValid(sWord): return True return False def lookup (self, sWord): "checks if sWord is in dictionary as is (strict verification)" if self.oMainDic.lookup(sWord): return True if self.oExtendedDic and self.oExtendedDic.lookup(sWord): return True if self.oPersonalDic and self.oPersonalDic.lookup(sWord): return True return False def getMorph (self, sWord): "retrieves morphologies list, different casing allowed" lResult = self.oMainDic.getMorph(sWord) if self.oExtendedDic: lResult.extend(self.oExtendedDic.getMorph(sWord)) if self.oPersonalDic: lResult.extend(self.oPersonalDic.getMorph(sWord)) return lResult def getLemma (self, sWord): return set([ s[1:s.find(" ")] for s in self.getMorph(sWord) ]) def suggest (self, sWord, nSuggLimit=10): "generator: returns 1, 2 or 3 lists of suggestions" yield self.oMainDic.suggest(sWord, nSuggLimit) if self.oExtendedDic: yield self.oExtendedDic.suggest(sWord, nSuggLimit) if self.oPersonalDic: yield self.oPersonalDic.suggest(sWord, nSuggLimit) def select (self, sPattern=""): "generator: returns all entries which morphology fits <sPattern>" yield from 
self.oMainDic.select(sPattern) if self.oExtendedDic: yield from self.oExtendedDic.select(sPattern) if self.oPersonalDic: yield from self.oPersonalDic.select(sPattern) def drawPath (self, sWord): self.oMainDic.drawPath(sWord) if self.oExtendedDic: print("-----") self.oExtendedDic.drawPath(sWord) if self.oPersonalDic: print("-----") self.oPersonalDic.drawPath(sWord) | > > > > > > > > > > > > > > > | 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 | def isValidToken (self, sToken): "checks if sToken is valid (if there is hyphens in sToken, sToken is split, each part is checked)" if self.oMainDic.isValidToken(sToken): return True if self.oExtendedDic and self.oExtendedDic.isValidToken(sToken): return True if self.oCommunityDic and self.oCommunityDic.isValidToken(sToken): return True if self.oPersonalDic and self.oPersonalDic.isValidToken(sToken): return True return False def isValid (self, sWord): "checks if sWord is valid (different casing tested if the first letter is a capital)" if self.oMainDic.isValid(sWord): return True if self.oExtendedDic and self.oExtendedDic.isValid(sWord): return True if self.oCommunityDic and self.oCommunityDic.isValid(sWord): return True if self.oPersonalDic and self.oPersonalDic.isValid(sWord): return True return False def lookup (self, sWord): "checks if sWord is in dictionary as is (strict verification)" if self.oMainDic.lookup(sWord): return True if self.oExtendedDic and self.oExtendedDic.lookup(sWord): return True if self.oCommunityDic and self.oCommunityDic.lookup(sWord): return True if self.oPersonalDic and self.oPersonalDic.lookup(sWord): return True return False def getMorph (self, sWord): "retrieves morphologies list, different casing allowed" lResult = 
self.oMainDic.getMorph(sWord) if self.oExtendedDic: lResult.extend(self.oExtendedDic.getMorph(sWord)) if self.oCommunityDic: lResult.extend(self.oCommunityDic.getMorph(sWord)) if self.oPersonalDic: lResult.extend(self.oPersonalDic.getMorph(sWord)) return lResult def getLemma (self, sWord): return set([ s[1:s.find(" ")] for s in self.getMorph(sWord) ]) def suggest (self, sWord, nSuggLimit=10): "generator: returns 1, 2 or 3 lists of suggestions" yield self.oMainDic.suggest(sWord, nSuggLimit) if self.oExtendedDic: yield self.oExtendedDic.suggest(sWord, nSuggLimit) if self.oCommunityDic: yield self.oCommunityDic.suggest(sWord, nSuggLimit) if self.oPersonalDic: yield self.oPersonalDic.suggest(sWord, nSuggLimit) def select (self, sPattern=""): "generator: returns all entries which morphology fits <sPattern>" yield from self.oMainDic.select(sPattern) if self.oExtendedDic: yield from self.oExtendedDic.select(sPattern) if self.oCommunityDic: yield from self.oCommunityDic.select(sPattern) if self.oPersonalDic: yield from self.oPersonalDic.select(sPattern) def drawPath (self, sWord): self.oMainDic.drawPath(sWord) if self.oExtendedDic: print("-----") self.oExtendedDic.drawPath(sWord) if self.oCommunityDic: print("-----") self.oCommunityDic.drawPath(sWord) if self.oPersonalDic: print("-----") self.oPersonalDic.drawPath(sWord) |
Modified make.py from [b9941a6809] to [8e3260b6ad].
︙ | ︙ | |||
303 304 305 306 307 308 309 | dVars[sf[:-3]] = open("js_extension/"+sf, "r", encoding="utf-8").read() for sf in os.listdir("graphspell-js"): if not os.path.isdir("graphspell-js/"+sf): file_util.copy_file("graphspell-js/"+sf, "grammalecte-js/graphspell") helpers.copyAndFileTemplate("graphspell-js/"+sf, "grammalecte-js/graphspell/"+sf, dVars) | | > > > > | 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 | dVars[sf[:-3]] = open("js_extension/"+sf, "r", encoding="utf-8").read() for sf in os.listdir("graphspell-js"): if not os.path.isdir("graphspell-js/"+sf): file_util.copy_file("graphspell-js/"+sf, "grammalecte-js/graphspell") helpers.copyAndFileTemplate("graphspell-js/"+sf, "grammalecte-js/graphspell/"+sf, dVars) def copyGraphspellDictionaries (dVars, bJavaScript=False, bExtendedDict=False, bCommunityDict=False, bPersonalDict=False): dVars["dic_main_filename_py"] = "" dVars["dic_main_filename_js"] = "" dVars["dic_extended_filename_py"] = "" dVars["dic_extended_filename_js"] = "" dVars["dic_community_filename_py"] = "" dVars["dic_community_filename_js"] = "" dVars["dic_personal_filename_py"] = "" dVars["dic_personal_filename_js"] = "" lDict = [ ("main", dVars['dic_filename']) ] if bExtendedDict: lDict.append(("extended", dVars['dic_extended_filename'])) if bCommunityDict: lDict.append(("community", dVars['dic_community_filename'])) if bPersonalDict: lDict.append(("personal", dVars['dic_personal_filename'])) for sType, sFileName in lDict: spfPyDic = "graphspell/_dictionaries/" + sFileName + ".bdic" spfJSDic = "graphspell-js/_dictionaries/" + sFileName + ".json" if not os.path.isfile(spfPyDic) or (bJavaScript and not os.path.isfile(spfJSDic)): buildDictionary(dVars, sType, bJavaScript) |
︙ | ︙ | |||
337 338 339 340 341 342 343 344 345 346 347 348 349 350 | spfLexSrc = dVars['lexicon_src'] sfDictDst = dVars['dic_filename'] sDicName = dVars['dic_name'] elif sType == "extended": spfLexSrc = dVars['lexicon_extended_src'] sfDictDst = dVars['dic_extended_filename'] sDicName = dVars['dic_extended_name'] elif sType == "personal": spfLexSrc = dVars['lexicon_personal_src'] sfDictDst = dVars['dic_personal_filename'] sDicName = dVars['dic_personal_name'] lex_build.build(spfLexSrc, dVars['lang'], dVars['lang_name'], sfDictDst, bJavaScript, sDicName, dVars['stemming_method'], int(dVars['fsa_method'])) | > > > > | 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 | spfLexSrc = dVars['lexicon_src'] sfDictDst = dVars['dic_filename'] sDicName = dVars['dic_name'] elif sType == "extended": spfLexSrc = dVars['lexicon_extended_src'] sfDictDst = dVars['dic_extended_filename'] sDicName = dVars['dic_extended_name'] elif sType == "community": spfLexSrc = dVars['lexicon_community_src'] sfDictDst = dVars['dic_community_filename'] sDicName = dVars['dic_community_name'] elif sType == "personal": spfLexSrc = dVars['lexicon_personal_src'] sfDictDst = dVars['dic_personal_filename'] sDicName = dVars['dic_personal_name'] lex_build.build(spfLexSrc, dVars['lang'], dVars['lang_name'], sfDictDst, bJavaScript, sDicName, dVars['stemming_method'], int(dVars['fsa_method'])) |
︙ | ︙ | |||
358 359 360 361 362 363 364 365 366 367 368 369 370 371 | xParser.add_argument("-ba", "--build_data_after", help="launch build_data.py (only part 2: before dictionary building)", action="store_true") xParser.add_argument("-d", "--dict", help="generate FSA dictionary", action="store_true") xParser.add_argument("-t", "--tests", help="run unit tests", action="store_true") xParser.add_argument("-p", "--perf", help="run performance tests", action="store_true") xParser.add_argument("-pm", "--perf_memo", help="run performance tests and store results in perf_memo.txt", action="store_true") xParser.add_argument("-js", "--javascript", help="JavaScript build for Firefox", action="store_true") xParser.add_argument("-aed", "--add_extended_dictionary", help="add extended dictionary to the build", action="store_true") xParser.add_argument("-apd", "--add_personal_dictionary", help="add personal dictionary to the build", action="store_true") xParser.add_argument("-fx", "--firefox", help="Launch Firefox Developper for WebExtension testing", action="store_true") xParser.add_argument("-we", "--web_ext", help="Launch Firefox Nightly for WebExtension testing", action="store_true") xParser.add_argument("-tb", "--thunderbird", help="Launch Thunderbird", action="store_true") xParser.add_argument("-i", "--install", help="install the extension in Writer (path of unopkg must be set in config.ini)", action="store_true") xArgs = xParser.parse_args() | > | 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 | xParser.add_argument("-ba", "--build_data_after", help="launch build_data.py (only part 2: before dictionary building)", action="store_true") xParser.add_argument("-d", "--dict", help="generate FSA dictionary", action="store_true") xParser.add_argument("-t", "--tests", help="run unit tests", action="store_true") xParser.add_argument("-p", "--perf", help="run performance tests", action="store_true") xParser.add_argument("-pm", "--perf_memo", help="run performance tests and store 
results in perf_memo.txt", action="store_true") xParser.add_argument("-js", "--javascript", help="JavaScript build for Firefox", action="store_true") xParser.add_argument("-aed", "--add_extended_dictionary", help="add extended dictionary to the build", action="store_true") xParser.add_argument("-acd", "--add_community_dictionary", help="add community dictionary to the build", action="store_true") xParser.add_argument("-apd", "--add_personal_dictionary", help="add personal dictionary to the build", action="store_true") xParser.add_argument("-fx", "--firefox", help="Launch Firefox Developper for WebExtension testing", action="store_true") xParser.add_argument("-we", "--web_ext", help="Launch Firefox Nightly for WebExtension testing", action="store_true") xParser.add_argument("-tb", "--thunderbird", help="Launch Thunderbird", action="store_true") xParser.add_argument("-i", "--install", help="install the extension in Writer (path of unopkg must be set in config.ini)", action="store_true") xArgs = xParser.parse_args() |
︙ | ︙ | |||
383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 | for sLang in xArgs.lang: if os.path.exists("gc_lang/"+sLang) and os.path.isdir("gc_lang/"+sLang): xConfig = getConfig(sLang) dVars = xConfig._sections['args'] if not dVars["lexicon_extended_src"]: xArgs.add_extended_dictionary = False if not dVars["lexicon_personal_src"]: xArgs.add_personal_dictionary = False # build data build_data_module = None if xArgs.build_data_before or xArgs.build_data_after: # lang data try: build_data_module = importlib.import_module("gc_lang."+sLang+".build_data") except ImportError: print("# Error. Couldn’t import file build_data.py in folder gc_lang/"+sLang) if build_data_module and xArgs.build_data_before: build_data_module.before('gc_lang/'+sLang, dVars, xArgs.javascript) if xArgs.dict: buildDictionary(dVars, "main", xArgs.javascript) if xArgs.add_extended_dictionary: buildDictionary(dVars, "extended", xArgs.javascript) if xArgs.add_personal_dictionary: buildDictionary(dVars, "personal", xArgs.javascript) if build_data_module and xArgs.build_data_after: build_data_module.after('gc_lang/'+sLang, dVars, xArgs.javascript) # copy dictionaries from Graphspell | > > > > | | 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 | for sLang in xArgs.lang: if os.path.exists("gc_lang/"+sLang) and os.path.isdir("gc_lang/"+sLang): xConfig = getConfig(sLang) dVars = xConfig._sections['args'] if not dVars["lexicon_extended_src"]: xArgs.add_extended_dictionary = False if not dVars["lexicon_community_src"]: xArgs.add_community_dictionary = False if not dVars["lexicon_personal_src"]: xArgs.add_personal_dictionary = False # build data build_data_module = None if xArgs.build_data_before or xArgs.build_data_after: # lang data try: build_data_module = importlib.import_module("gc_lang."+sLang+".build_data") except 
ImportError: print("# Error. Couldn’t import file build_data.py in folder gc_lang/"+sLang) if build_data_module and xArgs.build_data_before: build_data_module.before('gc_lang/'+sLang, dVars, xArgs.javascript) if xArgs.dict: buildDictionary(dVars, "main", xArgs.javascript) if xArgs.add_extended_dictionary: buildDictionary(dVars, "extended", xArgs.javascript) if xArgs.add_community_dictionary: buildDictionary(dVars, "community", xArgs.javascript) if xArgs.add_personal_dictionary: buildDictionary(dVars, "personal", xArgs.javascript) if build_data_module and xArgs.build_data_after: build_data_module.after('gc_lang/'+sLang, dVars, xArgs.javascript) # copy dictionaries from Graphspell copyGraphspellDictionaries(dVars, xArgs.javascript, xArgs.add_extended_dictionary, xArgs.add_community_dictionary, xArgs.add_personal_dictionary) # make sVersion = create(sLang, xConfig, xArgs.install, xArgs.javascript, ) # tests if xArgs.tests or xArgs.perf or xArgs.perf_memo: print("> Running tests") |
︙ | ︙ |