Grammalecte Diff

Differences From Artifact [129eee4726]:

To Artifact [e414bb30c0]:


Changed block: lines 31-45 → 31-45 (1 line removed, 1 line added):

//console.log("[Worker] GC Engine Worker [start]");
//console.log(self);

importScripts("grammalecte/graphspell/helpers.js");
importScripts("grammalecte/graphspell/str_transform.js");
importScripts("grammalecte/graphspell/char_player.js");
importScripts("grammalecte/graphspell/suggest.js");
importScripts("grammalecte/graphspell/lexgraph_fr.js");
importScripts("grammalecte/graphspell/ibdawg.js");
importScripts("grammalecte/graphspell/spellchecker.js");
importScripts("grammalecte/text.js");
importScripts("grammalecte/graphspell/tokenizer.js");
importScripts("grammalecte/fr/conj.js");
importScripts("grammalecte/fr/mfsp.js");
importScripts("grammalecte/fr/phonet.js");

Changed block: lines 151-165 → 151-164 (1 line removed):

let bInitDone = false;

let oSpellChecker = null;
let oTokenizer = null;
let oLxg = null;
let oTest = null;
let oLocution = null;


/*
    Technical note:
    This worker doesn’t work as a PromiseWorker (which returns a promise), so when we send a request
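
The (truncated) technical note above says this worker is not wrapped as a PromiseWorker, so a caller cannot simply await a reply. A common way to cope, hinted at by the oInfo object that every createResponse() call echoes back, is to tag each request with caller-supplied data and match it against the responses. The sketch below only illustrates that pattern; the worker file name, the request fields (sCommand, dParam, dInfo) and the response fields (bEnd, result) are assumptions for this example, not the extension's documented protocol.

const xWorker = new Worker("gce_worker.js");   // assumed file name
const dPending = new Map();                    // request id -> resolve callback
let nRequestId = 0;

function sendRequest (sCommand, dParam) {
    // Wrap postMessage in a promise; the id travels in dInfo and is echoed
    // back untouched in the worker's responses, which lets us match them.
    return new Promise((resolve) => {
        const nId = ++nRequestId;
        dPending.set(nId, resolve);
        xWorker.postMessage({ sCommand: sCommand, dParam: dParam, dInfo: { nId: nId } });
    });
}

xWorker.onmessage = function (e) {
    const { dInfo, bEnd, result } = e.data;    // assumed response fields
    if (bEnd && dInfo && dPending.has(dInfo.nId)) {
        dPending.get(dInfo.nId)(result);
        dPending.delete(dInfo.nId);
    }
};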

Changed block: lines 178-192 → 177-191 (1 line removed, 1 line added):

            mfsp.init(helpers.loadFile(sExtensionPath + "/grammalecte/fr/mfsp_data.json"));
            //console.log("[Worker] Modules have been initialized…");
            gc_engine.load(sContext, "aHSL", sExtensionPath+"grammalecte/graphspell/_dictionaries");
            oSpellChecker = gc_engine.getSpellChecker();
            oTest = new TestGrammarChecking(gc_engine, sExtensionPath+"/grammalecte/fr/tests_data.json");
            oTokenizer = new Tokenizer("fr");
            oLocution =  helpers.loadFile(sExtensionPath + "/grammalecte/fr/locutions_data.json");
-            oLxg = new Lexicographe(oSpellChecker, oTokenizer, oLocution);
+            lexgraph_fr.load(oSpellChecker, oTokenizer, oLocution);
            if (dOptions !== null) {
                if (!(dOptions instanceof Map)) {
                    dOptions = helpers.objectToMap(dOptions);
                }
                gc_options.setOptions(dOptions);
            }
            //tests();
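
The initialization code above accepts options either as a Map or as a plain object, and normalizes the latter with helpers.objectToMap() before passing it to gc_options.setOptions(). The sketch below shows what such a conversion typically amounts to; it is an illustration of the idea, not Grammalecte's actual helper, and the option names are made up for the example.

// Minimal sketch of an object-to-Map conversion (hypothetical, for illustration).
function objectToMap (oObject) {
    return new Map(Object.entries(oObject));
}

// A plain options object, e.g. received via postMessage, becomes a Map
// so the rest of the engine can use Map methods uniformly.
let dOptions = { "apos": true, "nbsp": false };   // option names are illustrative
if (!(dOptions instanceof Map)) {
    dOptions = objectToMap(dOptions);
}
console.log(dOptions.get("apos"));                // true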

Changed block: lines 247-261 → 246-260 (1 line removed, 1 line added):

function getListOfTokens (sText, oInfo={}) {
    // lexicographer
    try {
        sText = sText.replace(/­/g, "").normalize("NFC");
        for (let sParagraph of text.getParagraph(sText)) {
            if (sParagraph.trim() !== "") {
                postMessage(createResponse("getListOfTokens", oLxg.getListOfTokensReduc(sParagraph, true), oInfo, false));
                postMessage(createResponse("getListOfTokens", lexgraph_fr.getListOfTokensReduc(sParagraph, true), oInfo, false));
            }
        }
        postMessage(createResponse("getListOfTokens", null, oInfo, true));
    }
    catch (e) {
        console.error(e);
        postMessage(createResponse("getListOfTokens", createErrorResult(e, "no tokens"), oInfo, true, true));
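
On the main-thread side, getListOfTokens produces one message per non-empty paragraph (posted with the end flag set to false) followed by a final end-of-stream message, or an error response. The sketch below shows how a listener might collect those batches, reusing the xWorker handle and sendRequest() helper from the earlier sketch; the response field names (sActionDone, result, bEnd, bError) are assumptions read off the createResponse() calls above, not a documented API.

const lTokenBatches = [];

xWorker.addEventListener("message", function (e) {
    const oResponse = e.data;
    if (oResponse.sActionDone !== "getListOfTokens") {   // assumed field name
        return;                                          // ignore other actions
    }
    if (oResponse.bError) {
        console.error("tokenization failed:", oResponse.result);
    } else if (!oResponse.bEnd) {
        lTokenBatches.push(oResponse.result);            // one batch per paragraph
    } else {
        console.log("done:", lTokenBatches.length, "paragraph(s) tokenized");
    }
});

// Hypothetical call, assuming the request shape from the earlier sketch:
// sendRequest("getListOfTokens", { sText: "Un exemple de texte." });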