@@ -288,14 +288,15 @@
 self.parseText(self.sSentence, self.sSentence0, False, iStart, sCountry, dOpt, bShowRuleId, bDebug, bContext)
 if bFullInfo:
     for dToken in self.lTokens0:
         if dToken["sType"] == "WORD":
             dToken["bValidToken"] = _oSpellChecker.isValidToken(dToken["sValue"])
         if "lMorph" not in dToken:
             dToken["lMorph"] = _oSpellChecker.getMorph(dToken["sValue"])
+        _oSpellChecker.setLabelsOnToken(dToken)
     lSentences.append({
         "nStart": iStart,
         "nEnd": iEnd,
         "sSentence": self.sSentence0,
         "lTokens": self.lTokens0,
         "lGrammarErrors": list(self.dSentenceError.values())
     })
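
For context, a minimal sketch of how a caller might read the per-sentence structure built in this hunk. Only the dictionary keys visible in the diff ("nStart", "nEnd", "sSentence", "lTokens", "lGrammarErrors", "sType", "sValue", "bValidToken", "lMorph") are relied upon; the function name below and the way lSentences is obtained are assumptions for illustration, and the exact fields added by setLabelsOnToken are not shown in the hunk.

# Hypothetical consumer of the lSentences list assembled above; not the
# project's documented API, just an illustration of the enriched token data.
def print_full_info(lSentences):
    for dSentence in lSentences:
        # Sentence boundaries and original text, as stored in the hunk above.
        print("[{}:{}] {}".format(dSentence["nStart"], dSentence["nEnd"], dSentence["sSentence"]))
        for dToken in dSentence["lTokens"]:
            if dToken["sType"] == "WORD":
                # bValidToken and lMorph are filled in by the loop shown in the diff;
                # setLabelsOnToken() presumably attaches display labels to the token as well.
                print("  {:<20} valid: {}  morph: {}".format(
                    dToken["sValue"], dToken.get("bValidToken"), dToken.get("lMorph")))
        for dGrammarError in dSentence["lGrammarErrors"]:
            print("  grammar error:", dGrammarError)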