Index: graphspell/tokenizer.py
==================================================================
--- graphspell/tokenizer.py
+++ graphspell/tokenizer.py
@@ -52,11 +52,11 @@
     def genTokens (self, sText, bStartEndToken=False):
         "generator: tokenize "
         i = 0
         if bStartEndToken:
-            yield { "i": 0, "sType": "INFO", "sValue": "", "nStart": 0, "nEnd": 0 }
+            yield { "i": 0, "sType": "INFO", "sValue": "", "nStart": 0, "nEnd": 0, "lMorph": [""] }
         for i, m in enumerate(self.zToken.finditer(sText), 1):
             yield { "i": i, "sType": m.lastgroup, "sValue": m.group(), "nStart": m.start(), "nEnd": m.end() }
         if bStartEndToken:
             iEnd = len(sText)
-            yield { "i": i+1, "sType": "INFO", "sValue": "", "nStart": iEnd, "nEnd": iEnd }
+            yield { "i": i+1, "sType": "INFO", "sValue": "", "nStart": iEnd, "nEnd": iEnd, "lMorph": [""] }