Overview
Comment: [build][core] merge actions in key <rules> + code clarification
Downloads: | Tarball | ZIP archive | SQL archive |
Timelines: | family | ancestors | descendants | both | core | build | rg |
Files: | files | file ages | folders |
SHA3-256: a59fbc32a0cec714cec8e4a4e786ad15
User & Date: | olr on 2018-05-19 14:06:03 |
Other Links: | branch diff | manifest | tags |
Context
2018-05-19
| ||
14:28 | [build][core] graph rule: condition is moved in action check-in: 5d1e6b3f8b user: olr tags: core, build, rg | |
14:06 | [build][core] merge actions in key <rules> + code clarification check-in: a59fbc32a0 user: olr tags: core, build, rg | |
2018-05-18
| ||
13:11 | [graphspell] tokenizer: add token index and avoid punctuations aggregation check-in: be6d99bbdc user: olr tags: graphspell, rg | |
Changes
Modified compile_rules_graph.py from [61c35162ee] to [9991956fdc].
︙ | ︙ | |||
64 65 66 67 68 69 70 | for nAction, sAction in enumerate(sActions.split(" <<- ")): if sAction.strip(): sActionId = sRuleName + "_a" + str(nAction) sCondition, tAction = createAction(sActionId, sAction, nGroup, nPriority, dPos) if tAction: dACTIONS[sActionId] = tAction lResult = list(lToken) | | | 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 | for nAction, sAction in enumerate(sActions.split(" <<- ")): if sAction.strip(): sActionId = sRuleName + "_a" + str(nAction) sCondition, tAction = createAction(sActionId, sAction, nGroup, nPriority, dPos) if tAction: dACTIONS[sActionId] = tAction lResult = list(lToken) lResult.extend(["##"+str(iLine), sRuleName, sCondition, sActionId]) yield lResult def createAction (sIdAction, sAction, nGroup, nPriority, dPos): m = re.search("([-~=])(\\d+|)(:\\d+|)>> ", sAction) if not m: print(" # Error. No action found at: ", sIdAction) |
︙ | ︙ |
Modified datg.py from [7505f580c7] to [c340246af4].
︙ | ︙ | |||
70 71 72 73 74 75 76 | iToken = nCommonPrefix for token in aRule[nCommonPrefix:]: oNextNode = Node() oNode.dArcs[token] = oNextNode self.lUncheckedNodes.append((oNode, token, oNextNode)) if iToken == (len(aRule) - 4): oNode.bFinal = True | < | 70 71 72 73 74 75 76 77 78 79 80 81 82 83 | iToken = nCommonPrefix for token in aRule[nCommonPrefix:]: oNextNode = Node() oNode.dArcs[token] = oNextNode self.lUncheckedNodes.append((oNode, token, oNextNode)) if iToken == (len(aRule) - 4): oNode.bFinal = True iToken += 1 oNode = oNextNode oNode.bFinal = True self.aPreviousRule = aRule def finish (self): "minimize unchecked nodes" |
︙ | ︙ | |||
135 136 137 138 139 140 141 | class Node: NextId = 0 def __init__ (self): self.i = Node.NextId Node.NextId += 1 self.bFinal = False | < < | > > > > > | | < < | 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 | class Node: NextId = 0 def __init__ (self): self.i = Node.NextId Node.NextId += 1 self.bFinal = False self.dArcs = {} # key: arc value; value: a node @classmethod def resetNextId (cls): cls.NextId = 0 def __str__ (self): # Caution! this function is used for hashing and comparison! cFinal = "1" if self.bFinal else "0" l = [cFinal] for (key, oNode) in self.dArcs.items(): l.append(str(key)) l.append(str(oNode.i)) return "_".join(l) def __hash__ (self): # Used as a key in a python dictionary. return self.__str__().__hash__() def __eq__ (self, other): # Used as a key in a python dictionary. # Nodes are equivalent if they have identical arcs, and each identical arc leads to identical states. return self.__str__() == other.__str__() def getNodeAsDict (self): "returns the node as a dictionary structure" dNode = {} dRegex = {} dRules = {} for arc, oNode in self.dArcs.items(): if type(arc) == str and arc.startswith("~"): dRegex[arc[1:]] = oNode.__hash__() elif arc.startswith("##"): dRules[arc[1:]] = oNode.__hash__() else: dNode[arc] = oNode.__hash__() if dRegex: dNode["<regex>"] = dRegex if dRules: dNode["<rules>"] = dRules #if self.bFinal: # dNode["<final>"] = 1 return dNode |