#!python3

"""
RULE GRAPH BUILDER
"""

# by Olivier R.
# License: MPL 2

from graphspell.progressbar import ProgressBar


class DARG:
    """DIRECT ACYCLIC RULE GRAPH"""
    # This code is inspired by Steve Hanov’s DAWG, 2011. (http://stevehanov.ca/blog/index.php?id=115)

    def __init__ (self, lRule, sLangCode):
        print("===== Direct Acyclic Rule Graph - Minimal Acyclic Finite State Automaton =====")
        # Preparing DARG
        print(" > Preparing list of tokens")
        self.sLangCode = sLangCode
        self.nRule = len(lRule)
        self.aPreviousRule = []
        Node.resetNextId()
        self.oRoot = Node()
        self.lUncheckedNodes = []       # list of nodes that have not been checked for duplication.
        self.lMinimizedNodes = {}       # dictionary of unique nodes that have been checked for duplication.
        self.nNode = 0
        self.nArc = 0
        # build
        lRule.sort()
        oProgBar = ProgressBar(0, len(lRule))
        for aRule in lRule:
            self.insert(aRule)
            oProgBar.increment(1)
        oProgBar.done()
        self.finish()
        self.countNodes()
        self.countArcs()
        self.displayInfo()

    # BUILD DARG
    def insert (self, aRule):
        "insert a new rule (tokens must be inserted in order)"
        if aRule < self.aPreviousRule:
            exit("# Error: tokens must be inserted in order.")
        # find the common prefix between the new rule and the previous one
        nCommonPrefix = 0
        for i in range(min(len(aRule), len(self.aPreviousRule))):
            if aRule[i] != self.aPreviousRule[i]:
                break
            nCommonPrefix += 1
        # check the unchecked nodes for redundant nodes, proceeding from the last one
        # down to the common prefix size, then truncate the list at that point
        self._minimize(nCommonPrefix)
        # add the suffix, starting from the correct node mid-way through the graph
        if not self.lUncheckedNodes:
            oNode = self.oRoot
        else:
            oNode = self.lUncheckedNodes[-1][2]
        iToken = nCommonPrefix
        for sToken in aRule[nCommonPrefix:]:
            oNextNode = Node()
            oNode.dArcs[sToken] = oNextNode
            self.lUncheckedNodes.append((oNode, sToken, oNextNode))
            if iToken == (len(aRule) - 2):
                oNode.bFinal = True
            iToken += 1
            oNode = oNextNode
        oNode.bFinal = True
        self.aPreviousRule = aRule
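
    # For example, inserting ["the", "cat", "sleeps"] and then ["the", "dog", "sleeps"]
    # (token lists invented for illustration) shares the common prefix ["the"]:
    # only nodes for the diverging suffix are created, and identical rule endings
    # are merged later during minimization.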

    def finish (self):
        "minimize unchecked nodes"
        self._minimize(0)

    def _minimize (self, nDownTo):
        # minimize the unchecked nodes from the leaf up to <nDownTo>
        # (helper name and signature assumed, after Hanov’s DAWG scheme cited above)
        for i in range(len(self.lUncheckedNodes)-1, nDownTo-1, -1):
            oNode, sToken, oChildNode = self.lUncheckedNodes[i]
            if oChildNode in self.lMinimizedNodes:
                # replace the child with the previously encountered one
|
>
>
>
>
|
>
>
|
>
|
|
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
|
                oNode.dArcs[sToken] = self.lMinimizedNodes[oChildNode]
            else:
                # add the state to the minimized nodes.
                self.lMinimizedNodes[oChildNode] = oChildNode
            self.lUncheckedNodes.pop()
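
    # Note: minimization relies on Node.__hash__/__eq__ below: two nodes with the
    # same final flag and identical outgoing arcs compare equal, so rules that end
    # the same way share a single suffix node.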

    def countNodes (self):
        "count nodes within the whole graph"
        self.nNode = len(self.lMinimizedNodes)

    def countArcs (self):
        "count arcs within the whole graph"
        self.nArc = 0
        for oNode in self.lMinimizedNodes:
            self.nArc += len(oNode.dArcs)

    def displayInfo (self):
        "display information about the rule graph"
        print(" * {:<12} {:>16,}".format("Rules:", self.nRule))
        print(" * {:<12} {:>16,}".format("Nodes:", self.nNode))
        print(" * {:<12} {:>16,}".format("Arcs:", self.nArc))

    def createGraph (self):
        "create the graph as a dictionary"
        dGraph = { 0: self.oRoot.getNodeAsDict() }
        for oNode in self.lMinimizedNodes:
            sHashId = oNode.__hash__()
            if sHashId not in dGraph:
                dGraph[sHashId] = oNode.getNodeAsDict()
            else:
                print("Error. Double node… same id: ", sHashId)
                print(str(oNode.getNodeAsDict()))
        return dGraph
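
# Minimal usage sketch (illustrative only: the token lists, the language code and
# the trailing rule-id tokens are invented, not taken from an actual rule file):
#
#   lRules = [
#       ["the", "cat", "sleeps", "#rule1"],
#       ["the", "dog", "sleeps", "#rule2"],
#   ]
#   oDARG = DARG(lRules, "en")       # builds and minimizes the graph
#   dGraph = oDARG.createGraph()     # dict of node dictionaries keyed by hash id (0 = root)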


class Node:
    """Node of the rule graph"""

    NextId = 0

    def __init__ (self):
        self.i = Node.NextId
        Node.NextId += 1
        self.bFinal = False
        self.dArcs = {}         # key: arc value; value: a node

    @classmethod
    def resetNextId (cls):
        "reset the node counter to 0"
        cls.NextId = 0

    def __str__ (self):
        # Caution! this function is used for hashing and comparison!
        cFinal = "1" if self.bFinal else "0"
        l = [cFinal]
        for (key, oNode) in self.dArcs.items():
            l.append(str(key))
            l.append(str(oNode.i))
        return "_".join(l)

    def __hash__ (self):
        # Used as a key in a python dictionary.
        return self.__str__().__hash__()

    def __eq__ (self, other):
        # Used as a key in a python dictionary.
        # Nodes are equivalent if they have identical arcs, and each identical arc leads to identical states.
        return self.__str__() == other.__str__()

    def getNodeAsDict (self):
        "returns the node as a dictionary structure"
        dNode = {}
        dReValue = {}
        dReMorph = {}
        dRule = {}