Grammalecte Check-in [e3fd3fda28]

Overview
Comment: [build] darg builder: optional tokens
SHA3-256: e3fd3fda284a5ba241c4b854a62e961dc8bc67f3428bf9c258388f23eab955ab
User & Date: olr on 2018-06-18 07:01:32
Context
2018-06-18
14:19  [core] gc engine, graph parser: better debugging + fix token positioning while rewriting (check-in: d5ac5b5af6, user: olr, tags: core, rg)
07:01  [build] darg builder: optional tokens (check-in: e3fd3fda28, user: olr, tags: build, rg)
04:58  [build] darg builder: group selection + definition (check-in: dc40a55790, user: olr, tags: build, rg)
Changes

Modified compile_rules_graph.py from [04c9d15dc9] to [1c73a8473c].

@@ -35,47 +35,65 @@
 def genTokenLines (sTokenLine, dDef):
     "tokenize a string and return a list of lines of tokens"
     lToken = sTokenLine.split()
     lTokenLines = None
     for i, sToken in enumerate(lToken):
+        # optional token?
+        bNullPossible = sToken.startswith("?") and sToken.endswith("?")
+        if bNullPossible:
+            sToken = sToken[1:-1]
+        # token with definition?
         if sToken.startswith("({") and sToken.endswith("})") and sToken[1:-1] in dDef:
             sToken = "(" + dDef[sToken[1:-1]] + ")"
-        if sToken.startswith("{") and sToken.endswith("}") and sToken in dDef:
+        elif sToken.startswith("{") and sToken.endswith("}") and sToken in dDef:
             sToken = dDef[sToken]
-        if ( (sToken.startswith("[") and sToken.endswith("]")) or (sToken.startswith("([") and sToken.endswith("])")) ):
+        if ( (sToken.startswith("[") and sToken.endswith("]")) or (sToken.startswith("([") and sToken.endswith("])")) or (sToken.startswith("?[") and sToken.endswith("]?")) ):
+            # multiple token
             bSelectedGroup = sToken.startswith("(") and sToken.endswith(")")
             if bSelectedGroup:
                 sToken = sToken[1:-1]
-            # multiple token
+            lNewToken = sToken[1:-1].split("|")
             if not lTokenLines:
-                lTokenLines = [ [s]  for s  in sToken[1:-1].split("|") ]
+                lTokenLines = [ [s]  for s  in lNewToken ]
+                if bNullPossible:
+                    lTokenLines.extend([ []  for i  in range(len(lNewToken)+1) ])
             else:
                 lNewTemp = []
-                for aRule in lTokenLines:
-                    lElem = sToken[1:-1].split("|")
-                    sElem1 = lElem.pop(0)
-                    if bSelectedGroup:
-                        sElem1 = "(" + sElem1 + ")"
-                    for sElem in lElem:
-                        if bSelectedGroup:
-                            sElem = "(" + sElem + ")"
-                        aNew = list(aRule)
-                        aNew.append(sElem)
-                        lNewTemp.append(aNew)
-                    aRule.append(sElem1)
+                if bNullPossible:
+                    for aRule in lTokenLines:
+                        for sElem in lNewToken:
+                            aNewRule = list(aRule)
+                            aNewRule.append(sElem)
+                            lNewTemp.append(aNewRule)
+                else:
+                    sElem1 = lNewToken.pop(0)
+                    for aRule in lTokenLines:
+                        for sElem in lNewToken:
+                            aNewRule = list(aRule)
+                            aNewRule.append("(" + sElem + ")"  if bSelectedGroup  else sElem)
+                            lNewTemp.append(aNewRule)
+                        aRule.append("(" + sElem1 + ")"  if bSelectedGroup  else sElem1)
                 lTokenLines.extend(lNewTemp)
         else:
             # simple token
             if not lTokenLines:
-                lTokenLines = [[sToken]]
+                lTokenLines = [[sToken], []]  if bNullPossible  else [[sToken]]
             else:
-                for aRule in lTokenLines:
-                    aRule.append(sToken)
+                if bNullPossible:
+                    lNewTemp = []
+                    for aRule in lTokenLines:
+                        lNew = list(aRule)
+                        lNew.append(sToken)
+                        lNewTemp.append(lNew)
+                    lTokenLines.extend(lNewTemp)
+                else:
+                    for aRule in lTokenLines:
+                        aRule.append(sToken)
     for aRule in lTokenLines:
         yield aRule


 def createRule (iLine, sRuleName, sTokenLine, iActionBlock, sActions, nPriority, dDef):
     # print(iLine, "//", sRuleName, "//", sTokenLine, "//", sActions, "//", nPriority)
     for lToken in genTokenLines(sTokenLine, dDef):
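
For illustration only (not part of the check-in): a minimal sketch of what the new optional-token syntax produces, assuming the new genTokenLines() shown above and an empty definition table. The token lines below are invented examples, not rules from the repository. A token written ?x? or ?[a|b]? may be absent, so the builder yields each token line both with and without it.

    # hypothetical token lines, only to show the expansion of "?…?"
    for lToken in genTokenLines("le ?très? grand chien", {}):
        print(lToken)
    # ['le', 'grand', 'chien']
    # ['le', 'très', 'grand', 'chien']

    # optional alternative group
    for lToken in genTokenLines("le ?[petit|grand]? chien", {}):
        print(lToken)
    # ['le', 'chien']
    # ['le', 'petit', 'chien']
    # ['le', 'grand', 'chien']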