#! /usr/bin/env python

"""Token constants (from "token.h")."""

# This file is automatically generated; please don't muck it up!
#
# To update the symbols in this file, 'cd' to the top directory of
# the python source tree after building the interpreter and run:
#
#    python Lib/token.py
# Numeric token codes; regenerated from Include/token.h by main() below,
# which replaces everything between the start/end marker lines verbatim.
#--start constants--
ENDMARKER = 0
NAME = 1
NUMBER = 2
STRING = 3
NEWLINE = 4
INDENT = 5
DEDENT = 6
LPAR = 7
RPAR = 8
LSQB = 9
RSQB = 10
COLON = 11
COMMA = 12
SEMI = 13
PLUS = 14
MINUS = 15
STAR = 16
SLASH = 17
VBAR = 18
AMPER = 19
LESS = 20
GREATER = 21
EQUAL = 22
DOT = 23
PERCENT = 24
BACKQUOTE = 25
LBRACE = 26
RBRACE = 27
EQEQUAL = 28
NOTEQUAL = 29
LESSEQUAL = 30
GREATEREQUAL = 31
TILDE = 32
CIRCUMFLEX = 33
LEFTSHIFT = 34
RIGHTSHIFT = 35
DOUBLESTAR = 36
PLUSEQUAL = 37
MINEQUAL = 38
STAREQUAL = 39
SLASHEQUAL = 40
PERCENTEQUAL = 41
AMPEREQUAL = 42
VBAREQUAL = 43
CIRCUMFLEXEQUAL = 44
LEFTSHIFTEQUAL = 45
RIGHTSHIFTEQUAL = 46
DOUBLESTAREQUAL = 47
DOUBLESLASH = 48
DOUBLESLASHEQUAL = 49
AT = 50
OP = 51
ERRORTOKEN = 52
N_TOKENS = 53
NT_OFFSET = 256
#--end constants--
69 | ||
# Reverse mapping: numeric token value -> token name string.
# Snapshot globals() with list() first: the loop's own assignments
# (_name, _value, tok_name) mutate the module dict, and iterating a
# live dict view while it grows raises RuntimeError on Python 3.
tok_name = {}
for _name, _value in list(globals().items()):
    if isinstance(_value, int):
        tok_name[_value] = _name
74 | ||
75 | ||
def ISTERMINAL(x):
    """Return true if *x* is the code of a terminal (token) symbol."""
    return NT_OFFSET > x
78 | ||
def ISNONTERMINAL(x):
    """Return true if *x* is the code of a non-terminal (grammar) symbol."""
    return NT_OFFSET <= x
81 | ||
def ISEOF(x):
    """Return true if *x* marks the end of input (the ENDMARKER token)."""
    return ENDMARKER == x
84 | ||
85 | ||
def main():
    """Regenerate the constants block of Lib/token.py from Include/token.h.

    Usage: python Lib/token.py [infile [outfile]]
    (defaults: Include/token.h -> Lib/token.py)

    Scans *infile* for ``#define NAME value`` lines and rewrites the text
    between the ``#--start constants--`` / ``#--end constants--`` markers
    in *outfile*.  Exits with status 1/2/4 on I/O errors and 3 when the
    markers are missing.
    """
    import re
    import sys
    args = sys.argv[1:]
    # 'a and a[0] or default' misfires on falsy strings; be explicit.
    in_file_name = args[0] if args else "Include/token.h"
    out_file_name = args[1] if len(args) > 1 else "Lib/token.py"
    try:
        # 'with' guarantees the handle is closed even if read() fails.
        with open(in_file_name) as fp:
            lines = fp.read().split("\n")
    except IOError as err:
        sys.stdout.write("I/O error: %s\n" % str(err))
        sys.exit(1)
    prog = re.compile(
        r"#define[ \t][ \t]*([A-Z0-9][A-Z0-9_]*)[ \t][ \t]*([0-9][0-9]*)",
        re.IGNORECASE)
    tokens = {}
    for line in lines:
        match = prog.match(line)
        if match:
            name, val = match.group(1, 2)
            tokens[int(val)] = name  # reversed so we can sort by value
    # Load the output skeleton from the target:
    try:
        with open(out_file_name) as fp:
            template = fp.read().split("\n")
    except IOError as err:
        sys.stderr.write("I/O error: %s\n" % str(err))
        sys.exit(2)
    try:
        start = template.index("#--start constants--") + 1
        end = template.index("#--end constants--")
    except ValueError:
        sys.stderr.write("target does not contain format markers\n")
        sys.exit(3)
    # sorted(dict) iterates keys in ascending order on both Py2 and Py3
    # (dict.keys().sort() only worked on Python 2's list-returning keys()).
    template[start:end] = ["%s = %d" % (tokens[val], val)
                           for val in sorted(tokens)]
    try:
        with open(out_file_name, 'w') as fp:
            fp.write("\n".join(template))
    except IOError as err:
        sys.stderr.write("I/O error: %s\n" % str(err))
        sys.exit(4)


if __name__ == "__main__":
    main()