add in ability to concat ints
[soc.git] / src / soc / decoder / pseudo / parser.py
# Based on GardenSnake - a parser generator demonstration program
# GardenSnake was released into the Public Domain by Andrew Dalke.

# Portions of this work are derived from Python's Grammar definition
# and may be covered under the Python copyright and license
#
# Andrew Dalke / Dalke Scientific Software, LLC
# 30 August 2006 / Cape Town, South Africa

# Modifications for inclusion in PLY distribution
from pprint import pprint
from ply import lex, yacc
import astor

from soc.decoder.power_decoder import create_pdecode
from soc.decoder.pseudo.lexer import IndentLexer

# I use the Python AST
#from compiler import ast
import ast

# Helper function
def Assign(left, right):
    names = []
    if isinstance(left, ast.Name):
        # Single assignment on left
        # XXX when doing IntClass, which will have an "eq" function,
        # this is how to access it
        # eq = ast.Attribute(left, "eq") # get eq fn
        # return ast.Call(eq, [right], []) # now call left.eq(right)
        return ast.Assign([ast.Name(left.id, ast.Store())], right)
    elif isinstance(left, ast.Tuple):
        # List of things - make sure they are Name nodes
        names = []
        for child in left.elts:
            if not isinstance(child, ast.Name):
                raise SyntaxError("that assignment not supported")
            names.append(child.id)
        # build a (Name, Name, ...) target tuple in Store context
        ass_list = [ast.Name(name, ast.Store()) for name in names]
        return ast.Assign([ast.Tuple(ass_list, ast.Store())], right)
    else:
        raise SyntaxError("Can't do that yet")
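
# Example of what the helper produces: Assign(ast.Name("RT"), expr) builds an
# ast.Assign equivalent to the Python statement "RT = expr"; the Tuple branch
# does the same for multiple-target assignment such as "(hi, lo) = expr".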


## I implemented INDENT / DEDENT generation as a post-processing filter

# The original lex token stream contains WS and NEWLINE characters.
# WS will only occur before any other tokens on a line.

# I have three filters.  One tags tokens by adding two attributes.
# "must_indent" is True if the token must be indented from the
# previous code.  The other is "at_line_start" which is True for WS
# and the first non-WS/non-NEWLINE token on a line.  It flags the check
# to see if the new line has changed indentation level.
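
# Illustrative sketch (not actual lexer output) of what the filter achieves:
# a raw stream such as
#     NAME(if) ... COLON NEWLINE WS NAME(x) ... NEWLINE NAME(y) ...
# is rewritten so that the leading-whitespace changes become explicit block
# markers:
#     NAME(if) ... COLON NEWLINE INDENT NAME(x) ... NEWLINE DEDENT NAME(y) ...
# which is the form the grammar rules below (see p_suite) expect.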


## Not using Python's approach because Ply supports precedence

# comparison: expr (comp_op expr)*
# arith_expr: term (('+'|'-') term)*
# term: factor (('*'|'/'|'%'|'//') factor)*
# factor: ('+'|'-'|'~') factor | power
# comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'

def make_lt_compare(arg):
    (left, right) = arg
    return ast.Compare(left, [ast.Lt()], [right])
def make_gt_compare(arg):
    (left, right) = arg
    return ast.Compare(left, [ast.Gt()], [right])
def make_eq_compare(arg):
    (left, right) = arg
    return ast.Compare(left, [ast.Eq()], [right])

binary_ops = {
    "+": ast.Add(),
    "-": ast.Sub(),
    "*": ast.Mult(),
    "/": ast.Div(),
    "<": make_lt_compare,
    ">": make_gt_compare,
    "=": make_eq_compare,
}
# unary "+"/"-" are wrapped in ast.UnaryOp by p_comparison below
unary_ops = {
    "+": ast.UAdd(),
    "-": ast.USub(),
}

def check_concat(node):  # checks if the comparison is already a concat
    print (node)
    if not isinstance(node, ast.Call):
        return [node]
    print (node.func.id)
    if node.func.id != 'concat':
        return [node]
    return node.args  # already a concat: return its argument list
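
# Example of the effect: because p_comparison runs both operands of "||"
# (the APPEND token) through check_concat, the pseudocode "a || b || c"
# reduces to a single call concat(a, b, c) rather than the nested
# concat(concat(a, b), c) that a plain left-associative reduction would give.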


########## Parser (tokens -> AST) ######

# also part of Ply
#import yacc


class PowerParser:

    precedence = (
        ("left", "EQ", "GT", "LT"),
        ("left", "PLUS", "MINUS"),
        ("left", "MULT", "DIV"),
    )

    def __init__(self):
        self.gprs = {}
        for rname in ['RA', 'RB', 'RC', 'RT', 'RS']:
            self.gprs[rname] = None
        self.read_regs = []
        self.write_regs = []

    # The grammar comments come from Python's Grammar/Grammar file

    ## NB: compound_stmt in single_input is followed by extra NEWLINE!
    # file_input: (NEWLINE | stmt)* ENDMARKER

    def p_file_input_end(self, p):
        """file_input_end : file_input ENDMARKER"""
        print ("end", p[1])
        p[0] = p[1]

    def p_file_input(self, p):
        """file_input : file_input NEWLINE
                      | file_input stmt
                      | NEWLINE
                      | stmt"""
        if isinstance(p[len(p)-1], str):
            if len(p) == 3:
                p[0] = p[1]
            else:
                p[0] = []  # p == 2 --> only a blank line
        else:
            if len(p) == 3:
                p[0] = p[1] + p[2]
            else:
                p[0] = p[1]


    # funcdef: [decorators] 'def' NAME parameters ':' suite
    # ignoring decorators
    def p_funcdef(self, p):
        "funcdef : DEF NAME parameters COLON suite"
        p[0] = ast.FunctionDef(p[2], p[3], p[5], ())

    # parameters: '(' [varargslist] ')'
    def p_parameters(self, p):
        """parameters : LPAR RPAR
                      | LPAR varargslist RPAR"""
        if len(p) == 3:
            args = []
        else:
            args = p[2]
        p[0] = ast.arguments(args=args, vararg=None, kwarg=None, defaults=[])

    # varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] |
    #                                       '**' NAME) |
    # highly simplified
    def p_varargslist(self, p):
        """varargslist : varargslist COMMA NAME
                       | NAME"""
        if len(p) == 4:
            p[0] = p[1] + [p[3]]  # p[1] is already a list, p[3] is a NAME
        else:
            p[0] = [p[1]]

    # stmt: simple_stmt | compound_stmt
    def p_stmt_simple(self, p):
        """stmt : simple_stmt"""
        # simple_stmt is a list
        p[0] = p[1]

    def p_stmt_compound(self, p):
        """stmt : compound_stmt"""
        p[0] = [p[1]]

    # simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
    def p_simple_stmt(self, p):
        """simple_stmt : small_stmts NEWLINE
                       | small_stmts SEMICOLON NEWLINE"""
        p[0] = p[1]

    def p_small_stmts(self, p):
        """small_stmts : small_stmts SEMICOLON small_stmt
                       | small_stmt"""
        if len(p) == 4:
            p[0] = p[1] + [p[3]]
        else:
            p[0] = [p[1]]

    # small_stmt: expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
    #             import_stmt | global_stmt | exec_stmt | assert_stmt
    def p_small_stmt(self, p):
        """small_stmt : flow_stmt
                      | break_stmt
                      | expr_stmt"""
        if isinstance(p[1], ast.Call):
            p[0] = ast.Expr(p[1])
        else:
            p[0] = p[1]

    # expr_stmt: testlist (augassign (yield_expr|testlist) |
    #                      ('=' (yield_expr|testlist))*)
    # augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
    #             '<<=' | '>>=' | '**=' | '//=')
    def p_expr_stmt(self, p):
        """expr_stmt : testlist ASSIGN testlist
                     | testlist """
        if len(p) == 2:
            # a list of expressions
            #p[0] = ast.Discard(p[1])
            p[0] = p[1]
        else:
            if isinstance(p[1], ast.Name) and p[1].id in self.gprs:
                self.write_regs.append(p[1].id)  # add to list of regs to write
            p[0] = Assign(p[1], p[3])

    def p_flow_stmt(self, p):
        "flow_stmt : return_stmt"
        p[0] = p[1]

    # return_stmt: 'return' [testlist]
    def p_return_stmt(self, p):
        "return_stmt : RETURN testlist"
        p[0] = ast.Return(p[2])


    def p_compound_stmt(self, p):
        """compound_stmt : if_stmt
                         | while_stmt
                         | for_stmt
                         | funcdef
        """
        p[0] = p[1]

    def p_break_stmt(self, p):
        """break_stmt : BREAK
        """
        p[0] = ast.Break()

    def p_for_stmt(self, p):
        """for_stmt : FOR test EQ test TO test COLON suite
        """
        # auto-add-one (sigh) due to python range
        start = p[4]
        end = ast.BinOp(p[6], ast.Add(), ast.Constant(1))
        it = ast.Call(ast.Name("range"), [start, end], [])
        p[0] = ast.For(p[2], it, p[8], [])
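
    # Example of the mapping: the pseudocode loop "for i = 0 to 7" becomes the
    # Python loop "for i in range(0, 7 + 1)", i.e. an inclusive upper bound,
    # which is why 1 is added to the end expression above.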

    def p_while_stmt(self, p):
        """while_stmt : DO WHILE test COLON suite ELSE COLON suite
                      | DO WHILE test COLON suite
        """
        if len(p) == 6:
            p[0] = ast.While(p[3], p[5], [])
        else:
            p[0] = ast.While(p[3], p[5], p[8])

    def p_if_stmt(self, p):
        """if_stmt : IF test COLON suite ELSE COLON suite
                   | IF test COLON suite
        """
        if len(p) == 5:
            p[0] = ast.If(p[2], p[4], [])
        else:
            p[0] = ast.If(p[2], p[4], p[7])

    def p_suite(self, p):
        """suite : simple_stmt
                 | NEWLINE INDENT stmts DEDENT"""
        if len(p) == 2:
            p[0] = p[1]
        else:
            p[0] = p[3]

    def p_stmts(self, p):
        """stmts : stmts stmt
                 | stmt"""
        if len(p) == 3:
            p[0] = p[1] + p[2]
        else:
            p[0] = p[1]

    def p_comparison(self, p):
        """comparison : comparison PLUS comparison
                      | comparison MINUS comparison
                      | comparison MULT comparison
                      | comparison DIV comparison
                      | comparison LT comparison
                      | comparison EQ comparison
                      | comparison GT comparison
                      | PLUS comparison
                      | MINUS comparison
                      | comparison APPEND comparison
                      | power"""
        if len(p) == 4:
            print (list(p))
            if p[2] == '||':
                l = check_concat(p[1]) + check_concat(p[3])
                p[0] = ast.Call(ast.Name("concat"), l, [])
            elif p[2] in ['<', '>', '=']:
                p[0] = binary_ops[p[2]]((p[1], p[3]))
            else:
                p[0] = ast.BinOp(p[1], binary_ops[p[2]], p[3])
        elif len(p) == 3:
            # unary plus / minus
            p[0] = ast.UnaryOp(unary_ops[p[1]], p[2])
        else:
            p[0] = p[1]
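
    # Note on "=": inside an expression it is the EQ comparison token and maps
    # to ast.Compare via make_eq_compare, while assignment uses the separate
    # ASSIGN token handled in p_expr_stmt, so a test such as "a = b" compares
    # for equality rather than assigning.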

    # power: atom trailer* ['**' factor]
    # trailers enable function calls (and subscripts).
    # I only allow one level of calls
    # so this is 'trailer'
    def p_power(self, p):
        """power : atom
                 | atom trailer"""
        if len(p) == 2:
            p[0] = p[1]
        else:
            if p[2][0] == "CALL":
                #p[0] = ast.Expr(ast.Call(p[1], p[2][1], []))
                p[0] = ast.Call(p[1], p[2][1], [])
                #if p[1].id == 'print':
                #    p[0] = ast.Printnl(ast.Tuple(p[2][1]), None, None)
                #else:
                #    p[0] = ast.CallFunc(p[1], p[2][1], None, None)
            else:
                print (p[2][1])
                #raise AssertionError("not implemented %s" % p[2][0])
                subs = p[2][1]
                if len(subs) == 1:
                    idx = subs[0]
                else:
                    idx = ast.Slice(subs[0], subs[1], None)
                p[0] = ast.Subscript(p[1], idx)
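
    # Illustration of the SUBS branch: a single index such as "RA[3]" yields
    # Subscript(atom, index), while a range such as "RA[0:3]" yields a
    # Subscript whose index is an ast.Slice built from the two bounds.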

    def p_atom_name(self, p):
        """atom : NAME"""
        p[0] = ast.Name(p[1], ctx=ast.Load())

    def p_atom_number(self, p):
        """atom : BINARY
                | NUMBER
                | STRING"""
        p[0] = ast.Constant(p[1])

    #'[' [listmaker] ']' |

    def p_atom_listmaker(self, p):
        """atom : LBRACK listmaker RBRACK"""
        p[0] = p[2]

    def p_listmaker(self, p):
        """listmaker : test COMMA listmaker
                     | test
        """
        if len(p) == 2:
            p[0] = ast.List([p[1]])
        else:
            p[0] = ast.List([p[1]] + p[3].elts)  # p[3] is already an ast.List

    def p_atom_tuple(self, p):
        """atom : LPAR testlist RPAR"""
        print ("tuple", p[2])
        if isinstance(p[2], ast.Name):
            print ("tuple name", p[2].id)
            if p[2].id in self.gprs:
                self.read_regs.append(p[2].id)  # add to list of regs to read
            #p[0] = ast.Subscript(ast.Name("GPR"), ast.Str(p[2].id))
            #return
        p[0] = p[2]

    # trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
    def p_trailer(self, p):
        """trailer : trailer_arglist
                   | trailer_subscript
        """
        p[0] = p[1]

    def p_trailer_arglist(self, p):
        "trailer_arglist : LPAR arglist RPAR"
        p[0] = ("CALL", p[2])

    def p_trailer_subscript(self, p):
        "trailer_subscript : LBRACK subscript RBRACK"
        p[0] = ("SUBS", p[2])

    #subscript: '.' '.' '.' | test | [test] ':' [test]

    def p_subscript(self, p):
        """subscript : test COLON test
                     | test
        """
        if len(p) == 4:
            p[0] = [p[1], p[3]]
        else:
            p[0] = [p[1]]


    # testlist: test (',' test)* [',']
    # Contains shift/reduce error
    def p_testlist(self, p):
        """testlist : testlist_multi COMMA
                    | testlist_multi """
        if len(p) == 2:
            p[0] = p[1]
        else:
            # May need to promote singleton to tuple
            if isinstance(p[1], list):
                p[0] = p[1]
            else:
                p[0] = [p[1]]
        # Convert into a tuple?
        if isinstance(p[0], list):
            p[0] = ast.Tuple(p[0])

    def p_testlist_multi(self, p):
        """testlist_multi : testlist_multi COMMA test
                          | test"""
        if len(p) == 2:
            # singleton
            p[0] = p[1]
        else:
            if isinstance(p[1], list):
                p[0] = p[1] + [p[3]]
            else:
                # singleton -> tuple
                p[0] = [p[1], p[3]]

    # test: or_test ['if' or_test 'else' test] | lambdef
    # as I don't support 'and', 'or', and 'not' this works down to 'comparison'
    def p_test(self, p):
        "test : comparison"
        p[0] = p[1]

    # arglist: (argument ',')* (argument [',']| '*' test [',' '**' test]
    #                           | '**' test)
    # XXX INCOMPLETE: this doesn't allow the trailing comma
    def p_arglist(self, p):
        """arglist : arglist COMMA argument
                   | argument"""
        if len(p) == 4:
            p[0] = p[1] + [p[3]]
        else:
            p[0] = [p[1]]

    # argument: test [gen_for] | test '=' test  # Really [keyword '='] test
    def p_argument(self, p):
        "argument : test"
        p[0] = p[1]

    def p_error(self, p):
        #print "Error!", repr(p)
        raise SyntaxError(p)


class GardenSnakeParser(PowerParser):
    def __init__(self, lexer=None):
        PowerParser.__init__(self)
        if lexer is None:
            lexer = IndentLexer(debug=1)
        self.lexer = lexer
        self.tokens = lexer.tokens
        self.parser = yacc.yacc(module=self, start="file_input_end",
                                debug=False, write_tables=False)

        self.sd = create_pdecode()

    def parse(self, code):
        self.lexer.input(code)
        result = self.parser.parse(lexer=self.lexer, debug=False)
        return ast.Module(result)


###### Code generation ######

#from compiler import misc, syntax, pycodegen

class GardenSnakeCompiler(object):
    def __init__(self):
        self.parser = GardenSnakeParser()

    def compile(self, code, mode="exec", filename="<string>"):
        tree = self.parser.parse(code)
        print ("snake")
        pprint(tree)
        return tree
        # unreachable: leftover code-generation path from the original
        # GardenSnake demo (pycodegen is not imported here)
        #misc.set_filename(filename, tree)
        #return compile(tree, mode="exec", filename="<string>")
        #syntax.check(tree)
        #gen = pycodegen.ModuleCodeGenerator(tree)
        #code = gen.getCode()
        #return code
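

if __name__ == '__main__':
    # Quick manual sketch: parse a small fragment of pseudocode and print the
    # generated Python source.  The fragment is illustrative only - the exact
    # surface syntax (e.g. "<-" for assignment) is defined by the lexer in
    # soc.decoder.pseudo.lexer, not by this file, so adjust it to match; the
    # soc package (and its decoder CSV tables) must also be importable.
    example = """\
RT <- RA + RB
"""
    compiler = GardenSnakeCompiler()
    tree = compiler.compile(example, mode="exec", filename="example")
    # astor turns the generated AST back into readable Python source
    print(astor.to_source(tree))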