# Based on GardenSnake - a parser generator demonstration program
# GardenSnake was released into the Public Domain by Andrew Dalke.

# Portions of this work are derived from Python's Grammar definition
# and may be covered under the Python copyright and license
#
# Andrew Dalke / Dalke Scientific Software, LLC
# 30 August 2006 / Cape Town, South Africa

# Modifications for inclusion in PLY distribution
from pprint import pprint
from ply import lex, yacc
import astor

from soc.decoder.power_decoder import create_pdecode
from soc.decoder.pseudo.lexer import IndentLexer
from soc.decoder.orderedset import OrderedSet

# I use the Python AST
#from compiler import ast
import ast

# Helper function


def Assign(autoassign, assignname, left, right, iea_mode):
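    """Converts a pseudocode assignment into Python AST nodes.

    Name targets become a plain ast.Assign, tuple targets are unpacked
    element by element, and subscript (slice) targets may additionally get
    an auto-generated pre-declaration so that slice-assignment "works"
    (see the hack further down).
    """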
    names = []
    print("Assign", left, right)
    if isinstance(left, ast.Name):
        # Single assignment on left
        # XXX when doing IntClass, which will have an "eq" function,
        # this is how to access it
        # eq = ast.Attribute(left, "eq")   # get eq fn
        # return ast.Call(eq, [right], [])  # now call left.eq(right)
        return ast.Assign([ast.Name(left.id, ast.Store())], right)
    elif isinstance(left, ast.Tuple):
        # List of things - make sure they are Name nodes
        names = []
        for child in left.elts:
            if not isinstance(child, ast.Name):
                raise SyntaxError("that assignment not supported")
            names.append(child.id)
        ass_list = [ast.Name(name, ast.Store()) for name in names]
        return ast.Assign([ast.Tuple(ass_list, ast.Store())], right)
    elif isinstance(left, ast.Subscript):
        ls = left.slice
        if (isinstance(ls, ast.Slice) and isinstance(right, ast.Name) and
                right.id == 'undefined'):
            # undefined needs to be copied with the exact same slice
            right = ast.Subscript(right, ls, ast.Load())
            return ast.Assign([left], right)
        res = ast.Assign([left], right)
        if autoassign and isinstance(ls, ast.Slice):
            # hack to create a variable pre-declared based on a slice.
            # dividend[0:32] = (RA)[0:32] will create
            #     dividend = [0] * 32
            #     dividend[0:32] = (RA)[0:32]
            # the declaration makes the slice-assignment "work"
            lower, upper, step = ls.lower, ls.upper, ls.step
            print("lower, upper, step", repr(lower), repr(upper), step)
            if not isinstance(lower, ast.Constant) or \
                    not isinstance(upper, ast.Constant):
                return res
            qty = ast.Num(upper.value-lower.value)
            keywords = [ast.keyword(arg='repeat', value=qty)]
            l = [ast.Num(0)]
            right = ast.Call(ast.Name("concat", ast.Load()), l, keywords)
            declare = ast.Assign([ast.Name(assignname, ast.Store())], right)
            return [declare, res]
        return res
        # XXX HMMM probably not needed...
        ls = left.slice
        if isinstance(ls, ast.Slice):
            lower, upper, step = ls.lower, ls.upper, ls.step
            print("slice assign", lower, upper, step)
            if step is None:
                ls = (lower, upper, None)
            else:
                ls = (lower, upper, step)
            ls = ast.Tuple(ls)
        return ast.Call(ast.Name("selectassign", ast.Load()),
                        [left.value, ls, right], [])
    else:
        print("Assign fail")
        raise SyntaxError("Can't do that yet")


# I implemented INDENT / DEDENT generation as a post-processing filter

# The original lex token stream contains WS and NEWLINE characters.
# WS will only occur before any other tokens on a line.

# I have three filters. One tags tokens by adding two attributes.
# "must_indent" is True if the token must be indented from the
# previous code. The other is "at_line_start" which is True for WS
# and the first non-WS/non-NEWLINE on a line. It flags the check to
# see if the new line has changed indentation level.


# Not using Python's approach because Ply supports precedence

# comparison: expr (comp_op expr)*
# arith_expr: term (('+'|'-') term)*
# term: factor (('*'|'/'|'%'|'//') factor)*
# factor: ('+'|'-'|'~') factor | power
# comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'

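# Comparison helpers: instead of ast.Compare nodes, comparisons are turned
# into calls to named functions (le, ge, lt, gt, eq, ne).  binary_ops below
# maps each operator string either to one of these helpers or to an ast
# operator node used with ast.BinOp in p_comparison; e.g. "a <= b" parses
# to the call le(a, b).
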
def make_le_compare(arg):
    (left, right) = arg
    return ast.Call(ast.Name("le", ast.Load()), (left, right), [])


def make_ge_compare(arg):
    (left, right) = arg
    return ast.Call(ast.Name("ge", ast.Load()), (left, right), [])


def make_lt_compare(arg):
    (left, right) = arg
    return ast.Call(ast.Name("lt", ast.Load()), (left, right), [])


def make_gt_compare(arg):
    (left, right) = arg
    return ast.Call(ast.Name("gt", ast.Load()), (left, right), [])


def make_eq_compare(arg):
    (left, right) = arg
    return ast.Call(ast.Name("eq", ast.Load()), (left, right), [])


def make_ne_compare(arg):
    (left, right) = arg
    return ast.Call(ast.Name("ne", ast.Load()), (left, right), [])


binary_ops = {
    "^": ast.BitXor(),
    "&": ast.BitAnd(),
    "|": ast.BitOr(),
    "+": ast.Add(),
    "-": ast.Sub(),
    "*": ast.Mult(),
    "/": ast.FloorDiv(),
    "%": ast.Mod(),
    "<=": make_le_compare,
    ">=": make_ge_compare,
    "<": make_lt_compare,
    ">": make_gt_compare,
    "=": make_eq_compare,
    "!=": make_ne_compare,
}
unary_ops = {
    "+": ast.UAdd(),
    "-": ast.USub(),
    "¬": ast.Invert(),
}


def check_concat(node):  # checks if the comparison is already a concat
    print("check concat", node)
    if not isinstance(node, ast.Call):
        return [node]
    print("func", node.func.id)
    if node.func.id != 'concat':
        return [node]
    if node.keywords:  # a repeated list-constant, don't optimise
        return [node]
    return node.args


# identify SelectableInt pattern [something] * N
# must return concat(something, repeat=N)
def identify_sint_mul_pattern(p):
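    """Returns True when the production is "[x] * N" (a single-element
    list multiplied by a constant); p_comparison rewrites that pattern
    into concat(x, repeat=N).
    """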
    if p[2] != '*':  # multiply
        return False
    if not isinstance(p[3], ast.Constant):  # rhs = Num
        return False
    if not isinstance(p[1], ast.List):  # lhs is a list
        return False
    l = p[1].elts
    if len(l) != 1:  # lhs is a list of length 1
        return False
    return True  # yippee!


def apply_trailer(atom, trailer):
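    """Applies a trailer - a ("CALL", args) or ("SUBS", subscript) tuple
    from p_trailer_arglist / p_trailer_subscript, or a ("TLIST", ...)
    chain of them - to an atom, producing an ast.Call or ast.Subscript.
    """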
    if trailer[0] == "TLIST":
        # assume depth of one
        atom = apply_trailer(atom, trailer[1])
        trailer = trailer[2]
    if trailer[0] == "CALL":
        #p[0] = ast.Expr(ast.Call(p[1], p[2][1], []))
        return ast.Call(atom, trailer[1], [])
        # if p[1].id == 'print':
        #     p[0] = ast.Printnl(ast.Tuple(p[2][1]), None, None)
        # else:
        #     p[0] = ast.CallFunc(p[1], p[2][1], None, None)
    else:
        print("subscript atom", trailer[1])
        #raise AssertionError("not implemented %s" % p[2][0])
        subs = trailer[1]
        if len(subs) == 1:
            idx = subs[0]
        else:
            idx = ast.Slice(subs[0], subs[1], None)
        return ast.Subscript(atom, idx, ast.Load())

########## Parser (tokens -> AST) ######

# also part of Ply
#import yacc

# https://www.mathcs.emory.edu/~valerie/courses/fall10/155/resources/op_precedence.html
# python operator precedence
# Highest precedence at top, lowest at bottom.
# Operators in the same box evaluate left to right.
#
# Operator                            Description
# ()                                  Parentheses (grouping)
# f(args...)                          Function call
# x[index:index]                      Slicing
# x[index]                            Subscription
# x.attribute                         Attribute reference
# **                                  Exponentiation
# ~x                                  Bitwise not
# +x, -x                              Positive, negative
# *, /, %                             mul, div, remainder
# +, -                                Addition, subtraction
# <<, >>                              Bitwise shifts
# &                                   Bitwise AND
# ^                                   Bitwise XOR
# |                                   Bitwise OR
# in, not in, is, is not, <, <=, >, >=, <>, !=, ==   comp, membership, ident
# not x                               Boolean NOT
# and                                 Boolean AND
# or                                  Boolean OR
# lambda                              Lambda expression


class PowerParser:
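    """PLY grammar that turns instruction pseudocode into a Python AST,
    keeping track of the operand fields used and of the registers that
    are read, written, left uninitialised, or "special" (CR, LR, CTR,
    TAR, FPSCR, MSR).
    """
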
    precedence = (
        ("left", "EQ", "NE", "GT", "LT", "LE", "GE", "LTU", "GTU"),
        ("left", "BITOR"),
        ("left", "BITXOR"),
        ("left", "BITAND"),
        ("left", "PLUS", "MINUS"),
        ("left", "MULT", "DIV", "MOD"),
        ("left", "INVERT"),
    )

    def __init__(self, form, include_carry_in_write=False):
        self.include_ca_in_write = include_carry_in_write
        self.gprs = {}
        form = self.sd.sigforms[form]
        print(form)
        formkeys = form._asdict().keys()
        self.declared_vars = set()
        for rname in ['RA', 'RB', 'RC', 'RT', 'RS']:
            self.gprs[rname] = None
            self.declared_vars.add(rname)
        self.available_op_fields = set()
        for k in formkeys:
            if k not in self.gprs:
                if k == 'SPR':  # sigh, lower-case to not conflict
                    k = k.lower()
                self.available_op_fields.add(k)
        self.op_fields = OrderedSet()
        self.read_regs = OrderedSet()
        self.uninit_regs = OrderedSet()
        self.write_regs = OrderedSet()
        self.special_regs = OrderedSet()  # see p_atom_name

    # The grammar comments come from Python's Grammar/Grammar file

    # NB: compound_stmt in single_input is followed by extra NEWLINE!
    # file_input: (NEWLINE | stmt)* ENDMARKER

    def p_file_input_end(self, p):
        """file_input_end : file_input ENDMARKER"""
        print("end", p[1])
        p[0] = p[1]

    def p_file_input(self, p):
        """file_input : file_input NEWLINE
                      | file_input stmt
                      | NEWLINE
                      | stmt"""
        if isinstance(p[len(p)-1], str):
            if len(p) == 3:
                p[0] = p[1]
            else:
                p[0] = []  # p == 2 --> only a blank line
        else:
            if len(p) == 3:
                p[0] = p[1] + p[2]
            else:
                p[0] = p[1]

    # funcdef: [decorators] 'def' NAME parameters ':' suite
    # ignoring decorators

    def p_funcdef(self, p):
        "funcdef : DEF NAME parameters COLON suite"
        p[0] = ast.FunctionDef(p[2], p[3], p[5], ())

    # parameters: '(' [varargslist] ')'
    def p_parameters(self, p):
        """parameters : LPAR RPAR
                      | LPAR varargslist RPAR"""
        if len(p) == 3:
            args = []
        else:
            args = p[2]
        p[0] = ast.arguments(args=args, vararg=None, kwarg=None, defaults=[])

    # varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] |
    #              '**' NAME) |
    # highly simplified

    def p_varargslist(self, p):
        """varargslist : varargslist COMMA NAME
                       | NAME"""
        if len(p) == 4:
            p[0] = p[1] + [p[3]]
        else:
            p[0] = [p[1]]

    # stmt: simple_stmt | compound_stmt
    def p_stmt_simple(self, p):
        """stmt : simple_stmt"""
        # simple_stmt is a list
        p[0] = p[1]

    def p_stmt_compound(self, p):
        """stmt : compound_stmt"""
        p[0] = [p[1]]

    # simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
    def p_simple_stmt(self, p):
        """simple_stmt : small_stmts NEWLINE
                       | small_stmts SEMICOLON NEWLINE"""
        p[0] = p[1]

    def p_small_stmts(self, p):
        """small_stmts : small_stmts SEMICOLON small_stmt
                       | small_stmt"""
        if len(p) == 4:
            p[0] = p[1] + [p[3]]
        elif isinstance(p[1], list):
            p[0] = p[1]
        else:
            p[0] = [p[1]]

    # small_stmt: expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
    #             import_stmt | global_stmt | exec_stmt | assert_stmt
    def p_small_stmt(self, p):
        """small_stmt : flow_stmt
                      | break_stmt
                      | expr_stmt"""
        if isinstance(p[1], ast.Call):
            p[0] = ast.Expr(p[1])
        elif isinstance(p[1], ast.Name) and p[1].id == 'TRAP':
            # TRAP needs to actually be a function
            name = ast.Name("self", ast.Load())
            name = ast.Attribute(name, "TRAP", ast.Load())
            p[0] = ast.Call(name, [], [])
        else:
            p[0] = p[1]

    # expr_stmt: testlist (augassign (yield_expr|testlist) |
    #                      ('=' (yield_expr|testlist))*)
    # augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
    #             '<<=' | '>>=' | '**=' | '//=')
    def p_expr_stmt(self, p):
        """expr_stmt : testlist ASSIGNEA testlist
                     | testlist ASSIGN testlist
                     | testlist """
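        # illustrative forms handled below (assuming ASSIGN is the "<-" arrow):
        #   RT <- (RA) + (RB)              - plain name target
        #   dividend[0:32] <- (RA)[0:32]   - slice target (may auto-declare)
        #   MEM(EA, 8) <- RS               - rewritten into a memassign(...) call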
        print("expr_stmt", p)
        if len(p) == 2:
            # a list of expressions
            #p[0] = ast.Discard(p[1])
            p[0] = p[1]
        else:
            iea_mode = p[2] == '<-iea'
            name = None
            autoassign = False
            if isinstance(p[1], ast.Name):
                name = p[1].id
            elif isinstance(p[1], ast.Subscript):
                if isinstance(p[1].value, ast.Name):
                    name = p[1].value.id
                    if name in self.gprs:
                        # add to list of uninitialised
                        self.uninit_regs.add(name)
                    autoassign = name not in self.declared_vars
            elif isinstance(p[1], ast.Call) and p[1].func.id in ['GPR', 'SPR']:
                print(astor.dump_tree(p[1]))
                # replace GPR(x) with GPR[x]
                idx = p[1].args[0]
                p[1] = ast.Subscript(p[1].func, idx, ast.Load())
            elif isinstance(p[1], ast.Call) and p[1].func.id == 'MEM':
                print("mem assign")
                print(astor.dump_tree(p[1]))
                p[1].func.id = "memassign"  # change function name to set
                p[1].args.append(p[3])
                p[0] = p[1]
                print("mem rewrite")
                print(astor.dump_tree(p[0]))
                return
            else:
                print("help, help")
                print(astor.dump_tree(p[1]))
            print("expr assign", name, p[1])
            if name and name in self.gprs:
                self.write_regs.add(name)  # add to list of regs to write
            p[0] = Assign(autoassign, name, p[1], p[3], iea_mode)
            if name:
                self.declared_vars.add(name)

    def p_flow_stmt(self, p):
        "flow_stmt : return_stmt"
        p[0] = p[1]

    # return_stmt: 'return' [testlist]
    def p_return_stmt(self, p):
        "return_stmt : RETURN testlist"
        p[0] = ast.Return(p[2])

    def p_compound_stmt(self, p):
        """compound_stmt : if_stmt
                         | while_stmt
                         | switch_stmt
                         | for_stmt
                         | funcdef
        """
        p[0] = p[1]

    def p_break_stmt(self, p):
        """break_stmt : BREAK
        """
        p[0] = ast.Break()

    def p_for_stmt(self, p):
        """for_stmt : FOR atom EQ test TO test COLON suite
                    | DO atom EQ test TO test COLON suite
        """
        # auto-add-one (sigh) due to python range
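        # e.g. "for i = 0 to 7" iterates i = 0,1,...,7, i.e. range(0, 7 + 1)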
        start = p[4]
        end = ast.BinOp(p[6], ast.Add(), ast.Constant(1))
        it = ast.Call(ast.Name("range", ast.Load()), [start, end], [])
        p[0] = ast.For(p[2], it, p[8], [])

    def p_while_stmt(self, p):
        """while_stmt : DO WHILE test COLON suite ELSE COLON suite
                      | DO WHILE test COLON suite
        """
        if len(p) == 6:
            p[0] = ast.While(p[3], p[5], [])
        else:
            p[0] = ast.While(p[3], p[5], p[8])

    def p_switch_stmt(self, p):
        """switch_stmt : SWITCH LPAR atom RPAR COLON NEWLINE INDENT switches DEDENT
        """
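        # a switch statement is lowered into a chain of nested ast.If nodes
        # testing "switchon in [case values]"; cases whose suite is None
        # (i.e. "fallthrough") defer their values to the next real case,
        # and the default suite becomes the final else block.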
        switchon = p[3]
        print("switch stmt")
        print(astor.dump_tree(p[1]))

        cases = []
        current_cases = []  # for deferral
        for (case, suite) in p[8]:
            print("for", case, suite)
            if suite is None:
                for c in case:
                    current_cases.append(ast.Num(c))
                continue
            if case == 'default':  # last
                break
            for c in case:
                current_cases.append(ast.Num(c))
            print("cases", current_cases)
            compare = ast.Compare(switchon, [ast.In()],
                                  [ast.List(current_cases, ast.Load())])
            current_cases = []
            cases.append((compare, suite))

        print("ended", case, current_cases)
        if case == 'default':
            if current_cases:
                compare = ast.Compare(switchon, [ast.In()],
                                      [ast.List(current_cases, ast.Load())])
                cases.append((compare, suite))
            cases.append((None, suite))

        cases.reverse()
        res = []
        for compare, suite in cases:
            print("after rev", compare, suite)
            if compare is None:
                assert len(res) == 0, "last case should be default"
                res = suite
            else:
                if not isinstance(res, list):
                    res = [res]
                res = ast.If(compare, suite, res)
        p[0] = res

    def p_switches(self, p):
        """switches : switch_list switch_default
                    | switch_default
        """
        if len(p) == 3:
            p[0] = p[1] + [p[2]]
        else:
            p[0] = [p[1]]

    def p_switch_list(self, p):
        """switch_list : switch_case switch_list
                       | switch_case
        """
        if len(p) == 3:
            p[0] = [p[1]] + p[2]
        else:
            p[0] = [p[1]]

    def p_switch_case(self, p):
        """switch_case : CASE LPAR atomlist RPAR COLON suite
        """
        # XXX bad hack
        if isinstance(p[6][0], ast.Name) and p[6][0].id == 'fallthrough':
            p[6] = None
        p[0] = (p[3], p[6])

    def p_switch_default(self, p):
        """switch_default : DEFAULT COLON suite
        """
        p[0] = ('default', p[3])

    def p_atomlist(self, p):
        """atomlist : atom COMMA atomlist
                    | atom
        """
        assert isinstance(p[1], ast.Constant), "case must be numbers"
        if len(p) == 4:
            p[0] = [p[1].value] + p[3]
        else:
            p[0] = [p[1].value]

    def p_if_stmt(self, p):
        """if_stmt : IF test COLON suite ELSE COLON if_stmt
                   | IF test COLON suite ELSE COLON suite
                   | IF test COLON suite
        """
        if len(p) == 8 and isinstance(p[7], ast.If):
            p[0] = ast.If(p[2], p[4], [p[7]])
        elif len(p) == 5:
            p[0] = ast.If(p[2], p[4], [])
        else:
            p[0] = ast.If(p[2], p[4], p[7])

    def p_suite(self, p):
        """suite : simple_stmt
                 | NEWLINE INDENT stmts DEDENT"""
        if len(p) == 2:
            p[0] = p[1]
        else:
            p[0] = p[3]

    def p_stmts(self, p):
        """stmts : stmts stmt
                 | stmt"""
        if len(p) == 3:
            p[0] = p[1] + p[2]
        else:
            p[0] = p[1]

    def p_comparison(self, p):
        """comparison : comparison PLUS comparison
                      | comparison MINUS comparison
                      | comparison MULT comparison
                      | comparison DIV comparison
                      | comparison MOD comparison
                      | comparison EQ comparison
                      | comparison NE comparison
                      | comparison LE comparison
                      | comparison GE comparison
                      | comparison LTU comparison
                      | comparison GTU comparison
                      | comparison LT comparison
                      | comparison GT comparison
                      | comparison BITOR comparison
                      | comparison BITXOR comparison
                      | comparison BITAND comparison
                      | PLUS comparison
                      | comparison MINUS
                      | INVERT comparison
                      | comparison APPEND comparison
                      | power"""
        if len(p) == 4:
            print(list(p))
            if p[2] == '<u':
                p[0] = ast.Call(ast.Name("ltu", ast.Load()), (p[1], p[3]), [])
            elif p[2] == '>u':
                p[0] = ast.Call(ast.Name("gtu", ast.Load()), (p[1], p[3]), [])
            elif p[2] == '||':
                l = check_concat(p[1]) + check_concat(p[3])
                p[0] = ast.Call(ast.Name("concat", ast.Load()), l, [])
            elif p[2] in ['/', '%']:
                # bad hack: if % or / used anywhere other than div/mod ops,
                # do % or /. however if the argument names are "dividend"
                # we must call the special trunc_divs and trunc_rems functions
                l, r = p[1], p[3]
                # actual call will be "dividend / divisor" - just check
                # LHS name
                if isinstance(l, ast.Name) and l.id == 'dividend':
                    if p[2] == '/':
                        fn = 'trunc_divs'
                    else:
                        fn = 'trunc_rems'
                    # return "function trunc_xxx(l, r)"
                    p[0] = ast.Call(ast.Name(fn, ast.Load()), (l, r), [])
                else:
                    # return "l {binop} r"
                    p[0] = ast.BinOp(p[1], binary_ops[p[2]], p[3])
            elif p[2] in ['<', '>', '=', '<=', '>=', '!=']:
                p[0] = binary_ops[p[2]]((p[1], p[3]))
            elif identify_sint_mul_pattern(p):
                keywords = [ast.keyword(arg='repeat', value=p[3])]
                l = p[1].elts
                p[0] = ast.Call(ast.Name("concat", ast.Load()), l, keywords)
            else:
                p[0] = ast.BinOp(p[1], binary_ops[p[2]], p[3])
        elif len(p) == 3:
            if isinstance(p[2], str) and p[2] == '-':
                p[0] = ast.UnaryOp(unary_ops[p[2]], p[1])
            else:
                p[0] = ast.UnaryOp(unary_ops[p[1]], p[2])
        else:
            p[0] = p[1]

    # power: atom trailer* ['**' factor]
    # trailers enable function calls (and subscripts).
    # so this is 'trailerlist'
    def p_power(self, p):
        """power : atom
                 | atom trailerlist"""
        if len(p) == 2:
            p[0] = p[1]
        else:
            print("power dump atom")
            print(astor.dump_tree(p[1]))
            print("power dump trailerlist")
            print(astor.dump_tree(p[2]))
            p[0] = apply_trailer(p[1], p[2])
            if isinstance(p[1], ast.Name):
                name = p[1].id
                if name in ['RA', 'RS', 'RB', 'RC']:
                    self.read_regs.add(name)

    def p_atom_name(self, p):
        """atom : NAME"""
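        # names are tracked as a side-effect: operand-field names go into
        # op_fields, 'overflow' (and CA/CA32 when include_ca_in_write is
        # set) into write_regs, and special registers (CR, LR, CTR, TAR,
        # FPSCR, MSR) into both special_regs and write_regs.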
        name = p[1]
        if name in self.available_op_fields:
            self.op_fields.add(name)
        if name == 'overflow':
            self.write_regs.add(name)
        if self.include_ca_in_write:
            if name in ['CA', 'CA32']:
                self.write_regs.add(name)
        if name in ['CR', 'LR', 'CTR', 'TAR', 'FPSCR', 'MSR']:
            self.special_regs.add(name)
            self.write_regs.add(name)  # and add to list to write
        p[0] = ast.Name(id=name, ctx=ast.Load())

    def p_atom_number(self, p):
        """atom : BINARY
                | NUMBER
                | HEX
                | STRING"""
        p[0] = ast.Constant(p[1])

    # '[' [listmaker] ']' |

    def p_atom_listmaker(self, p):
        """atom : LBRACK listmaker RBRACK"""
        p[0] = p[2]

    def p_listmaker(self, p):
        """listmaker : test COMMA listmaker
                     | test
        """
        if len(p) == 2:
            p[0] = ast.List([p[1]], ast.Load())
        else:
            p[0] = ast.List([p[1]] + p[3].elts, ast.Load())

    def p_atom_tuple(self, p):
        """atom : LPAR testlist RPAR"""
        print("tuple", p[2])
        print("astor dump")
        print(astor.dump_tree(p[2]))

        if isinstance(p[2], ast.Name):
            name = p[2].id
            print("tuple name", name)
            if name in self.gprs:
                self.read_regs.add(name)  # add to list of regs to read
                #p[0] = ast.Subscript(ast.Name("GPR", ast.Load()), ast.Str(p[2].id))
                # return
            p[0] = p[2]
        elif isinstance(p[2], ast.BinOp):
            if isinstance(p[2].left, ast.Name) and \
                    isinstance(p[2].right, ast.Constant) and \
                    p[2].right.value == 0 and \
                    p[2].left.id in self.gprs:
                rid = p[2].left.id
                self.read_regs.add(rid)  # add to list of regs to read
                # create special call to GPR.getz
                gprz = ast.Name("GPR", ast.Load())
                # get testzero function
                gprz = ast.Attribute(gprz, "getz", ast.Load())
                # *sigh* see class GPR. we need index itself not reg value
                ridx = ast.Name("_%s" % rid, ast.Load())
                p[0] = ast.Call(gprz, [ridx], [])
                print("tree", astor.dump_tree(p[0]))
            else:
                p[0] = p[2]
        else:
            p[0] = p[2]

    def p_trailerlist(self, p):
        """trailerlist : trailer trailerlist
                       | trailer
        """
        if len(p) == 2:
            p[0] = p[1]
        else:
            p[0] = ("TLIST", p[1], p[2])

    # trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
    def p_trailer(self, p):
        """trailer : trailer_arglist
                   | trailer_subscript
        """
        p[0] = p[1]

    def p_trailer_arglist(self, p):
        "trailer_arglist : LPAR arglist RPAR"
        p[0] = ("CALL", p[2])

    def p_trailer_subscript(self, p):
        "trailer_subscript : LBRACK subscript RBRACK"
        p[0] = ("SUBS", p[2])

    # subscript: '.' '.' '.' | test | [test] ':' [test]

    def p_subscript(self, p):
        """subscript : test COLON test
                     | test
        """
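        # the pseudocode's bit ranges appear to use an inclusive end index,
        # hence the "+1" below to convert to Python's exclusive form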
        if len(p) == 4:
            # add one to end
            if isinstance(p[3], ast.Constant):
                end = ast.Constant(p[3].value+1)
            else:
                end = ast.BinOp(p[3], ast.Add(), ast.Constant(1))
            p[0] = [p[1], end]
        else:
            p[0] = [p[1]]

    # testlist: test (',' test)* [',']
    # Contains shift/reduce error

    def p_testlist(self, p):
        """testlist : testlist_multi COMMA
                    | testlist_multi """
        if len(p) == 2:
            p[0] = p[1]
        else:
            # May need to promote singleton to tuple
            if isinstance(p[1], list):
                p[0] = p[1]
            else:
                p[0] = [p[1]]
        # Convert into a tuple?
        if isinstance(p[0], list):
            p[0] = ast.Tuple(p[0])

    def p_testlist_multi(self, p):
        """testlist_multi : testlist_multi COMMA test
                          | test"""
        if len(p) == 2:
            # singleton
            p[0] = p[1]
        else:
            if isinstance(p[1], list):
                p[0] = p[1] + [p[3]]
            else:
                # singleton -> tuple
                p[0] = [p[1], p[3]]

    # test: or_test ['if' or_test 'else' test] | lambdef
    # as I don't support 'and', 'or', and 'not' this works down to 'comparison'

    def p_test(self, p):
        "test : comparison"
        p[0] = p[1]

    # arglist: (argument ',')* (argument [',']| '*' test [',' '**' test]
    #           | '**' test)
    # XXX INCOMPLETE: this doesn't allow the trailing comma

    def p_arglist(self, p):
        """arglist : arglist COMMA argument
                   | argument"""
        if len(p) == 4:
            p[0] = p[1] + [p[3]]
        else:
            p[0] = [p[1]]

    # argument: test [gen_for] | test '=' test  # Really [keyword '='] test
    def p_argument(self, p):
        "argument : test"
        p[0] = p[1]

    def p_error(self, p):
        # print "Error!", repr(p)
        raise SyntaxError(p)


class GardenSnakeParser(PowerParser):
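    """Couples the PowerParser grammar to the IndentLexer and builds the
    ply yacc parser; parse() runs it over a pseudocode string and wraps
    the resulting statement list in an ast.Module.
    """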
    def __init__(self, lexer=None, debug=False, form=None, incl_carry=False):
        self.sd = create_pdecode()
        PowerParser.__init__(self, form, incl_carry)
        self.debug = debug
        if lexer is None:
            lexer = IndentLexer(debug=0)
        self.lexer = lexer
        self.tokens = lexer.tokens
        self.parser = yacc.yacc(module=self, start="file_input_end",
                                debug=debug, write_tables=False)

    def parse(self, code):
        # self.lexer.input(code)
        result = self.parser.parse(code, lexer=self.lexer, debug=self.debug)
        return ast.Module(result)


###### Code generation ######

#from compiler import misc, syntax, pycodegen

class GardenSnakeCompiler(object):
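    """Convenience wrapper: compile() currently just parses the pseudocode
    and returns the AST tree (the bytecode-generation path below is left
    disabled).
    """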
    def __init__(self, debug=False, form=None, incl_carry=False):
        self.parser = GardenSnakeParser(debug=debug, form=form,
                                        incl_carry=incl_carry)

    def compile(self, code, mode="exec", filename="<string>"):
        tree = self.parser.parse(code)
        print("snake")
        pprint(tree)
        return tree
        #misc.set_filename(filename, tree)
        return compile(tree, mode="exec", filename="<string>")
        # syntax.check(tree)
        gen = pycodegen.ModuleCodeGenerator(tree)
        code = gen.getCode()
        return code