# Based on GardenSnake - a parser generator demonstration program
# GardenSnake was released into the Public Domain by Andrew Dalke.

# Portions of this work are derived from Python's Grammar definition
# and may be covered under the Python copyright and license
#
# Andrew Dalke / Dalke Scientific Software, LLC
# 30 August 2006 / Cape Town, South Africa

# Modifications for inclusion in PLY distribution
from pprint import pprint
from ply import lex, yacc
import astor
from copy import deepcopy

from openpower.decoder.power_decoder import create_pdecode
from openpower.decoder.pseudo.lexer import IndentLexer
from openpower.decoder.orderedset import OrderedSet

# I use the Python AST
#from compiler import ast
import ast

# Helper function

regs = ['RA', 'RS', 'RB', 'RC', 'RT']
fregs = ['FRA', 'FRS', 'FRB', 'FRC', 'FRT']

def Assign(autoassign, assignname, left, right, iea_mode):
    names = []
    print("Assign", assignname, left, right)
    if isinstance(left, ast.Name):
        # Single assignment on left
        # XXX when doing IntClass, which will have an "eq" function,
        # this is how to access it
        # eq = ast.Attribute(left, "eq")  # get eq fn
        # return ast.Call(eq, [right], [])  # now call left.eq(right)
        return ast.Assign([ast.Name(left.id, ast.Store())], right)
    elif isinstance(left, ast.Tuple):
        # List of things - make sure they are Name nodes
        names = []
        for child in left.elts:
            if not isinstance(child, ast.Name):
                raise SyntaxError("that assignment not supported")
            names.append(child.id)
        ass_list = [ast.Name(name, ast.Store()) for name in names]
        return ast.Assign([ast.Tuple(ass_list, ast.Store())], right)
    elif isinstance(left, ast.Subscript):
        ls = left.slice
        # XXX changing meaning of "undefined" to a function
        #if (isinstance(ls, ast.Slice) and isinstance(right, ast.Name) and
        #        right.id == 'undefined'):
        #    # undefined needs to be copied the exact same slice
        #    right = ast.Subscript(right, ls, ast.Load())
        #    return ast.Assign([left], right)
        res = ast.Assign([left], right)
        if autoassign and isinstance(ls, ast.Slice):
            # hack to create a variable pre-declared based on a slice.
            # dividend[0:32] = (RA)[0:32] will create
            # dividend = [0] * 32
            # dividend[0:32] = (RA)[0:32]
            # the declaration makes the slice-assignment "work"
            lower, upper, step = ls.lower, ls.upper, ls.step
            print("lower, upper, step", repr(lower), repr(upper), step)
            if not isinstance(lower, ast.Constant) or \
                    not isinstance(upper, ast.Constant):
                return res
            qty = ast.Num(upper.value-lower.value)
            keywords = [ast.keyword(arg='repeat', value=qty)]
            l = [ast.Num(0)]
            right = ast.Call(ast.Name("concat", ast.Load()), l, keywords)
            declare = ast.Assign([ast.Name(assignname, ast.Store())], right)
            return [declare, res]
        return res
        # XXX HMMM probably not needed...
        ls = left.slice
        if isinstance(ls, ast.Slice):
            lower, upper, step = ls.lower, ls.upper, ls.step
            print("slice assign", lower, upper, step)
            if step is None:
                ls = (lower, upper, None)
            else:
                ls = (lower, upper, step)
            ls = ast.Tuple(ls)
        return ast.Call(ast.Name("selectassign", ast.Load()),
                        [left.value, ls, right], [])
    else:
        print("Assign fail")
        raise SyntaxError("Can't do that yet")


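# Illustrative sketch (not part of the original file): the autoassign path
# above pre-declares a target before a slice-assignment.  For a pseudocode
# line such as "dividend[0:64] <- (RA) || (RB)" (assuming "dividend" has not
# been declared yet), Assign() returns two statements roughly equivalent to:
#
#     dividend = concat(0, repeat=64)      # declaration, sized from the slice
#     dividend[0:64] = concat(RA, RB)      # the actual slice-assignment
#
# Non-constant slice bounds skip the declaration and just return the plain
# ast.Assign node.
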
# I implemented INDENT / DEDENT generation as a post-processing filter

# The original lex token stream contains WS and NEWLINE characters.
# WS will only occur before any other tokens on a line.

# I have three filters. One tags tokens by adding two attributes.
# "must_indent" is True if the token must be indented from the
# previous code. The other is "at_line_start" which is True for WS
# and the first non-WS/non-NEWLINE on a line. It flags the check to
# see if the new line has changed indentation level.


# Not using Python's approach because Ply supports precedence

# comparison: expr (comp_op expr)*
# arith_expr: term (('+'|'-') term)*
# term: factor (('*'|'/'|'%'|'//') factor)*
# factor: ('+'|'-'|'~') factor | power
# comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'

def make_le_compare(arg):
    (left, right) = arg
    return ast.Call(ast.Name("le", ast.Load()), (left, right), [])


def make_ge_compare(arg):
    (left, right) = arg
    return ast.Call(ast.Name("ge", ast.Load()), (left, right), [])


def make_lt_compare(arg):
    (left, right) = arg
    return ast.Call(ast.Name("lt", ast.Load()), (left, right), [])


def make_gt_compare(arg):
    (left, right) = arg
    return ast.Call(ast.Name("gt", ast.Load()), (left, right), [])


def make_eq_compare(arg):
    (left, right) = arg
    return ast.Call(ast.Name("eq", ast.Load()), (left, right), [])


def make_ne_compare(arg):
    (left, right) = arg
    return ast.Call(ast.Name("ne", ast.Load()), (left, right), [])


binary_ops = {
    "^": ast.BitXor(),
    "&": ast.BitAnd(),
    "|": ast.BitOr(),
    "+": ast.Add(),
    "-": ast.Sub(),
    "*": ast.Mult(),
    "/": ast.FloorDiv(),
    "%": ast.Mod(),
    "<=": make_le_compare,
    ">=": make_ge_compare,
    "<": make_lt_compare,
    ">": make_gt_compare,
    "=": make_eq_compare,
    "!=": make_ne_compare,
}
unary_ops = {
    "+": ast.UAdd(),
    "-": ast.USub(),
    "¬": ast.Invert(),
}


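# Illustrative note (not in the original source): comparison operators in the
# pseudocode are not turned into ast.Compare nodes; they become calls to
# helper functions (le/ge/lt/gt/eq/ne) that are expected to exist in the
# execution namespace and to return SelectableInt-style results.  So, as a
# sketch, "RA <= RB" ends up as the equivalent of:
#
#     le(RA, RB)
#
# rather than a native Python "RA <= RB" comparison.
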
def check_concat(node):  # checks if the comparison is already a concat
    print("check concat", node)
    if not isinstance(node, ast.Call):
        return [node]
    print("func", node.func.id)
    if node.func.id != 'concat':
        return [node]
    if node.keywords:  # a repeated list-constant, don't optimise
        return [node]
    return node.args


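# Illustrative sketch (not in the original source): check_concat lets chained
# "||" operators collapse into a single concat() call instead of nesting.
# For example, "a || b || c" would otherwise build concat(concat(a, b), c);
# because the inner call is unpacked into its arguments, the parser emits the
# flat form concat(a, b, c).  Calls carrying a "repeat" keyword are left
# alone, since those represent replicated constants, not plain joins.
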
# identify SelectableInt pattern [something] * N
# must return concat(something, repeat=N)
def identify_sint_mul_pattern(p):
    if p[2] != '*':  # multiply
        return False
    if not isinstance(p[3], ast.Constant):  # rhs = Num
        return False
    if not isinstance(p[1], ast.List):  # lhs is a list
        return False
    l = p[1].elts
    if len(l) != 1:  # lhs is a list of length 1
        return False
    return True  # yippee!


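# Illustrative sketch (not in the original source): this pattern covers the
# pseudocode idiom for replicating a single bit or value, e.g. "[0] * 16".
# When p_comparison detects it, the multiply is not emitted as ast.Mult;
# instead it becomes roughly:
#
#     concat(0, repeat=16)
#
# which the runtime concat() helper is expected to expand into a 16-bit value.
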
def apply_trailer(atom, trailer, read_regs):
    if trailer[0] == "TLIST":
        # assume depth of one
        atom = apply_trailer(atom, trailer[1], read_regs)
        trailer = trailer[2]
    if trailer[0] == "CALL":
        #p[0] = ast.Expr(ast.Call(p[1], p[2][1], []))
        for arg in trailer[1]:
            if isinstance(arg, ast.Name):
                name = arg.id
                if name in regs + fregs:
                    read_regs.add(name)
        return ast.Call(atom, trailer[1], [])
        # if p[1].id == 'print':
        #     p[0] = ast.Printnl(ast.Tuple(p[2][1]), None, None)
        # else:
        #     p[0] = ast.CallFunc(p[1], p[2][1], None, None)
    else:
        print("subscript atom", trailer[1])
        #raise AssertionError("not implemented %s" % p[2][0])
        subs = trailer[1]
        if len(subs) == 1:
            idx = subs[0]
            if isinstance(idx, ast.Name) and idx.id in regs + fregs:
                read_regs.add(idx.id)
            if isinstance(idx, ast.Name) and idx.id in regs:
                print("single atom subscript, underscored", idx.id)
                idx = ast.Name("_%s" % idx.id, ast.Load())
        else:
            idx = ast.Slice(subs[0], subs[1], None)
        # if isinstance(atom, ast.Name) and atom.id == 'CR':
        #    atom.id = 'CR' # bad hack
        #print ("apply_trailer Subscript", atom.id, idx)
        return ast.Subscript(atom, idx, ast.Load())

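# Illustrative sketch (not in the original source): apply_trailer handles the
# two trailer shapes the grammar produces.  For a call such as "EXTS64(RB)"
# it returns ast.Call(EXTS64, [RB], []) and records RB in read_regs, because
# any call argument that is a known GPR/FPR operand name counts as a register
# read.  For a subscript whose index is itself a register name (for example
# something[RA]) the index is noted in read_regs and rewritten to the
# underscored form "_RA", so the register *number* rather than the register's
# value is used for indexing.
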
########## Parser (tokens -> AST) ######

# also part of Ply
#import yacc

# https://www.mathcs.emory.edu/~valerie/courses/fall10/155/resources/op_precedence.html
# python operator precedence
# Highest precedence at top, lowest at bottom.
# Operators in the same box evaluate left to right.
#
# Operator                Description
# ()                      Parentheses (grouping)
# f(args...)              Function call
# x[index:index]          Slicing
# x[index]                Subscription
# x.attribute             Attribute reference
# **                      Exponentiation
# ~x                      Bitwise not
# +x, -x                  Positive, negative
# *, /, %                 mul, div, remainder
# +, -                    Addition, subtraction
# <<, >>                  Bitwise shifts
# &                       Bitwise AND
# ^                       Bitwise XOR
# |                       Bitwise OR
# in, not in, is, is not, <, <=, >, >=, <>, !=, ==  comp, membership, ident
# not x                   Boolean NOT
# and                     Boolean AND
# or                      Boolean OR
# lambda                  Lambda expression


class PowerParser:

    precedence = (
        ("left", "EQ", "NE", "GT", "LT", "LE", "GE", "LTU", "GTU"),
        ("left", "BITOR"),
        ("left", "BITXOR"),
        ("left", "BITAND"),
        ("left", "PLUS", "MINUS"),
        ("left", "MULT", "DIV", "MOD"),
        ("left", "INVERT"),
    )

    def __init__(self, form, include_carry_in_write=False):
        self.include_ca_in_write = include_carry_in_write
        self.gprs = {}
        form = self.sd.sigforms[form]
        print(form)
        formkeys = form._asdict().keys()
        self.declared_vars = set()
        for rname in regs + fregs:
            self.gprs[rname] = None
            self.declared_vars.add(rname)
        self.available_op_fields = set()
        for k in formkeys:
            if k not in self.gprs:
                if k == 'SPR':  # sigh, lower-case to not conflict
                    k = k.lower()
                self.available_op_fields.add(k)
        self.op_fields = OrderedSet()
        self.read_regs = OrderedSet()
        self.uninit_regs = OrderedSet()
        self.write_regs = OrderedSet()
        self.special_regs = OrderedSet()  # see p_atom_name

    # The grammar comments come from Python's Grammar/Grammar file

    # NB: compound_stmt in single_input is followed by extra NEWLINE!
    # file_input: (NEWLINE | stmt)* ENDMARKER

    def p_file_input_end(self, p):
        """file_input_end : file_input ENDMARKER"""
        print("end", p[1])
        p[0] = p[1]

    def p_file_input(self, p):
        """file_input : file_input NEWLINE
                      | file_input stmt
                      | NEWLINE
                      | stmt"""
        if isinstance(p[len(p)-1], str):
            if len(p) == 3:
                p[0] = p[1]
            else:
                p[0] = []  # p == 2 --> only a blank line
        else:
            if len(p) == 3:
                p[0] = p[1] + p[2]
            else:
                p[0] = p[1]

    # funcdef: [decorators] 'def' NAME parameters ':' suite
    # ignoring decorators

    def p_funcdef(self, p):
        "funcdef : DEF NAME parameters COLON suite"
        p[0] = ast.FunctionDef(p[2], p[3], p[5], ())

    # parameters: '(' [varargslist] ')'
    def p_parameters(self, p):
        """parameters : LPAR RPAR
                      | LPAR varargslist RPAR"""
        if len(p) == 3:
            args = []
        else:
            args = p[2]
        p[0] = ast.arguments(args=args, vararg=None, kwarg=None, defaults=[])

    # varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] |
    #                                       '**' NAME) |
    # highly simplified

    def p_varargslist(self, p):
        """varargslist : varargslist COMMA NAME
                       | NAME"""
        if len(p) == 4:
            p[0] = p[1] + [p[3]]
        else:
            p[0] = [p[1]]

    # stmt: simple_stmt | compound_stmt
    def p_stmt_simple(self, p):
        """stmt : simple_stmt"""
        # simple_stmt is a list
        p[0] = p[1]

    def p_stmt_compound(self, p):
        """stmt : compound_stmt"""
        p[0] = [p[1]]

    # simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
    def p_simple_stmt(self, p):
        """simple_stmt : small_stmts NEWLINE
                       | small_stmts SEMICOLON NEWLINE"""
        p[0] = p[1]

    def p_small_stmts(self, p):
        """small_stmts : small_stmts SEMICOLON small_stmt
                       | small_stmt"""
        if len(p) == 4:
            p[0] = p[1] + [p[3]]
        elif isinstance(p[1], list):
            p[0] = p[1]
        else:
            p[0] = [p[1]]

    # small_stmt: expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
    #             import_stmt | global_stmt | exec_stmt | assert_stmt
    def p_small_stmt(self, p):
        """small_stmt : flow_stmt
                      | break_stmt
                      | expr_stmt"""
        if isinstance(p[1], ast.Call):
            p[0] = ast.Expr(p[1])
        elif isinstance(p[1], ast.Name) and p[1].id == 'TRAP':
            # TRAP needs to actually be a function
            name = ast.Name("self", ast.Load())
            name = ast.Attribute(name, "TRAP", ast.Load())
            p[0] = ast.Call(name, [], [])
        else:
            p[0] = p[1]

    # expr_stmt: testlist (augassign (yield_expr|testlist) |
    #                      ('=' (yield_expr|testlist))*)
    # augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
    #             '<<=' | '>>=' | '**=' | '//=')
    def p_expr_stmt(self, p):
        """expr_stmt : testlist ASSIGNEA testlist
                     | testlist ASSIGN testlist
                     | testlist """
        print("expr_stmt", p)
        if len(p) == 2:
            # a list of expressions
            #p[0] = ast.Discard(p[1])
            p[0] = p[1]
        else:
            iea_mode = p[2] == '<-iea'
            name = None
            autoassign = False
            if isinstance(p[1], ast.Name):
                name = p[1].id
            elif isinstance(p[1], ast.Subscript):
                if isinstance(p[1].value, ast.Name):
                    name = p[1].value.id
                    if name in self.gprs:
                        # add to list of uninitialised
                        self.uninit_regs.add(name)
                autoassign = (name not in self.declared_vars and
                              name not in self.special_regs)
            elif isinstance(p[1], ast.Call) and p[1].func.id in \
                    ['GPR', 'FPR', 'SPR']:
                print(astor.dump_tree(p[1]))
                # replace GPR(x) with GPR[x]
                idx = p[1].args[0].id
                ridx = ast.Name("_%s" % idx, ast.Load())
                p[1] = ast.Subscript(p[1].func, ridx, ast.Load())
                self.read_regs.add(idx)  # add to list of regs to read
            elif isinstance(p[1], ast.Call) and p[1].func.id == 'MEM':
                print("mem assign")
                print(astor.dump_tree(p[1]))
                p[1].func.id = "memassign"  # change function name to set
                p[1].args.append(p[3])
                p[0] = p[1]
                print("mem rewrite")
                print(astor.dump_tree(p[0]))
                return
            else:
                print("help, help")
                print(astor.dump_tree(p[1]))
            print("expr assign", name, p[1], "to", p[3])
            if isinstance(p[3], ast.Name):
                toname = p[3].id
                if toname in self.gprs:
                    self.read_regs.add(toname)
            if name and name in self.gprs:
                self.write_regs.add(name)  # add to list of regs to write
            p[0] = Assign(autoassign, name, p[1], p[3], iea_mode)
            if name:
                self.declared_vars.add(name)

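    # Illustrative sketch (not in the original source): for a pseudocode
    # assignment such as "RT <- (RA) + (RB)", this rule records RT in
    # write_regs (the target is a known GPR name) and leaves RA/RB to be
    # picked up as reads when the parenthesised atoms are reduced.  For a
    # register-file store such as "GPR(r) <- result", the call shape is
    # rewritten to the subscript GPR[_r] and "r" is added to read_regs,
    # since the register *number* has to be fetched from the operand field.
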
    def p_flow_stmt(self, p):
        "flow_stmt : return_stmt"
        p[0] = p[1]

    # return_stmt: 'return' [testlist]
    def p_return_stmt(self, p):
        "return_stmt : RETURN testlist"
        p[0] = ast.Return(p[2])

    def p_compound_stmt(self, p):
        """compound_stmt : if_stmt
                         | while_stmt
                         | switch_stmt
                         | for_stmt
                         | funcdef
        """
        p[0] = p[1]

    def p_break_stmt(self, p):
        """break_stmt : BREAK
        """
        p[0] = ast.Break()

    def p_for_stmt(self, p):
        """for_stmt : FOR atom EQ test TO test COLON suite
                    | DO atom EQ test TO test COLON suite
        """
        start = p[4]
        end = p[6]
        if start.value > end.value:  # start greater than end, must go -ve
            # auto-subtract-one (sigh) due to python range
            end = ast.BinOp(p[6], ast.Add(), ast.Constant(-1))
            arange = [start, end, ast.Constant(-1)]
        else:
            # auto-add-one (sigh) due to python range
            end = ast.BinOp(p[6], ast.Add(), ast.Constant(1))
            arange = [start, end]
        it = ast.Call(ast.Name("range", ast.Load()), arange, [])
        p[0] = ast.For(p[2], it, p[8], [])

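    # Illustrative sketch (not in the original source): Power ISA pseudocode
    # loops are inclusive of both bounds, so the rule adjusts the end value
    # for Python's half-open range().  "for i = 0 to 7" becomes
    # for i in range(0, 7 + 1), i.e. 0..7, while a downward loop such as
    # "do i = 7 to 0" becomes for i in range(7, 0 - 1, -1), i.e. 7..0.
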
    def p_while_stmt(self, p):
        """while_stmt : DO WHILE test COLON suite ELSE COLON suite
                      | DO WHILE test COLON suite
        """
        if len(p) == 6:
            p[0] = ast.While(p[3], p[5], [])
        else:
            p[0] = ast.While(p[3], p[5], p[8])

    def p_switch_stmt(self, p):
        """switch_stmt : SWITCH LPAR atom RPAR COLON NEWLINE INDENT switches DEDENT
        """
        switchon = p[3]
        print("switch stmt")
        print(astor.dump_tree(p[1]))

        cases = []
        current_cases = []  # for deferral
        for (case, suite) in p[8]:
            print("for", case, suite)
            if suite is None:
                for c in case:
                    current_cases.append(ast.Num(c))
                continue
            if case == 'default':  # last
                break
            for c in case:
                current_cases.append(ast.Num(c))
            print("cases", current_cases)
            compare = ast.Compare(switchon, [ast.In()],
                                  [ast.List(current_cases, ast.Load())])
            current_cases = []
            cases.append((compare, suite))

        print("ended", case, current_cases)
        if case == 'default':
            if current_cases:
                compare = ast.Compare(switchon, [ast.In()],
                                      [ast.List(current_cases, ast.Load())])
                cases.append((compare, suite))
            cases.append((None, suite))

        cases.reverse()
        res = []
        for compare, suite in cases:
            print("after rev", compare, suite)
            if compare is None:
                assert len(res) == 0, "last case should be default"
                res = suite
            else:
                if not isinstance(res, list):
                    res = [res]
                res = ast.If(compare, suite, res)
        p[0] = res

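    # Illustrative sketch (not in the original source): a pseudocode switch
    # such as
    #
    #     switch (M)
    #         case (0): A <- X
    #         case (1, 2): A <- Y
    #         default: A <- Z
    #
    # is lowered to nested ast.If nodes built back-to-front, roughly:
    #
    #     if M in [0]: A = X
    #     elif M in [1, 2]: A = Y
    #     else: A = Z
    #
    # "fallthrough" suites defer their case numbers onto the next case's list.
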
    def p_switches(self, p):
        """switches : switch_list switch_default
                    | switch_default
        """
        if len(p) == 3:
            p[0] = p[1] + [p[2]]
        else:
            p[0] = [p[1]]

    def p_switch_list(self, p):
        """switch_list : switch_case switch_list
                       | switch_case
        """
        if len(p) == 3:
            p[0] = [p[1]] + p[2]
        else:
            p[0] = [p[1]]

    def p_switch_case(self, p):
        """switch_case : CASE LPAR atomlist RPAR COLON suite
        """
        # XXX bad hack
        if isinstance(p[6][0], ast.Name) and p[6][0].id == 'fallthrough':
            p[6] = None
        p[0] = (p[3], p[6])

    def p_switch_default(self, p):
        """switch_default : DEFAULT COLON suite
        """
        p[0] = ('default', p[3])

    def p_atomlist(self, p):
        """atomlist : atom COMMA atomlist
                    | atom
        """
        assert isinstance(p[1], ast.Constant), "case must be numbers"
        if len(p) == 4:
            p[0] = [p[1].value] + p[3]
        else:
            p[0] = [p[1].value]

    def p_if_stmt(self, p):
        """if_stmt : IF test COLON suite ELSE COLON if_stmt
                   | IF test COLON suite ELSE COLON suite
                   | IF test COLON suite
        """
        if len(p) == 8 and isinstance(p[7], ast.If):
            p[0] = ast.If(p[2], p[4], [p[7]])
        elif len(p) == 5:
            p[0] = ast.If(p[2], p[4], [])
        else:
            p[0] = ast.If(p[2], p[4], p[7])

    def p_suite(self, p):
        """suite : simple_stmt
                 | NEWLINE INDENT stmts DEDENT"""
        if len(p) == 2:
            p[0] = p[1]
        else:
            p[0] = p[3]

    def p_stmts(self, p):
        """stmts : stmts stmt
                 | stmt"""
        if len(p) == 3:
            p[0] = p[1] + p[2]
        else:
            p[0] = p[1]

    def p_comparison(self, p):
        """comparison : comparison PLUS comparison
                      | comparison MINUS comparison
                      | comparison MULT comparison
                      | comparison DIV comparison
                      | comparison MOD comparison
                      | comparison EQ comparison
                      | comparison NE comparison
                      | comparison LE comparison
                      | comparison GE comparison
                      | comparison LTU comparison
                      | comparison GTU comparison
                      | comparison LT comparison
                      | comparison GT comparison
                      | comparison BITOR comparison
                      | comparison BITXOR comparison
                      | comparison BITAND comparison
                      | PLUS comparison
                      | comparison MINUS
                      | INVERT comparison
                      | comparison APPEND comparison
                      | power"""
        if len(p) == 4:
            print(list(p))
            if p[2] == '<u':
                p[0] = ast.Call(ast.Name("ltu", ast.Load()), (p[1], p[3]), [])
            elif p[2] == '>u':
                p[0] = ast.Call(ast.Name("gtu", ast.Load()), (p[1], p[3]), [])
            elif p[2] == '||':
                l = check_concat(p[1]) + check_concat(p[3])
                p[0] = ast.Call(ast.Name("concat", ast.Load()), l, [])
            elif p[2] in ['/', '%']:
                # bad hack: if % or / used anywhere other than div/mod ops,
                # do % or /. however if the argument names are "dividend"
                # we must call the special trunc_divs and trunc_rems functions
                l, r = p[1], p[3]
                # actual call will be "dividend / divisor" - just check
                # LHS name
                # XXX DISABLE BAD HACK (False)
                if False and isinstance(l, ast.Name) and l.id == 'dividend':
                    if p[2] == '/':
                        fn = 'trunc_divs'
                    else:
                        fn = 'trunc_rems'
                    # return "function trunc_xxx(l, r)"
                    p[0] = ast.Call(ast.Name(fn, ast.Load()), (l, r), [])
                else:
                    # return "l {binop} r"
                    p[0] = ast.BinOp(p[1], binary_ops[p[2]], p[3])
            elif p[2] in ['<', '>', '=', '<=', '>=', '!=']:
                p[0] = binary_ops[p[2]]((p[1], p[3]))
            elif identify_sint_mul_pattern(p):
                keywords = [ast.keyword(arg='repeat', value=p[3])]
                l = p[1].elts
                p[0] = ast.Call(ast.Name("concat", ast.Load()), l, keywords)
            else:
                p[0] = ast.BinOp(p[1], binary_ops[p[2]], p[3])
        elif len(p) == 3:
            if isinstance(p[2], str) and p[2] == '-':
                p[0] = ast.UnaryOp(unary_ops[p[2]], p[1])
            else:
                p[0] = ast.UnaryOp(unary_ops[p[1]], p[2])
        else:
            p[0] = p[1]

    # power: atom trailer* ['**' factor]
    # trailers enable function calls (and subscripts).
    # so this is 'trailerlist'
    def p_power(self, p):
        """power : atom
                 | atom trailerlist"""
        if len(p) == 2:
            print("power dump atom notrailer")
            print(astor.dump_tree(p[1]))
            p[0] = p[1]
        else:
            print("power dump atom")
            print(astor.dump_tree(p[1]))
            print("power dump trailerlist")
            print(astor.dump_tree(p[2]))
            p[0] = apply_trailer(p[1], p[2], self.read_regs)
            if isinstance(p[1], ast.Name):
                name = p[1].id
                if name in regs + fregs:
                    self.read_regs.add(name)

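    # Illustrative sketch (not in the original source): together with
    # apply_trailer this keeps the instruction's read list up to date.
    # Given "ROTL64(RS, n)", the atom RS appears as a call argument, so
    # apply_trailer adds RS to self.read_regs; given "RA[0:31]", the atom
    # RA is itself a register name, so the check above adds RA.  Either way
    # the register is recorded as read without the pseudocode having to
    # declare it.
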
    def p_atom_name(self, p):
        """atom : NAME"""
        name = p[1]
        if name in self.available_op_fields:
            self.op_fields.add(name)
        if name == 'overflow':
            self.write_regs.add(name)
        if self.include_ca_in_write:
            if name in ['CA', 'CA32']:
                self.write_regs.add(name)
        if name in ['CR', 'LR', 'CTR', 'TAR', 'FPSCR', 'MSR', 'SVSTATE']:
            self.special_regs.add(name)
            self.write_regs.add(name)  # and add to list to write
        p[0] = ast.Name(id=name, ctx=ast.Load())

    def p_atom_number(self, p):
        """atom : BINARY
                | NUMBER
                | HEX
                | STRING"""
        p[0] = ast.Constant(p[1])

    # '[' [listmaker] ']' |

    def p_atom_listmaker(self, p):
        """atom : LBRACK listmaker RBRACK"""
        p[0] = p[2]

    def p_listmaker(self, p):
        """listmaker : test COMMA listmaker
                     | test
        """
        if len(p) == 2:
            p[0] = ast.List([p[1]], ast.Load())
        else:
            p[0] = ast.List([p[1]] + p[3].elts, ast.Load())

    def p_atom_tuple(self, p):
        """atom : LPAR testlist RPAR"""
        print("tuple", p[2])
        print("astor dump")
        print(astor.dump_tree(p[2]))

        if isinstance(p[2], ast.Name):
            name = p[2].id
            print("tuple name", name)
            if name in self.gprs:
                self.read_regs.add(name)  # add to list of regs to read
                #p[0] = ast.Subscript(ast.Name("GPR", ast.Load()), ast.Str(p[2].id))
                # return
            p[0] = p[2]
        elif isinstance(p[2], ast.BinOp):
            if isinstance(p[2].left, ast.Name) and \
                    isinstance(p[2].right, ast.Constant) and \
                    p[2].right.value == 0 and \
                    p[2].left.id in self.gprs:
                rid = p[2].left.id
                self.read_regs.add(rid)  # add to list of regs to read
                # create special call to GPR.getz or FPR.getz
                if rid in fregs:
                    gprz = ast.Name("FPR", ast.Load())
                else:
                    gprz = ast.Name("GPR", ast.Load())
                # get testzero function
                gprz = ast.Attribute(gprz, "getz", ast.Load())
                # *sigh* see class GPR. we need index itself not reg value
                ridx = ast.Name("_%s" % rid, ast.Load())
                p[0] = ast.Call(gprz, [ridx], [])
                print("tree", astor.dump_tree(p[0]))
            else:
                p[0] = p[2]
        else:
            p[0] = p[2]

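    # Illustrative sketch (not in the original source): the Power ISA
    # pseudocode idiom "(RA|0)" means "the contents of RA, or zero if the RA
    # field is 0".  The BinOp branch above rewrites it into a call roughly
    # equivalent to GPR.getz(_RA) (FPR.getz(_FRA) for the float registers),
    # and records RA as a register the instruction reads.  A plain "(RA)"
    # just records the read and passes the Name node through unchanged.
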
    def p_trailerlist(self, p):
        """trailerlist : trailer trailerlist
                       | trailer
        """
        if len(p) == 2:
            p[0] = p[1]
        else:
            p[0] = ("TLIST", p[1], p[2])

    # trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
    def p_trailer(self, p):
        """trailer : trailer_arglist
                   | trailer_subscript
        """
        p[0] = p[1]

    def p_trailer_arglist(self, p):
        "trailer_arglist : LPAR arglist RPAR"
        p[0] = ("CALL", p[2])

    def p_trailer_subscript(self, p):
        "trailer_subscript : LBRACK subscript RBRACK"
        p[0] = ("SUBS", p[2])

    # subscript: '.' '.' '.' | test | [test] ':' [test]

    def p_subscript(self, p):
        """subscript : test COLON test
                     | test
        """
        if len(p) == 4:
            # add one to end
            if isinstance(p[3], ast.Constant):
                end = ast.Constant(p[3].value+1)
            else:
                end = ast.BinOp(p[3], ast.Add(), ast.Constant(1))
            p[0] = [p[1], end]
        else:
            p[0] = [p[1]]

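    # Illustrative sketch (not in the original source): pseudocode bit-ranges
    # are inclusive, Python slices are not, hence the "+ 1" above.  A field
    # select such as "RS[32:63]" therefore reaches apply_trailer as the pair
    # [32, 64], i.e. Python's RS[32:64], covering bits 32 through 63.
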
    # testlist: test (',' test)* [',']
    # Contains shift/reduce error

    def p_testlist(self, p):
        """testlist : testlist_multi COMMA
                    | testlist_multi """
        if len(p) == 2:
            p[0] = p[1]
        else:
            # May need to promote singleton to tuple
            if isinstance(p[1], list):
                p[0] = p[1]
            else:
                p[0] = [p[1]]
        # Convert into a tuple?
        if isinstance(p[0], list):
            p[0] = ast.Tuple(p[0])

    def p_testlist_multi(self, p):
        """testlist_multi : testlist_multi COMMA test
                          | test"""
        if len(p) == 2:
            # singleton
            p[0] = p[1]
        else:
            if isinstance(p[1], list):
                p[0] = p[1] + [p[3]]
            else:
                # singleton -> tuple
                p[0] = [p[1], p[3]]

    # test: or_test ['if' or_test 'else' test] | lambdef
    # as I don't support 'and', 'or', and 'not' this works down to 'comparison'

    def p_test(self, p):
        "test : comparison"
        p[0] = p[1]

    # arglist: (argument ',')* (argument [',']| '*' test [',' '**' test]
    #                           | '**' test)
    # XXX INCOMPLETE: this doesn't allow the trailing comma

    def p_arglist(self, p):
        """arglist : arglist COMMA argument
                   | argument"""
        if len(p) == 4:
            p[0] = p[1] + [p[3]]
        else:
            p[0] = [p[1]]

    # argument: test [gen_for] | test '=' test  # Really [keyword '='] test
    def p_argument(self, p):
        "argument : test"
        p[0] = p[1]

    def p_error(self, p):
        # print "Error!", repr(p)
        raise SyntaxError(p)


class GardenSnakeParser(PowerParser):
    def __init__(self, lexer=None, debug=False, form=None, incl_carry=False):
        self.sd = create_pdecode()
        PowerParser.__init__(self, form, incl_carry)
        self.debug = debug
        if lexer is None:
            lexer = IndentLexer(debug=0)
        self.lexer = lexer
        self.tokens = lexer.tokens
        self.parser = yacc.yacc(module=self, start="file_input_end",
                                debug=debug, write_tables=False)

    def parse(self, code):
        # self.lexer.input(code)
        result = self.parser.parse(code, lexer=self.lexer, debug=self.debug)
        return ast.Module(result)


###### Code generation ######

#from compiler import misc, syntax, pycodegen

_CACHED_PARSERS = {}
_CACHE_PARSERS = True


class GardenSnakeCompiler(object):
    def __init__(self, debug=False, form=None, incl_carry=False):
        if _CACHE_PARSERS:
            try:
                parser = _CACHED_PARSERS[debug, form, incl_carry]
            except KeyError:
                parser = GardenSnakeParser(debug=debug, form=form,
                                           incl_carry=incl_carry)
                _CACHED_PARSERS[debug, form, incl_carry] = parser

            self.parser = deepcopy(parser)
        else:
            self.parser = GardenSnakeParser(debug=debug, form=form,
                                            incl_carry=incl_carry)

    def compile(self, code, mode="exec", filename="<string>"):
        tree = self.parser.parse(code)
        print("snake")
        pprint(tree)
        return tree
        # unreachable: left over from the original GardenSnake code generator
        #misc.set_filename(filename, tree)
        #return compile(tree, mode="exec", filename="<string>")
        # syntax.check(tree)
        #gen = pycodegen.ModuleCodeGenerator(tree)
        #code = gen.getCode()
        #return code
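

# Illustrative usage sketch (not part of the original file; the pseudocode
# line and form name are made up for the example): the compiler is given a
# form name so the parser knows which operand fields exist, and it returns a
# plain ast.Module that the caller fixes up and compiles itself:
#
#     gsc = GardenSnakeCompiler(form="X")
#     tree = gsc.compile("RT <- (RA) + (RB)\n", mode="exec", filename="add")
#     # tree is an ast.Module; gsc.parser.read_regs / write_regs now list
#     # the registers the instruction reads and writes.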