1 # Based on GardenSnake - a parser generator demonstration program
2 # GardenSnake was released into the Public Domain by Andrew Dalke.
4 # Portions of this work are derived from Python's Grammar definition
5 # and may be covered under the Python copyright and license
7 # Andrew Dalke / Dalke Scientific Software, LLC
8 # 30 August 2006 / Cape Town, South Africa
10 # Modifications for inclusion in PLY distribution
11 from pprint
import pprint
12 from ply
import lex
, yacc
14 from copy
import deepcopy
16 from openpower
.decoder
.power_decoder
import create_pdecode
17 from openpower
.decoder
.pseudo
.lexer
import IndentLexer
18 from openpower
.decoder
.orderedset
import OrderedSet
20 # I use the Python AST
21 #from compiler import ast
# integer and floating-point register operand names recognised by the
# parser; used throughout to track which registers a pseudocode body
# reads and writes
regs = ['RA', 'RS', 'RB', 'RC', 'RT']
# NOTE: the original listed 'FRS' twice; the duplicate is dropped here.
# fregs is only used for membership tests and iteration, so behaviour
# is unchanged.
fregs = ['FRA', 'FRS', 'FRB', 'FRC', 'FRT']
# helper names which must NOT be rewritten into "self.xxx" calls
SPECIAL_HELPERS = {'concat', 'MEM', 'GPR', 'FPR', 'SPR'}
def Assign(autoassign, assignname, left, right, iea_mode):
    """Build an AST assignment node for ``left <- right``.

    Three target shapes are handled: a plain name, a tuple of names,
    and a subscript (slice) assignment.  When *autoassign* is set and
    the target is a slice, an extra declaration statement is emitted
    first so that the slice-assignment "works" on a fresh variable.

    NOTE(review): this text was reconstructed from a garbled extraction;
    missing lines (``names = []``, ``ls = left.slice``, the returns)
    were restored from the surrounding, visible uses.
    """
    print("Assign", autoassign, assignname, left, right)
    if isinstance(left, ast.Name):
        # Single assignment on left
        # XXX when doing IntClass, which will have an "eq" function,
        # this is how to access it
        # eq = ast.Attribute(left, "eq")   # get eq fn
        # return ast.Call(eq, [right], [])   # now call left.eq(right)
        return ast.Assign([ast.Name(left.id, ast.Store())], right)
    elif isinstance(left, ast.Tuple):
        # List of things - make sure they are Name nodes
        names = []
        for child in left.getChildren():
            if not isinstance(child, ast.Name):
                raise SyntaxError("that assignment not supported")
            names.append(child.name)
        ass_list = [ast.AssName(name, 'OP_ASSIGN') for name in names]
        return ast.Assign([ast.AssTuple(ass_list)], right)
    elif isinstance(left, ast.Subscript):
        ls = left.slice
        # XXX changing meaning of "undefined" to a function
        # if (isinstance(ls, ast.Slice) and isinstance(right, ast.Name) and
        #         right.id == 'undefined'):
        #     # undefined needs to be copied the exact same slice
        #     right = ast.Subscript(right, ls, ast.Load())
        #     return ast.Assign([left], right)
        res = ast.Assign([left], right)
        if autoassign and isinstance(ls, ast.Slice):
            # hack to create a variable pre-declared based on a slice.
            # dividend[0:32] = (RA)[0:32] will create
            #     dividend = concat(0, repeat=32-0)
            #     dividend[0:32] = (RA)[0:32]
            # the declaration makes the slice-assignment "work"
            lower, upper, step = ls.lower, ls.upper, ls.step
            print("lower, upper, step", repr(lower), repr(upper), step)
            # XXX relax constraint that indices on auto-assign have
            # to be constants x[0:32]
            # if not isinstance(lower, ast.Constant) or \
            #         not isinstance(upper, ast.Constant):
            #     return res
            qty = ast.BinOp(upper, binary_ops['-'], lower)
            keywords = [ast.keyword(arg='repeat', value=qty)]
            l = [ast.Num(0)]
            right = ast.Call(ast.Name("concat", ast.Load()), l, keywords)
            declare = ast.Assign([ast.Name(assignname, ast.Store())], right)
            return [declare, res]
        return res
        # XXX HMMM probably not needed...
        # (unreachable: kept from the original for reference)
        ls = left.slice
        if isinstance(ls, ast.Slice):
            lower, upper, step = ls.lower, ls.upper, ls.step
            print("slice assign", lower, upper, step)
            if step is None:
                ls = (lower, upper, None)
            else:
                ls = (lower, upper, step)
        return ast.Call(ast.Name("selectassign", ast.Load()),
                        [left.value, ls, right], [])
    else:
        raise SyntaxError("Can't do that yet")
96 # I implemented INDENT / DEDENT generation as a post-processing filter
98 # The original lex token stream contains WS and NEWLINE characters.
99 # WS will only occur before any other tokens on a line.
101 # I have three filters. One tags tokens by adding two attributes.
102 # "must_indent" is True if the token must be indented from the
103 # previous code. The other is "at_line_start" which is True for WS
# and the first non-WS/non-NEWLINE on a line.  It flags the check to
# see if the new line has changed indentation level.
# Not using Python's approach because Ply supports precedence
110 # comparison: expr (comp_op expr)*
111 # arith_expr: term (('+'|'-') term)*
112 # term: factor (('*'|'/'|'%'|'//') factor)*
113 # factor: ('+'|'-'|'~') factor | power
114 # comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
def make_le_compare(arg):
    """Build an AST call to the runtime comparison helper ``le(left, right)``."""
    (left, right) = arg  # restored: unpack line lost in the garbled extraction
    return ast.Call(ast.Name("le", ast.Load()), (left, right), [])
def make_ge_compare(arg):
    """Build an AST call to the runtime comparison helper ``ge(left, right)``."""
    (left, right) = arg  # restored: unpack line lost in the garbled extraction
    return ast.Call(ast.Name("ge", ast.Load()), (left, right), [])
def make_lt_compare(arg):
    """Build an AST call to the runtime comparison helper ``lt(left, right)``."""
    (left, right) = arg  # restored: unpack line lost in the garbled extraction
    return ast.Call(ast.Name("lt", ast.Load()), (left, right), [])
def make_gt_compare(arg):
    """Build an AST call to the runtime comparison helper ``gt(left, right)``."""
    (left, right) = arg  # restored: unpack line lost in the garbled extraction
    return ast.Call(ast.Name("gt", ast.Load()), (left, right), [])
def make_eq_compare(arg):
    """Build an AST call to the runtime comparison helper ``eq(left, right)``."""
    (left, right) = arg  # restored: unpack line lost in the garbled extraction
    return ast.Call(ast.Name("eq", ast.Load()), (left, right), [])
def make_ne_compare(arg):
    """Build an AST call to the runtime comparison helper ``ne(left, right)``."""
    (left, right) = arg  # restored: unpack line lost in the garbled extraction
    return ast.Call(ast.Name("ne", ast.Load()), (left, right), [])
155 "<=": make_le_compare
,
156 ">=": make_ge_compare
,
157 "<": make_lt_compare
,
158 ">": make_gt_compare
,
159 "=": make_eq_compare
,
160 "!=": make_ne_compare
,
def check_concat(node):  # checks if the comparison is already a concat
    """Flatten *node* for use in a concat.

    If *node* is already a plain ``concat(...)`` call (no keywords),
    return its argument list; otherwise wrap the node in a one-element
    list so callers can simply use ``+`` to join results.
    """
    print("check concat", node)
    if not isinstance(node, ast.Call):
        return [node]
    print("func", node.func.id)
    if node.func.id != 'concat':
        return [node]
    if node.keywords:  # a repeated list-constant, don't optimise
        return [node]
    return node.args
181 # identify SelectableInt pattern [something] * N
182 # must return concat(something, repeat=N)
183 # here is a TEMPORARY hack to support minimal expressions
184 # such as (XLEN-16), looking for ast.BinOp
185 # have to keep an eye on this
def identify_sint_mul_pattern(p):
    """here we are looking for patterns of this type:
        [item] * something
    these must specifically be returned as concat(item, repeat=something)
    """
    # print ("identify_sint_mul_pattern")
    # print("  ", astor.dump_tree(pat))
    if p[2] != '*':  # multiply
        return False
    if (not isinstance(p[3], ast.Constant) and   # rhs = Num
            not isinstance(p[3], ast.BinOp) and  # rhs = (XLEN-something)
            not isinstance(p[3], ast.Attribute)):  # rhs = XLEN
        return False
    if not isinstance(p[1], ast.List):  # lhs is a list
        return False
    l = p[1].elts  # restored: element-list binding lost in the extraction
    if len(l) != 1:  # lhs is a list of length 1
        return False
    return True  # yippee!
def apply_trailer(atom, trailer, read_regs):
    """Apply a parsed trailer (call-arglist or subscript) to *atom*.

    ``trailer`` is a tagged tuple produced by the p_trailer* rules:
    ("TLIST", t1, t2), ("CALL", args) or ("SUBS", subscripts).  Any
    register names encountered are recorded into *read_regs*.

    NOTE(review): reconstructed from a garbled extraction; the missing
    lines (``trailer = trailer[2]``, ``name = arg.id``, the subs
    handling) were restored from the visible uses -- confirm against
    the original.
    """
    if trailer[0] == "TLIST":
        # assume depth of one
        atom = apply_trailer(atom, trailer[1], read_regs)
        trailer = trailer[2]
    if trailer[0] == "CALL":
        #p[0] = ast.Expr(ast.Call(p[1], p[2][1], []))
        for arg in trailer[1]:
            if isinstance(arg, ast.Name):
                name = arg.id
                if name in regs + fregs:
                    read_regs.add(name)
        # special-case, these functions must NOT be made "self.xxxx"
        if atom.id not in SPECIAL_HELPERS:
            name = ast.Name("self", ast.Load())
            atom = ast.Attribute(name, atom, ast.Load())
        return ast.Call(atom, trailer[1], [])
        # if p[1].id == 'print':
        #     p[0] = ast.Printnl(ast.Tuple(p[2][1]), None, None)
        # else:
        #     p[0] = ast.CallFunc(p[1], p[2][1], None, None)
    else:
        print("subscript atom", trailer[1])
        #raise AssertionError("not implemented %s" % p[2][0])
        subs = trailer[1]
        if len(subs) == 1:
            idx = subs[0]
            if isinstance(idx, ast.Name) and idx.id in regs + fregs:
                read_regs.add(idx.id)
            if isinstance(idx, ast.Name) and idx.id in regs:
                # GPR indices get an underscore prefix: the index value
                # itself is needed, not the register contents
                print("single atom subscript, underscored", idx.id)
                idx = ast.Name("_%s" % idx.id, ast.Load())
        else:
            idx = ast.Slice(subs[0], subs[1], None)
        # if isinstance(atom, ast.Name) and atom.id == 'CR':
        #     atom.id = 'CR'  # bad hack
        #print ("apply_trailer Subscript", atom.id, idx)
        return ast.Subscript(atom, idx, ast.Load())
247 ########## Parser (tokens -> AST) ######
252 # https://www.mathcs.emory.edu/~valerie/courses/fall10/155/resources/op_precedence.html
253 # python operator precedence
254 # Highest precedence at top, lowest at bottom.
255 # Operators in the same box evaluate left to right.
257 # Operator Description
258 # () Parentheses (grouping)
259 # f(args...) Function call
260 # x[index:index] Slicing
261 # x[index] Subscription
262 # x.attribute Attribute reference
265 # +x, -x Positive, negative
266 # *, /, % mul, div, remainder
267 # +, - Addition, subtraction
268 # <<, >> Bitwise shifts
272 # in, not in, is, is not, <, <=, >, >=, <>, !=, == comp, membership, ident
276 # lambda Lambda expression
282 ("left", "EQ", "NE", "GT", "LT", "LE", "GE", "LTU", "GTU"),
286 ("left", "PLUS", "MINUS"),
287 ("left", "MULT", "DIV", "MOD"),
293 if self
.form
is not None:
294 form
= self
.sd
.sigforms
[self
.form
]
296 formkeys
= form
._asdict
().keys()
299 self
.declared_vars
= set()
300 self
.fnparm_vars
= set()
301 for rname
in regs
+ fregs
:
302 self
.gprs
[rname
] = None
303 self
.declared_vars
.add(rname
)
304 self
.available_op_fields
= set()
306 if k
not in self
.gprs
:
307 if k
== 'SPR': # sigh, lower-case to not conflict
309 self
.available_op_fields
.add(k
)
310 self
.op_fields
= OrderedSet()
311 self
.read_regs
= OrderedSet()
312 self
.uninit_regs
= OrderedSet()
313 self
.write_regs
= OrderedSet()
314 self
.special_regs
= OrderedSet() # see p_atom_name
316 def __init__(self
, form
, include_carry_in_write
=False, helper
=False):
317 self
.include_ca_in_write
= include_carry_in_write
322 # The grammar comments come from Python's Grammar/Grammar file
324 # NB: compound_stmt in single_input is followed by extra NEWLINE!
325 # file_input: (NEWLINE | stmt)* ENDMARKER
327 def p_file_input_end(self
, p
):
328 """file_input_end : file_input ENDMARKER"""
332 def p_file_input(self
, p
):
333 """file_input : file_input NEWLINE
337 if isinstance(p
[len(p
)-1], str):
341 p
[0] = [] # p == 2 --> only a blank line
348 # funcdef: [decorators] 'def' NAME parameters ':' suite
349 # ignoring decorators
def p_funcdef(self, p):
    # NOTE: with PLY (see yacc.yacc(module=self) below in the file) the
    # docstring IS the grammar production -- do not edit it casually.
    "funcdef : DEF NAME parameters COLON suite"
    # p[2]=function name, p[3]=ast.arguments, p[5]=suite statement list
    p[0] = ast.FunctionDef(p[2], p[3], p[5], ())
    # reset function parameters after suite is identified
    self.fnparm_vars = set()
357 # parameters: '(' [varargslist] ')'
358 def p_parameters(self
, p
):
359 """parameters : LPAR RPAR
360 | LPAR varargslist RPAR"""
366 p
[0] = ast
.arguments(args
=args
, vararg
=None, kwarg
=None, defaults
=[])
367 # during the time between parameters identified and suite is not
368 # there is a window of opportunity to declare the function parameters
369 # in-scope, for use to not overwrite them with auto-assign
370 self
.fnparm_vars
= set()
372 print("adding fn parm", arg
)
373 self
.fnparm_vars
.add(arg
)
375 # varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] |
379 def p_varargslist(self
, p
):
380 """varargslist : varargslist COMMA NAME
388 # stmt: simple_stmt | compound_stmt
def p_stmt_simple(self, p):
    """stmt : simple_stmt"""
    # simple_stmt is a list
    p[0] = p[1]  # restored: pass-through lost in the garbled extraction
def p_stmt_compound(self, p):
    """stmt : compound_stmt"""
    # wrap the single compound statement in a list, matching the
    # list shape produced by the simple_stmt alternative
    p[0] = [p[1]]
398 # simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
399 def p_simple_stmt(self
, p
):
400 """simple_stmt : small_stmts NEWLINE
401 | small_stmts SEMICOLON NEWLINE"""
404 def p_small_stmts(self
, p
):
405 """small_stmts : small_stmts SEMICOLON small_stmt
409 elif isinstance(p
[1], list):
414 # small_stmt: expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
415 # import_stmt | global_stmt | exec_stmt | assert_stmt
416 def p_small_stmt(self
, p
):
417 """small_stmt : flow_stmt
420 if isinstance(p
[1], ast
.Call
):
421 p
[0] = ast
.Expr(p
[1])
422 elif isinstance(p
[1], ast
.Name
) and p
[1].id not in SPECIAL_HELPERS
:
424 name
= ast
.Name("self", ast
.Load())
425 name
= ast
.Attribute(name
, fname
, ast
.Load())
426 p
[0] = ast
.Call(name
, [], [])
430 # expr_stmt: testlist (augassign (yield_expr|testlist) |
431 # ('=' (yield_expr|testlist))*)
432 # augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
433 # '<<=' | '>>=' | '**=' | '//=')
434 def p_expr_stmt(self
, p
):
435 """expr_stmt : testlist ASSIGNEA testlist
436 | testlist ASSIGN testlist
438 print("expr_stmt", p
)
440 # a list of expressions
441 #p[0] = ast.Discard(p[1])
444 iea_mode
= p
[2] == '<-iea'
447 if isinstance(p
[1], ast
.Name
):
449 elif isinstance(p
[1], ast
.Subscript
):
450 print("assign subscript", p
[1].value
,
454 print(astor
.dump_tree(p
[1]))
455 if isinstance(p
[1].value
, ast
.Name
):
457 print("assign subscript value to name", name
)
458 if name
in self
.gprs
:
459 # add to list of uninitialised
460 self
.uninit_regs
.add(name
)
461 # work out if this is an ininitialised variable
462 # that should be auto-assigned simply by being "sliced"
463 autoassign
= (name
not in self
.declared_vars
and
464 name
not in self
.fnparm_vars
and
465 name
not in self
.special_regs
)
466 elif isinstance(p
[1], ast
.Call
) and p
[1].func
.id in \
467 ['GPR', 'FPR', 'SPR']:
468 print(astor
.dump_tree(p
[1]))
469 # replace GPR(x) with GPR[x]
470 idx
= p
[1].args
[0].id
471 if idx
in regs
+ fregs
:
472 ridx
= ast
.Name("_%s" % idx
, ast
.Load())
474 ridx
= ast
.Name(idx
, ast
.Load())
475 p
[1] = ast
.Subscript(p
[1].func
, ridx
, ast
.Load())
477 self
.read_regs
.add(idx
) # add to list of regs to read
478 elif isinstance(p
[1], ast
.Call
) and p
[1].func
.id == 'MEM':
480 print(astor
.dump_tree(p
[1]))
481 p
[1].func
.id = "memassign" # change function name to set
482 p
[1].args
.append(p
[3])
485 print(astor
.dump_tree(p
[0]))
489 print(astor
.dump_tree(p
[1]))
490 print("expr assign", name
, p
[1], "to", p
[3])
491 if isinstance(p
[3], ast
.Name
):
493 if toname
in self
.gprs
:
494 self
.read_regs
.add(toname
)
495 if name
and name
in self
.gprs
:
496 self
.write_regs
.add(name
) # add to list of regs to write
497 p
[0] = Assign(autoassign
, name
, p
[1], p
[3], iea_mode
)
499 self
.declared_vars
.add(name
)
def p_flow_stmt(self, p):
    "flow_stmt : return_stmt"
    p[0] = p[1]  # restored: pass-through lost in the garbled extraction
505 # return_stmt: 'return' [testlist]
def p_return_stmt(self, p):
    # PLY grammar rule is the docstring below
    "return_stmt : RETURN testlist"
    # p[2] is the expression (testlist) being returned
    p[0] = ast.Return(p[2])
510 def p_compound_stmt(self
, p
):
511 """compound_stmt : if_stmt
def p_break_stmt(self, p):
    """break_stmt : BREAK
    """
    # restored: body lost in the garbled extraction
    p[0] = ast.Break()
524 def p_for_stmt(self
, p
):
525 """for_stmt : FOR atom EQ comparison TO comparison COLON suite
526 | DO atom EQ comparison TO comparison COLON suite
530 it
= ast
.Call(ast
.Name("RANGE", ast
.Load()), (start
, end
), [])
531 p
[0] = ast
.For(p
[2], it
, p
[8], [])
533 def p_while_stmt(self
, p
):
534 """while_stmt : DO WHILE test COLON suite ELSE COLON suite
535 | DO WHILE test COLON suite
538 p
[0] = ast
.While(p
[3], p
[5], [])
540 p
[0] = ast
.While(p
[3], p
[5], p
[8])
542 def p_switch_smt(self
, p
):
543 """switch_stmt : SWITCH LPAR atom RPAR COLON NEWLINE INDENT switches DEDENT
547 print(astor
.dump_tree(p
[1]))
550 current_cases
= [] # for deferral
551 for (case
, suite
) in p
[8]:
552 print("for", case
, suite
)
555 current_cases
.append(ast
.Num(c
))
557 if case
== 'default': # last
560 current_cases
.append(ast
.Num(c
))
561 print("cases", current_cases
)
562 compare
= ast
.Compare(switchon
, [ast
.In()],
563 [ast
.List(current_cases
, ast
.Load())])
565 cases
.append((compare
, suite
))
567 print("ended", case
, current_cases
)
568 if case
== 'default':
570 compare
= ast
.Compare(switchon
, [ast
.In()],
571 [ast
.List(current_cases
, ast
.Load())])
572 cases
.append((compare
, suite
))
573 cases
.append((None, suite
))
577 for compare
, suite
in cases
:
578 print("after rev", compare
, suite
)
580 assert len(res
) == 0, "last case should be default"
583 if not isinstance(res
, list):
585 res
= ast
.If(compare
, suite
, res
)
588 def p_switches(self
, p
):
589 """switches : switch_list switch_default
597 def p_switch_list(self
, p
):
598 """switch_list : switch_case switch_list
606 def p_switch_case(self
, p
):
607 """switch_case : CASE LPAR atomlist RPAR COLON suite
610 if isinstance(p
[6][0], ast
.Name
) and p
[6][0].id == 'fallthrough':
def p_switch_default(self, p):
    """switch_default : DEFAULT COLON suite
    """
    # tag the default branch so p_switch_smt can recognise it
    p[0] = ('default', p[3])
619 def p_atomlist(self
, p
):
620 """atomlist : atom COMMA atomlist
623 assert isinstance(p
[1], ast
.Constant
), "case must be numbers"
625 p
[0] = [p
[1].value
] + p
[3]
629 def p_if_stmt(self
, p
):
630 """if_stmt : IF test COLON suite ELSE COLON if_stmt
631 | IF test COLON suite ELSE COLON suite
632 | IF test COLON suite
634 if len(p
) == 8 and isinstance(p
[7], ast
.If
):
635 p
[0] = ast
.If(p
[2], p
[4], [p
[7]])
637 p
[0] = ast
.If(p
[2], p
[4], [])
639 p
[0] = ast
.If(p
[2], p
[4], p
[7])
641 def p_suite(self
, p
):
642 """suite : simple_stmt
643 | NEWLINE INDENT stmts DEDENT"""
649 def p_stmts(self
, p
):
650 """stmts : stmts stmt
657 def p_comparison(self
, p
):
658 """comparison : comparison PLUS comparison
659 | comparison MINUS comparison
660 | comparison MULT comparison
661 | comparison DIV comparison
662 | comparison MOD comparison
663 | comparison EQ comparison
664 | comparison NE comparison
665 | comparison LE comparison
666 | comparison GE comparison
667 | comparison LTU comparison
668 | comparison GTU comparison
669 | comparison LT comparison
670 | comparison GT comparison
671 | comparison BITOR comparison
672 | comparison BITXOR comparison
673 | comparison BITAND comparison
677 | comparison APPEND comparison
682 p
[0] = ast
.Call(ast
.Name("ltu", ast
.Load()), (p
[1], p
[3]), [])
684 p
[0] = ast
.Call(ast
.Name("gtu", ast
.Load()), (p
[1], p
[3]), [])
686 l
= check_concat(p
[1]) + check_concat(p
[3])
687 p
[0] = ast
.Call(ast
.Name("concat", ast
.Load()), l
, [])
688 elif p
[2] in ['/', '%']:
689 # bad hack: if % or / used anywhere other than div/mod ops,
690 # do % or /. however if the argument names are "dividend"
691 # we must call the special trunc_divs and trunc_rems functions
693 # actual call will be "dividend / divisor" - just check
695 # XXX DISABLE BAD HACK (False)
696 if False and isinstance(l
, ast
.Name
) and l
.id == 'dividend':
701 # return "function trunc_xxx(l, r)"
702 p
[0] = ast
.Call(ast
.Name(fn
, ast
.Load()), (l
, r
), [])
704 # return "l {binop} r"
705 p
[0] = ast
.BinOp(p
[1], binary_ops
[p
[2]], p
[3])
706 elif p
[2] in ['<', '>', '=', '<=', '>=', '!=']:
707 p
[0] = binary_ops
[p
[2]]((p
[1], p
[3]))
708 elif identify_sint_mul_pattern(p
):
709 keywords
= [ast
.keyword(arg
='repeat', value
=p
[3])]
711 p
[0] = ast
.Call(ast
.Name("concat", ast
.Load()), l
, keywords
)
713 p
[0] = ast
.BinOp(p
[1], binary_ops
[p
[2]], p
[3])
714 # HORRENDOUS hack, add brackets around the bin-op by
715 # creating a function call with a *blank* function name!
716 # XXX argh doesn't work because of analysis of
717 # identify_sint_pattern
718 #p[0] = ast.Call(ast.Name("", ast.Load()), [p[0]], [])
720 if isinstance(p
[2], str) and p
[2] == '-':
721 p
[0] = ast
.UnaryOp(unary_ops
[p
[2]], p
[1])
723 p
[0] = ast
.UnaryOp(unary_ops
[p
[1]], p
[2])
727 # power: atom trailer* ['**' factor]
728 # trailers enables function calls (and subscripts).
729 # so this is 'trailerlist'
730 def p_power(self
, p
):
732 | atom trailerlist"""
734 print("power dump atom notrailer")
735 print(astor
.dump_tree(p
[1]))
738 print("power dump atom")
739 print(astor
.dump_tree(p
[1]))
740 print("power dump trailerlist")
741 print(astor
.dump_tree(p
[2]))
742 p
[0] = apply_trailer(p
[1], p
[2], self
.read_regs
)
743 if isinstance(p
[1], ast
.Name
):
745 if name
in regs
+ fregs
:
746 self
.read_regs
.add(name
)
748 def p_atom_name(self
, p
):
751 if name
in self
.available_op_fields
:
752 self
.op_fields
.add(name
)
753 if name
== 'overflow':
754 self
.write_regs
.add(name
)
755 if self
.include_ca_in_write
:
756 if name
in ['CA', 'CA32']:
757 self
.write_regs
.add(name
)
758 if name
in ['CR', 'LR', 'CTR', 'TAR', 'FPSCR', 'MSR',
759 'SVSTATE', 'SVREMAP',
760 'SVSHAPE0', 'SVSHAPE1', 'SVSHAPE2', 'SVSHAPE3']:
761 self
.special_regs
.add(name
)
762 self
.write_regs
.add(name
) # and add to list to write
764 attr
= ast
.Name("self", ast
.Load())
765 p
[0] = ast
.Attribute(attr
, name
, ast
.Load())
767 p
[0] = ast
.Name(id=name
, ctx
=ast
.Load())
769 def p_atom_number(self
, p
):
774 p
[0] = ast
.Constant(p
[1])
776 # '[' [listmaker] ']' |
778 def p_atom_listmaker(self
, p
):
779 """atom : LBRACK listmaker RBRACK"""
782 def p_listmaker(self
, p
):
783 """listmaker : test COMMA listmaker
787 p
[0] = ast
.List([p
[1]], ast
.Load())
789 p
[0] = ast
.List([p
[1]] + p
[3].nodes
, ast
.Load())
791 def p_atom_tuple(self
, p
):
792 """atom : LPAR testlist RPAR"""
795 print(astor
.dump_tree(p
[2]))
797 if isinstance(p
[2], ast
.Name
):
799 print("tuple name", name
)
800 if name
in self
.gprs
:
801 self
.read_regs
.add(name
) # add to list of regs to read
802 #p[0] = ast.Subscript(ast.Name("GPR", ast.Load()), ast.Str(p[2].id))
805 elif isinstance(p
[2], ast
.BinOp
):
806 if isinstance(p
[2].left
, ast
.Name
) and \
807 isinstance(p
[2].right
, ast
.Constant
) and \
808 p
[2].right
.value
== 0 and \
809 p
[2].left
.id in self
.gprs
:
811 self
.read_regs
.add(rid
) # add to list of regs to read
812 # create special call to GPR.getz or FPR.getz
814 gprz
= ast
.Name("FPR", ast
.Load())
816 gprz
= ast
.Name("GPR", ast
.Load())
817 # get testzero function
818 gprz
= ast
.Attribute(gprz
, "getz", ast
.Load())
819 # *sigh* see class GPR. we need index itself not reg value
820 ridx
= ast
.Name("_%s" % rid
, ast
.Load())
821 p
[0] = ast
.Call(gprz
, [ridx
], [])
822 print("tree", astor
.dump_tree(p
[0]))
828 def p_trailerlist(self
, p
):
829 """trailerlist : trailer trailerlist
835 p
[0] = ("TLIST", p
[1], p
[2])
837 # trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
838 def p_trailer(self
, p
):
839 """trailer : trailer_arglist
def p_trailer_arglist(self, p):
    # function-call trailer "( arglist )"; the tagged tuple is consumed
    # later by apply_trailer (the "CALL" branch)
    "trailer_arglist : LPAR arglist RPAR"
    p[0] = ("CALL", p[2])
def p_trailer_subscript(self, p):
    # subscript trailer "[ subscript ]"; the tagged tuple is consumed
    # later by apply_trailer (the subscript branch)
    "trailer_subscript : LBRACK subscript RBRACK"
    p[0] = ("SUBS", p[2])
852 # subscript: '.' '.' '.' | test | [test] ':' [test]
854 def p_subscript(self
, p
):
855 """subscript : test COLON test
860 if isinstance(p
[3], ast
.Constant
):
861 end
= ast
.Constant(p
[3].value
+1)
863 end
= ast
.BinOp(p
[3], ast
.Add(), ast
.Constant(1))
868 # testlist: test (',' test)* [',']
869 # Contains shift/reduce error
871 def p_testlist(self
, p
):
872 """testlist : testlist_multi COMMA
877 # May need to promote singleton to tuple
878 if isinstance(p
[1], list):
882 # Convert into a tuple?
883 if isinstance(p
[0], list):
884 p
[0] = ast
.Tuple(p
[0])
886 def p_testlist_multi(self
, p
):
887 """testlist_multi : testlist_multi COMMA test
893 if isinstance(p
[1], list):
899 # test: or_test ['if' or_test 'else' test] | lambdef
900 # as I don't support 'and', 'or', and 'not' this works down to 'comparison'
906 # arglist: (argument ',')* (argument [',']| '*' test [',' '**' test]
908 # XXX INCOMPLETE: this doesn't allow the trailing comma
910 def p_arglist(self
, p
):
911 """arglist : arglist COMMA argument
918 # argument: test [gen_for] | test '=' test # Really [keyword '='] test
919 def p_argument(self
, p
):
923 def p_error(self
, p
):
924 # print "Error!", repr(p)
# when enabled, create_pdecode() is run only once and the result shared
# via _CACHED_DECODER (see _create_cached_decoder below)
_CACHE_DECODER = True
_CACHED_DECODER = None
def _create_cached_decoder():
    """Return the power decoder, creating it only once when caching is
    enabled (create_pdecode() is expensive)."""
    global _CACHED_DECODER
    # restored: the _CACHE_DECODER guard was lost in the garbled
    # extraction; it is implied by the unconditional create_pdecode()
    # fall-through at the end
    if _CACHE_DECODER:
        if _CACHED_DECODER is None:
            _CACHED_DECODER = create_pdecode()
        return _CACHED_DECODER
    return create_pdecode()
class GardenSnakeParser(PowerParser):
    """PowerParser wired to the indentation-aware IndentLexer and a PLY
    yacc parser built from the p_* grammar-rule methods."""

    def __init__(self, debug=False, form=None, incl_carry=False, helper=False):
        self.sd = _create_cached_decoder()  # expensive, hence cached
        PowerParser.__init__(self, form, incl_carry, helper=helper)
        # restored: self.debug binding lost in the garbled extraction
        # (parse() below reads it)
        self.debug = debug
        self.lexer = IndentLexer(debug=0)
        self.tokens = self.lexer.tokens
        self.parser = yacc.yacc(module=self, start="file_input_end",
                                debug=debug, write_tables=False)

    def parse(self, code):
        # run the PLY parser over the pseudocode, producing a list of
        # ast statement nodes
        result = self.parser.parse(code, lexer=self.lexer, debug=self.debug)
        # NOTE(review): assumes PowerParser.__init__ stores the helper
        # flag as self.helper -- confirm against PowerParser
        if self.helper:
            # wrap the statements in a class when generating the
            # ISACallerFnHelper variant
            result = [ast.ClassDef("ISACallerFnHelper", [
                "ISACallerHelper"], [], result, decorator_list=[])]
        return ast.Module(result)
961 ###### Code generation ######
963 #from compiler import misc, syntax, pycodegen
# when enabled, GardenSnakeCompiler re-uses parser instances from the
# parser cache instead of rebuilding the PLY tables each time
_CACHE_PARSERS = True
969 class GardenSnakeCompiler(object):
970 def __init__(self
, debug
=False, form
=None, incl_carry
=False, helper
=False):
973 self
.parser
= _CACHED_PARSERS
[debug
, form
, incl_carry
, helper
]
975 self
.parser
= GardenSnakeParser(
976 debug
=debug
, form
=form
, incl_carry
=incl_carry
,
978 _CACHED_PARSERS
[debug
, form
, incl_carry
, helper
] = self
.parser
980 self
.parser
= GardenSnakeParser(debug
=debug
, form
=form
,
981 incl_carry
=incl_carry
, helper
=helper
)
983 def compile(self
, code
, mode
="exec", filename
="<string>"):
984 tree
= self
.parser
.parse(code
)
988 #misc.set_filename(filename, tree)
989 return compile(tree
, mode
="exec", filename
="<string>")
991 gen
= pycodegen
.ModuleCodeGenerator(tree
)