1 # Based on GardenSnake - a parser generator demonstration program
2 # GardenSnake was released into the Public Domain by Andrew Dalke.
4 # Portions of this work are derived from Python's Grammar definition
5 # and may be covered under the Python copyright and license
7 # Andrew Dalke / Dalke Scientific Software, LLC
8 # 30 August 2006 / Cape Town, South Africa
10 # Modifications for inclusion in PLY distribution
11 from pprint
import pprint
12 from ply
import lex
, yacc
15 from soc
.decoder
.power_decoder
import create_pdecode
16 from soc
.decoder
.pseudo
.lexer
import IndentLexer
17 from soc
.decoder
.orderedset
import OrderedSet
19 # I use the Python AST
20 #from compiler import ast
26 def Assign(left
, right
, iea_mode
):
28 print("Assign", left
, right
)
29 if isinstance(left
, ast
.Name
):
30 # Single assignment on left
31 # XXX when doing IntClass, which will have an "eq" function,
32 # this is how to access it
33 # eq = ast.Attribute(left, "eq") # get eq fn
34 # return ast.Call(eq, [right], []) # now call left.eq(right)
35 return ast
.Assign([ast
.Name(left
.id, ast
.Store())], right
)
36 elif isinstance(left
, ast
.Tuple
):
37 # List of things - make sure they are Name nodes
39 for child
in left
.getChildren():
40 if not isinstance(child
, ast
.Name
):
41 raise SyntaxError("that assignment not supported")
42 names
.append(child
.name
)
43 ass_list
= [ast
.AssName(name
, 'OP_ASSIGN') for name
in names
]
44 return ast
.Assign([ast
.AssTuple(ass_list
)], right
)
45 elif isinstance(left
, ast
.Subscript
):
46 return ast
.Assign([left
], right
)
47 # XXX HMMM probably not needed...
49 if isinstance(ls
, ast
.Slice
):
50 lower
, upper
, step
= ls
.lower
, ls
.upper
, ls
.step
51 print("slice assign", lower
, upper
, step
)
53 ls
= (lower
, upper
, None)
55 ls
= (lower
, upper
, step
)
57 return ast
.Call(ast
.Name("selectassign", ast
.Load()),
58 [left
.value
, ls
, right
], [])
61 raise SyntaxError("Can't do that yet")
64 # I implemented INDENT / DEDENT generation as a post-processing filter
66 # The original lex token stream contains WS and NEWLINE characters.
67 # WS will only occur before any other tokens on a line.
69 # I have three filters. One tags tokens by adding two attributes.
70 # "must_indent" is True if the token must be indented from the
71 # previous code. The other is "at_line_start" which is True for WS
72 # and the first non-WS/non-NEWLINE on a line. It flags the check so
73 see if the new line has changed indentation level.
76 # Not using Python's approach because Ply supports precedence
78 # comparison: expr (comp_op expr)*
79 # arith_expr: term (('+'|'-') term)*
80 # term: factor (('*'|'/'|'%'|'//') factor)*
81 # factor: ('+'|'-'|'~') factor | power
82 # comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
84 def make_le_compare(arg
):
86 return ast
.Compare(left
, [ast
.LtE()], [right
])
89 def make_ge_compare(arg
):
91 return ast
.Compare(left
, [ast
.GtE()], [right
])
94 def make_lt_compare(arg
):
96 return ast
.Compare(left
, [ast
.Lt()], [right
])
99 def make_gt_compare(arg
):
101 return ast
.Compare(left
, [ast
.Gt()], [right
])
104 def make_eq_compare(arg
):
106 return ast
.Compare(left
, [ast
.Eq()], [right
])
109 def make_ne_compare(arg
):
111 return ast
.Compare(left
, [ast
.NotEq()], [right
])
123 "<=": make_le_compare
,
124 ">=": make_ge_compare
,
125 "<": make_lt_compare
,
126 ">": make_gt_compare
,
127 "=": make_eq_compare
,
128 "!=": make_ne_compare
,
137 def check_concat(node
): # checks if the comparison is already a concat
138 print("check concat", node
)
139 if not isinstance(node
, ast
.Call
):
141 print("func", node
.func
.id)
142 if node
.func
.id != 'concat':
144 if node
.keywords
: # a repeated list-constant, don't optimise
149 # identify SelectableInt pattern [something] * N
150 # must return concat(something, repeat=N)
151 def identify_sint_mul_pattern(p
):
152 if p
[2] != '*': # multiply
154 if not isinstance(p
[3], ast
.Constant
): # rhs = Num
156 if not isinstance(p
[1], ast
.List
): # lhs is a list
159 if len(l
) != 1: # lhs is a list of length 1
161 return True # yippee!
164 def apply_trailer(atom
, trailer
):
165 if trailer
[0] == "TLIST":
166 # assume depth of one
167 atom
= apply_trailer(atom
, trailer
[1])
169 if trailer
[0] == "CALL":
170 #p[0] = ast.Expr(ast.Call(p[1], p[2][1], []))
171 return ast
.Call(atom
, trailer
[1], [])
172 # if p[1].id == 'print':
173 # p[0] = ast.Printnl(ast.Tuple(p[2][1]), None, None)
175 # p[0] = ast.CallFunc(p[1], p[2][1], None, None)
177 print("subscript atom", trailer
[1])
178 #raise AssertionError("not implemented %s" % p[2][0])
183 idx
= ast
.Slice(subs
[0], subs
[1], None)
184 return ast
.Subscript(atom
, idx
, ast
.Load())
186 ########## Parser (tokens -> AST) ######
191 # https://www.mathcs.emory.edu/~valerie/courses/fall10/155/resources/op_precedence.html
192 # python operator precedence
193 # Highest precedence at top, lowest at bottom.
194 # Operators in the same box evaluate left to right.
196 # Operator Description
197 # () Parentheses (grouping)
198 # f(args...) Function call
199 # x[index:index] Slicing
200 # x[index] Subscription
201 # x.attribute Attribute reference
204 # +x, -x Positive, negative
205 # *, /, % mul, div, remainder
206 # +, - Addition, subtraction
207 # <<, >> Bitwise shifts
211 # in, not in, is, is not, <, <=, >, >=, <>, !=, == comp, membership, ident
215 # lambda Lambda expression
221 ("left", "EQ", "NE", "GT", "LT", "LE", "GE", "LTU", "GTU"),
225 ("left", "PLUS", "MINUS"),
226 ("left", "MULT", "DIV", "MOD"),
230 def __init__(self
, form
):
232 form
= self
.sd
.sigforms
[form
]
234 formkeys
= form
._asdict
().keys()
235 for rname
in ['RA', 'RB', 'RC', 'RT', 'RS']:
236 self
.gprs
[rname
] = None
237 self
.available_op_fields
= set()
239 if k
not in self
.gprs
:
240 if k
== 'SPR': # sigh, lower-case to not conflict
242 self
.available_op_fields
.add(k
)
243 self
.op_fields
= OrderedSet()
244 self
.read_regs
= OrderedSet()
245 self
.uninit_regs
= OrderedSet()
246 self
.write_regs
= OrderedSet()
247 self
.special_regs
= OrderedSet() # see p_atom_name
249 # The grammar comments come from Python's Grammar/Grammar file
251 # NB: compound_stmt in single_input is followed by extra NEWLINE!
252 # file_input: (NEWLINE | stmt)* ENDMARKER
254 def p_file_input_end(self
, p
):
255 """file_input_end : file_input ENDMARKER"""
259 def p_file_input(self
, p
):
260 """file_input : file_input NEWLINE
264 if isinstance(p
[len(p
)-1], str):
268 p
[0] = [] # p == 2 --> only a blank line
275 # funcdef: [decorators] 'def' NAME parameters ':' suite
276 # ignoring decorators
def p_funcdef(self, p):
    "funcdef : DEF NAME parameters COLON suite"
    # NAME is p[2], the parameter list is p[3], and the suite (body)
    # is p[5]; decorators are not supported, hence the empty tuple.
    name, args, body = p[2], p[3], p[5]
    p[0] = ast.FunctionDef(name, args, body, ())
282 # parameters: '(' [varargslist] ')'
283 def p_parameters(self
, p
):
284 """parameters : LPAR RPAR
285 | LPAR varargslist RPAR"""
290 p
[0] = ast
.arguments(args
=args
, vararg
=None, kwarg
=None, defaults
=[])
292 # varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] |
296 def p_varargslist(self
, p
):
297 """varargslist : varargslist COMMA NAME
304 # stmt: simple_stmt | compound_stmt
305 def p_stmt_simple(self
, p
):
306 """stmt : simple_stmt"""
307 # simple_stmt is a list
310 def p_stmt_compound(self
, p
):
311 """stmt : compound_stmt"""
314 # simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
315 def p_simple_stmt(self
, p
):
316 """simple_stmt : small_stmts NEWLINE
317 | small_stmts SEMICOLON NEWLINE"""
320 def p_small_stmts(self
, p
):
321 """small_stmts : small_stmts SEMICOLON small_stmt
328 # small_stmt: expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
329 # import_stmt | global_stmt | exec_stmt | assert_stmt
330 def p_small_stmt(self
, p
):
331 """small_stmt : flow_stmt
334 if isinstance(p
[1], ast
.Call
):
335 p
[0] = ast
.Expr(p
[1])
339 # expr_stmt: testlist (augassign (yield_expr|testlist) |
340 # ('=' (yield_expr|testlist))*)
341 # augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
342 # '<<=' | '>>=' | '**=' | '//=')
343 def p_expr_stmt(self
, p
):
344 """expr_stmt : testlist ASSIGNEA testlist
345 | testlist ASSIGN testlist
347 print("expr_stmt", p
)
349 # a list of expressions
350 #p[0] = ast.Discard(p[1])
353 iea_mode
= p
[2] == '<-iea'
355 if isinstance(p
[1], ast
.Name
):
357 elif isinstance(p
[1], ast
.Subscript
):
358 if isinstance(p
[1].value
, ast
.Name
):
360 if name
in self
.gprs
:
361 # add to list of uninitialised
362 self
.uninit_regs
.add(name
)
363 elif isinstance(p
[1], ast
.Call
) and p
[1].func
.id in ['GPR', 'SPR']:
364 print(astor
.dump_tree(p
[1]))
365 # replace GPR(x) with GPR[x]
367 p
[1] = ast
.Subscript(p
[1].func
, idx
, ast
.Load())
368 elif isinstance(p
[1], ast
.Call
) and p
[1].func
.id == 'MEM':
370 print(astor
.dump_tree(p
[1]))
371 p
[1].func
.id = "memassign" # change function name to set
372 p
[1].args
.append(p
[3])
375 print(astor
.dump_tree(p
[0]))
379 print(astor
.dump_tree(p
[1]))
380 print("expr assign", name
, p
[1])
381 if name
and name
in self
.gprs
:
382 self
.write_regs
.add(name
) # add to list of regs to write
383 p
[0] = Assign(p
[1], p
[3], iea_mode
)
385 def p_flow_stmt(self
, p
):
386 "flow_stmt : return_stmt"
389 # return_stmt: 'return' [testlist]
def p_return_stmt(self, p):
    "return_stmt : RETURN testlist"
    # Wrap the expression that follows RETURN in an AST Return node.
    value = p[2]
    p[0] = ast.Return(value)
394 def p_compound_stmt(self
, p
):
395 """compound_stmt : if_stmt
403 def p_break_stmt(self
, p
):
404 """break_stmt : BREAK
408 def p_for_stmt(self
, p
):
409 """for_stmt : FOR atom EQ test TO test COLON suite
410 | DO atom EQ test TO test COLON suite
412 # auto-add-one (sigh) due to python range
414 end
= ast
.BinOp(p
[6], ast
.Add(), ast
.Constant(1))
415 it
= ast
.Call(ast
.Name("range", ast
.Load()), [start
, end
], [])
416 p
[0] = ast
.For(p
[2], it
, p
[8], [])
418 def p_while_stmt(self
, p
):
419 """while_stmt : DO WHILE test COLON suite ELSE COLON suite
420 | DO WHILE test COLON suite
423 p
[0] = ast
.While(p
[3], p
[5], [])
425 p
[0] = ast
.While(p
[3], p
[5], p
[8])
427 def p_switch_smt(self
, p
):
428 """switch_stmt : SWITCH LPAR atom RPAR COLON NEWLINE INDENT switches DEDENT
432 print(astor
.dump_tree(p
[1]))
435 current_cases
= [] # for deferral
436 for (case
, suite
) in p
[8]:
437 print("for", case
, suite
)
440 current_cases
.append(ast
.Num(c
))
442 if case
== 'default': # last
445 current_cases
.append(ast
.Num(c
))
446 print("cases", current_cases
)
447 compare
= ast
.Compare(switchon
, [ast
.In()],
448 [ast
.List(current_cases
, ast
.Load())])
450 cases
.append((compare
, suite
))
452 print("ended", case
, current_cases
)
453 if case
== 'default':
455 compare
= ast
.Compare(switchon
, [ast
.In()],
456 [ast
.List(current_cases
, ast
.Load())])
457 cases
.append((compare
, suite
))
458 cases
.append((None, suite
))
462 for compare
, suite
in cases
:
463 print("after rev", compare
, suite
)
465 assert len(res
) == 0, "last case should be default"
468 if not isinstance(res
, list):
470 res
= ast
.If(compare
, suite
, res
)
473 def p_switches(self
, p
):
474 """switches : switch_list switch_default
482 def p_switch_list(self
, p
):
483 """switch_list : switch_case switch_list
491 def p_switch_case(self
, p
):
492 """switch_case : CASE LPAR atomlist RPAR COLON suite
495 if isinstance(p
[6][0], ast
.Name
) and p
[6][0].id == 'fallthrough':
499 def p_switch_default(self
, p
):
500 """switch_default : DEFAULT COLON suite
502 p
[0] = ('default', p
[3])
504 def p_atomlist(self
, p
):
505 """atomlist : atom COMMA atomlist
508 assert isinstance(p
[1], ast
.Constant
), "case must be numbers"
510 p
[0] = [p
[1].value
] + p
[3]
514 def p_if_stmt(self
, p
):
515 """if_stmt : IF test COLON suite ELSE COLON if_stmt
516 | IF test COLON suite ELSE COLON suite
517 | IF test COLON suite
519 if len(p
) == 8 and isinstance(p
[7], ast
.If
):
520 p
[0] = ast
.If(p
[2], p
[4], [p
[7]])
522 p
[0] = ast
.If(p
[2], p
[4], [])
524 p
[0] = ast
.If(p
[2], p
[4], p
[7])
526 def p_suite(self
, p
):
527 """suite : simple_stmt
528 | NEWLINE INDENT stmts DEDENT"""
534 def p_stmts(self
, p
):
535 """stmts : stmts stmt
542 def p_comparison(self
, p
):
543 """comparison : comparison PLUS comparison
544 | comparison MINUS comparison
545 | comparison MULT comparison
546 | comparison DIV comparison
547 | comparison MOD comparison
548 | comparison EQ comparison
549 | comparison NE comparison
550 | comparison LE comparison
551 | comparison GE comparison
552 | comparison LTU comparison
553 | comparison GTU comparison
554 | comparison LT comparison
555 | comparison GT comparison
556 | comparison BITOR comparison
557 | comparison BITXOR comparison
558 | comparison BITAND comparison
562 | comparison APPEND comparison
567 p
[0] = ast
.Call(ast
.Name("ltu", ast
.Load()), (p
[1], p
[3]), [])
569 p
[0] = ast
.Call(ast
.Name("gtu", ast
.Load()), (p
[1], p
[3]), [])
571 l
= check_concat(p
[1]) + check_concat(p
[3])
572 p
[0] = ast
.Call(ast
.Name("concat", ast
.Load()), l
, [])
573 elif p
[2] in ['<', '>', '=', '<=', '>=', '!=']:
574 p
[0] = binary_ops
[p
[2]]((p
[1], p
[3]))
575 elif identify_sint_mul_pattern(p
):
576 keywords
= [ast
.keyword(arg
='repeat', value
=p
[3])]
578 p
[0] = ast
.Call(ast
.Name("concat", ast
.Load()), l
, keywords
)
580 p
[0] = ast
.BinOp(p
[1], binary_ops
[p
[2]], p
[3])
582 if isinstance(p
[2], str) and p
[2] == '-':
583 p
[0] = ast
.UnaryOp(unary_ops
[p
[2]], p
[1])
585 p
[0] = ast
.UnaryOp(unary_ops
[p
[1]], p
[2])
589 # power: atom trailer* ['**' factor]
590 # trailers enables function calls (and subscripts).
591 # so this is 'trailerlist'
592 def p_power(self
, p
):
594 | atom trailerlist"""
598 print("power dump atom")
599 print(astor
.dump_tree(p
[1]))
600 print("power dump trailerlist")
601 print(astor
.dump_tree(p
[2]))
602 p
[0] = apply_trailer(p
[1], p
[2])
604 def p_atom_name(self
, p
):
607 if name
in self
.available_op_fields
:
608 self
.op_fields
.add(name
)
609 if name
in ['CR', 'LR', 'CTR', 'TAR', 'FPSCR']:
610 self
.special_regs
.add(name
)
611 self
.write_regs
.add(name
) # and add to list to write
612 p
[0] = ast
.Name(id=name
, ctx
=ast
.Load())
614 def p_atom_number(self
, p
):
619 p
[0] = ast
.Constant(p
[1])
621 # '[' [listmaker] ']' |
623 def p_atom_listmaker(self
, p
):
624 """atom : LBRACK listmaker RBRACK"""
627 def p_listmaker(self
, p
):
628 """listmaker : test COMMA listmaker
632 p
[0] = ast
.List([p
[1]], ast
.Load())
634 p
[0] = ast
.List([p
[1]] + p
[3].nodes
, ast
.Load())
636 def p_atom_tuple(self
, p
):
637 """atom : LPAR testlist RPAR"""
640 print(astor
.dump_tree(p
[2]))
642 if isinstance(p
[2], ast
.Name
):
644 print("tuple name", name
)
645 if name
in self
.gprs
:
646 self
.read_regs
.add(name
) # add to list of regs to read
647 #p[0] = ast.Subscript(ast.Name("GPR", ast.Load()), ast.Str(p[2].id))
650 elif isinstance(p
[2], ast
.BinOp
):
651 if isinstance(p
[2].left
, ast
.Name
) and \
652 isinstance(p
[2].right
, ast
.Constant
) and \
653 p
[2].right
.value
== 0 and \
654 p
[2].left
.id in self
.gprs
:
656 self
.read_regs
.add(rid
) # add to list of regs to read
657 # create special call to GPR.getz
658 gprz
= ast
.Name("GPR", ast
.Load())
659 # get testzero function
660 gprz
= ast
.Attribute(gprz
, "getz", ast
.Load())
661 # *sigh* see class GPR. we need index itself not reg value
662 ridx
= ast
.Name("_%s" % rid
, ast
.Load())
663 p
[0] = ast
.Call(gprz
, [ridx
], [])
664 print("tree", astor
.dump_tree(p
[0]))
670 def p_trailerlist(self
, p
):
671 """trailerlist : trailer trailerlist
677 p
[0] = ("TLIST", p
[1], p
[2])
679 # trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
680 def p_trailer(self
, p
):
681 """trailer : trailer_arglist
def p_trailer_arglist(self, p):
    "trailer_arglist : LPAR arglist RPAR"
    # Tag the parenthesised argument list so the trailer consumer can
    # recognise it as a function call.
    args = p[2]
    p[0] = ("CALL", args)
def p_trailer_subscript(self, p):
    "trailer_subscript : LBRACK subscript RBRACK"
    # Tag the bracketed subscript so the trailer consumer can
    # recognise it as an indexing operation.
    subs = p[2]
    p[0] = ("SUBS", subs)
694 # subscript: '.' '.' '.' | test | [test] ':' [test]
696 def p_subscript(self
, p
):
697 """subscript : test COLON test
702 if isinstance(p
[3], ast
.Constant
):
703 end
= ast
.Constant(p
[3].value
+1)
705 end
= ast
.BinOp(p
[3], ast
.Add(), ast
.Constant(1))
710 # testlist: test (',' test)* [',']
711 # Contains shift/reduce error
713 def p_testlist(self
, p
):
714 """testlist : testlist_multi COMMA
719 # May need to promote singleton to tuple
720 if isinstance(p
[1], list):
724 # Convert into a tuple?
725 if isinstance(p
[0], list):
726 p
[0] = ast
.Tuple(p
[0])
728 def p_testlist_multi(self
, p
):
729 """testlist_multi : testlist_multi COMMA test
735 if isinstance(p
[1], list):
741 # test: or_test ['if' or_test 'else' test] | lambdef
742 # as I don't support 'and', 'or', and 'not' this works down to 'comparison'
748 # arglist: (argument ',')* (argument [',']| '*' test [',' '**' test]
750 # XXX INCOMPLETE: this doesn't allow the trailing comma
752 def p_arglist(self
, p
):
753 """arglist : arglist COMMA argument
760 # argument: test [gen_for] | test '=' test # Really [keyword '='] test
761 def p_argument(self
, p
):
765 def p_error(self
, p
):
766 # print "Error!", repr(p)
770 class GardenSnakeParser(PowerParser
):
771 def __init__(self
, lexer
=None, debug
=False, form
=None):
772 self
.sd
= create_pdecode()
773 PowerParser
.__init
__(self
, form
)
776 lexer
= IndentLexer(debug
=0)
778 self
.tokens
= lexer
.tokens
779 self
.parser
= yacc
.yacc(module
=self
, start
="file_input_end",
780 debug
=debug
, write_tables
=False)
def parse(self, code):
    """Run the yacc parser over *code* and wrap the resulting
    statement list in an ast.Module."""
    # lexer is handed to yacc via the lexer= keyword rather than
    # being fed the source directly:
    # self.lexer.input(code)
    body = self.parser.parse(code, lexer=self.lexer, debug=self.debug)
    return ast.Module(body)
788 ###### Code generation ######
790 #from compiler import misc, syntax, pycodegen
792 class GardenSnakeCompiler(object):
def __init__(self, debug=False, form=None):
    """Create the compiler front-end: builds a GardenSnakeParser for
    the given instruction *form* and exposes it as self.parser."""
    self.parser = GardenSnakeParser(debug=debug, form=form)
796 def compile(self
, code
, mode
="exec", filename
="<string>"):
797 tree
= self
.parser
.parse(code
)
801 #misc.set_filename(filename, tree)
802 return compile(tree
, mode
="exec", filename
="<string>")
804 gen
= pycodegen
.ModuleCodeGenerator(tree
)