1466f87cf1f63a0526e91910f93534e46716675e
1 # Based on GardenSnake - a parser generator demonstration program
2 # GardenSnake was released into the Public Domain by Andrew Dalke.
4 # Portions of this work are derived from Python's Grammar definition
5 # and may be covered under the Python copyright and license
7 # Andrew Dalke / Dalke Scientific Software, LLC
8 # 30 August 2006 / Cape Town, South Africa
10 # Modifications for inclusion in PLY distribution
from pprint import pprint
from copy import deepcopy
import ast    # stdlib AST used to build the output tree (was in a lost line)

import astor  # used by the debug dump_tree() calls below (was in a lost line)
from ply import lex, yacc

from soc.decoder.power_decoder import create_pdecode
from soc.decoder.pseudo.lexer import IndentLexer
from soc.decoder.orderedset import OrderedSet

# I use the Python AST
#from compiler import ast
27 def Assign(autoassign
, assignname
, left
, right
, iea_mode
):
29 print("Assign", assignname
, left
, right
)
30 if isinstance(left
, ast
.Name
):
31 # Single assignment on left
32 # XXX when doing IntClass, which will have an "eq" function,
33 # this is how to access it
34 # eq = ast.Attribute(left, "eq") # get eq fn
35 # return ast.Call(eq, [right], []) # now call left.eq(right)
36 return ast
.Assign([ast
.Name(left
.id, ast
.Store())], right
)
37 elif isinstance(left
, ast
.Tuple
):
38 # List of things - make sure they are Name nodes
40 for child
in left
.getChildren():
41 if not isinstance(child
, ast
.Name
):
42 raise SyntaxError("that assignment not supported")
43 names
.append(child
.name
)
44 ass_list
= [ast
.AssName(name
, 'OP_ASSIGN') for name
in names
]
45 return ast
.Assign([ast
.AssTuple(ass_list
)], right
)
46 elif isinstance(left
, ast
.Subscript
):
48 if (isinstance(ls
, ast
.Slice
) and isinstance(right
, ast
.Name
) and
49 right
.id == 'undefined'):
50 # undefined needs to be copied the exact same slice
51 right
= ast
.Subscript(right
, ls
, ast
.Load())
52 return ast
.Assign([left
], right
)
53 res
= ast
.Assign([left
], right
)
54 if autoassign
and isinstance(ls
, ast
.Slice
):
55 # hack to create a variable pre-declared based on a slice.
56 # dividend[0:32] = (RA)[0:32] will create
58 # dividend[0:32] = (RA)[0:32]
59 # the declaration makes the slice-assignment "work"
60 lower
, upper
, step
= ls
.lower
, ls
.upper
, ls
.step
61 print("lower, upper, step", repr(lower
), repr(upper
), step
)
62 if not isinstance(lower
, ast
.Constant
) or \
63 not isinstance(upper
, ast
.Constant
):
65 qty
= ast
.Num(upper
.value
-lower
.value
)
66 keywords
= [ast
.keyword(arg
='repeat', value
=qty
)]
68 right
= ast
.Call(ast
.Name("concat", ast
.Load()), l
, keywords
)
69 declare
= ast
.Assign([ast
.Name(assignname
, ast
.Store())], right
)
72 # XXX HMMM probably not needed...
74 if isinstance(ls
, ast
.Slice
):
75 lower
, upper
, step
= ls
.lower
, ls
.upper
, ls
.step
76 print("slice assign", lower
, upper
, step
)
78 ls
= (lower
, upper
, None)
80 ls
= (lower
, upper
, step
)
82 return ast
.Call(ast
.Name("selectassign", ast
.Load()),
83 [left
.value
, ls
, right
], [])
86 raise SyntaxError("Can't do that yet")
89 # I implemented INDENT / DEDENT generation as a post-processing filter
91 # The original lex token stream contains WS and NEWLINE characters.
92 # WS will only occur before any other tokens on a line.
94 # I have three filters. One tags tokens by adding two attributes.
95 # "must_indent" is True if the token must be indented from the
96 # previous code. The other is "at_line_start" which is True for WS
97 # and the first non-WS/non-NEWLINE on a line. It flags the check to
98 # see if the new line has changed indentation level.
101 # Not using Python's approach because Ply supports precedence
103 # comparison: expr (comp_op expr)*
104 # arith_expr: term (('+'|'-') term)*
105 # term: factor (('*'|'/'|'%'|'//') factor)*
106 # factor: ('+'|'-'|'~') factor | power
107 # comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
109 def make_le_compare(arg
):
111 return ast
.Call(ast
.Name("le", ast
.Load()), (left
, right
), [])
114 def make_ge_compare(arg
):
116 return ast
.Call(ast
.Name("ge", ast
.Load()), (left
, right
), [])
119 def make_lt_compare(arg
):
121 return ast
.Call(ast
.Name("lt", ast
.Load()), (left
, right
), [])
124 def make_gt_compare(arg
):
126 return ast
.Call(ast
.Name("gt", ast
.Load()), (left
, right
), [])
129 def make_eq_compare(arg
):
131 return ast
.Call(ast
.Name("eq", ast
.Load()), (left
, right
), [])
134 def make_ne_compare(arg
):
136 return ast
.Call(ast
.Name("ne", ast
.Load()), (left
, right
), [])
148 "<=": make_le_compare
,
149 ">=": make_ge_compare
,
150 "<": make_lt_compare
,
151 ">": make_gt_compare
,
152 "=": make_eq_compare
,
153 "!=": make_ne_compare
,
162 def check_concat(node
): # checks if the comparison is already a concat
163 print("check concat", node
)
164 if not isinstance(node
, ast
.Call
):
166 print("func", node
.func
.id)
167 if node
.func
.id != 'concat':
169 if node
.keywords
: # a repeated list-constant, don't optimise
174 # identify SelectableInt pattern [something] * N
175 # must return concat(something, repeat=N)
176 def identify_sint_mul_pattern(p
):
177 if p
[2] != '*': # multiply
179 if not isinstance(p
[3], ast
.Constant
): # rhs = Num
181 if not isinstance(p
[1], ast
.List
): # lhs is a list
184 if len(l
) != 1: # lhs is a list of length 1
186 return True # yippee!
189 def apply_trailer(atom
, trailer
):
190 if trailer
[0] == "TLIST":
191 # assume depth of one
192 atom
= apply_trailer(atom
, trailer
[1])
194 if trailer
[0] == "CALL":
195 #p[0] = ast.Expr(ast.Call(p[1], p[2][1], []))
196 return ast
.Call(atom
, trailer
[1], [])
197 # if p[1].id == 'print':
198 # p[0] = ast.Printnl(ast.Tuple(p[2][1]), None, None)
200 # p[0] = ast.CallFunc(p[1], p[2][1], None, None)
202 print("subscript atom", trailer
[1])
203 #raise AssertionError("not implemented %s" % p[2][0])
208 idx
= ast
.Slice(subs
[0], subs
[1], None)
209 # if isinstance(atom, ast.Name) and atom.id == 'CR':
210 # atom.id = 'CR' # bad hack
211 #print ("apply_trailer Subscript", atom.id, idx)
212 return ast
.Subscript(atom
, idx
, ast
.Load())
214 ########## Parser (tokens -> AST) ######
219 # https://www.mathcs.emory.edu/~valerie/courses/fall10/155/resources/op_precedence.html
220 # python operator precedence
221 # Highest precedence at top, lowest at bottom.
222 # Operators in the same box evaluate left to right.
224 # Operator Description
225 # () Parentheses (grouping)
226 # f(args...) Function call
227 # x[index:index] Slicing
228 # x[index] Subscription
229 # x.attribute Attribute reference
232 # +x, -x Positive, negative
233 # *, /, % mul, div, remainder
234 # +, - Addition, subtraction
235 # <<, >> Bitwise shifts
239 # in, not in, is, is not, <, <=, >, >=, <>, !=, == comp, membership, ident
243 # lambda Lambda expression
249 ("left", "EQ", "NE", "GT", "LT", "LE", "GE", "LTU", "GTU"),
253 ("left", "PLUS", "MINUS"),
254 ("left", "MULT", "DIV", "MOD"),
258 def __init__(self
, form
, include_carry_in_write
=False):
259 self
.include_ca_in_write
= include_carry_in_write
261 form
= self
.sd
.sigforms
[form
]
263 formkeys
= form
._asdict
().keys()
264 self
.declared_vars
= set()
265 for rname
in ['RA', 'RB', 'RC', 'RT', 'RS']:
266 self
.gprs
[rname
] = None
267 self
.declared_vars
.add(rname
)
268 self
.available_op_fields
= set()
270 if k
not in self
.gprs
:
271 if k
== 'SPR': # sigh, lower-case to not conflict
273 self
.available_op_fields
.add(k
)
274 self
.op_fields
= OrderedSet()
275 self
.read_regs
= OrderedSet()
276 self
.uninit_regs
= OrderedSet()
277 self
.write_regs
= OrderedSet()
278 self
.special_regs
= OrderedSet() # see p_atom_name
280 # The grammar comments come from Python's Grammar/Grammar file
282 # NB: compound_stmt in single_input is followed by extra NEWLINE!
283 # file_input: (NEWLINE | stmt)* ENDMARKER
285 def p_file_input_end(self
, p
):
286 """file_input_end : file_input ENDMARKER"""
290 def p_file_input(self
, p
):
291 """file_input : file_input NEWLINE
295 if isinstance(p
[len(p
)-1], str):
299 p
[0] = [] # p == 2 --> only a blank line
306 # funcdef: [decorators] 'def' NAME parameters ':' suite
307 # ignoring decorators
309 def p_funcdef(self
, p
):
310 "funcdef : DEF NAME parameters COLON suite"
311 p
[0] = ast
.FunctionDef(p
[2], p
[3], p
[5], ())
313 # parameters: '(' [varargslist] ')'
314 def p_parameters(self
, p
):
315 """parameters : LPAR RPAR
316 | LPAR varargslist RPAR"""
321 p
[0] = ast
.arguments(args
=args
, vararg
=None, kwarg
=None, defaults
=[])
323 # varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] |
327 def p_varargslist(self
, p
):
328 """varargslist : varargslist COMMA NAME
335 # stmt: simple_stmt | compound_stmt
336 def p_stmt_simple(self
, p
):
337 """stmt : simple_stmt"""
338 # simple_stmt is a list
341 def p_stmt_compound(self
, p
):
342 """stmt : compound_stmt"""
345 # simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
346 def p_simple_stmt(self
, p
):
347 """simple_stmt : small_stmts NEWLINE
348 | small_stmts SEMICOLON NEWLINE"""
351 def p_small_stmts(self
, p
):
352 """small_stmts : small_stmts SEMICOLON small_stmt
356 elif isinstance(p
[1], list):
361 # small_stmt: expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
362 # import_stmt | global_stmt | exec_stmt | assert_stmt
363 def p_small_stmt(self
, p
):
364 """small_stmt : flow_stmt
367 if isinstance(p
[1], ast
.Call
):
368 p
[0] = ast
.Expr(p
[1])
369 elif isinstance(p
[1], ast
.Name
) and p
[1].id == 'TRAP':
370 # TRAP needs to actually be a function
371 name
= ast
.Name("self", ast
.Load())
372 name
= ast
.Attribute(name
, "TRAP", ast
.Load())
373 p
[0] = ast
.Call(name
, [], [])
377 # expr_stmt: testlist (augassign (yield_expr|testlist) |
378 # ('=' (yield_expr|testlist))*)
379 # augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
380 # '<<=' | '>>=' | '**=' | '//=')
381 def p_expr_stmt(self
, p
):
382 """expr_stmt : testlist ASSIGNEA testlist
383 | testlist ASSIGN testlist
385 print("expr_stmt", p
)
387 # a list of expressions
388 #p[0] = ast.Discard(p[1])
391 iea_mode
= p
[2] == '<-iea'
394 if isinstance(p
[1], ast
.Name
):
396 elif isinstance(p
[1], ast
.Subscript
):
397 if isinstance(p
[1].value
, ast
.Name
):
399 if name
in self
.gprs
:
400 # add to list of uninitialised
401 self
.uninit_regs
.add(name
)
402 autoassign
= (name
not in self
.declared_vars
and
403 name
not in self
.special_regs
)
404 elif isinstance(p
[1], ast
.Call
) and p
[1].func
.id in ['GPR', 'SPR']:
405 print(astor
.dump_tree(p
[1]))
406 # replace GPR(x) with GPR[x]
408 p
[1] = ast
.Subscript(p
[1].func
, idx
, ast
.Load())
409 elif isinstance(p
[1], ast
.Call
) and p
[1].func
.id == 'MEM':
411 print(astor
.dump_tree(p
[1]))
412 p
[1].func
.id = "memassign" # change function name to set
413 p
[1].args
.append(p
[3])
416 print(astor
.dump_tree(p
[0]))
420 print(astor
.dump_tree(p
[1]))
421 print("expr assign", name
, p
[1])
422 if name
and name
in self
.gprs
:
423 self
.write_regs
.add(name
) # add to list of regs to write
424 p
[0] = Assign(autoassign
, name
, p
[1], p
[3], iea_mode
)
426 self
.declared_vars
.add(name
)
428 def p_flow_stmt(self
, p
):
429 "flow_stmt : return_stmt"
432 # return_stmt: 'return' [testlist]
433 def p_return_stmt(self
, p
):
434 "return_stmt : RETURN testlist"
435 p
[0] = ast
.Return(p
[2])
437 def p_compound_stmt(self
, p
):
438 """compound_stmt : if_stmt
446 def p_break_stmt(self
, p
):
447 """break_stmt : BREAK
451 def p_for_stmt(self
, p
):
452 """for_stmt : FOR atom EQ test TO test COLON suite
453 | DO atom EQ test TO test COLON suite
457 if start
.value
> end
.value
: # start greater than end, must go -ve
458 # auto-subtract-one (sigh) due to python range
459 end
= ast
.BinOp(p
[6], ast
.Add(), ast
.Constant(-1))
460 arange
= [start
, end
, ast
.Constant(-1)]
462 # auto-add-one (sigh) due to python range
463 end
= ast
.BinOp(p
[6], ast
.Add(), ast
.Constant(1))
464 arange
= [start
, end
]
465 it
= ast
.Call(ast
.Name("range", ast
.Load()), arange
, [])
466 p
[0] = ast
.For(p
[2], it
, p
[8], [])
468 def p_while_stmt(self
, p
):
469 """while_stmt : DO WHILE test COLON suite ELSE COLON suite
470 | DO WHILE test COLON suite
473 p
[0] = ast
.While(p
[3], p
[5], [])
475 p
[0] = ast
.While(p
[3], p
[5], p
[8])
477 def p_switch_smt(self
, p
):
478 """switch_stmt : SWITCH LPAR atom RPAR COLON NEWLINE INDENT switches DEDENT
482 print(astor
.dump_tree(p
[1]))
485 current_cases
= [] # for deferral
486 for (case
, suite
) in p
[8]:
487 print("for", case
, suite
)
490 current_cases
.append(ast
.Num(c
))
492 if case
== 'default': # last
495 current_cases
.append(ast
.Num(c
))
496 print("cases", current_cases
)
497 compare
= ast
.Compare(switchon
, [ast
.In()],
498 [ast
.List(current_cases
, ast
.Load())])
500 cases
.append((compare
, suite
))
502 print("ended", case
, current_cases
)
503 if case
== 'default':
505 compare
= ast
.Compare(switchon
, [ast
.In()],
506 [ast
.List(current_cases
, ast
.Load())])
507 cases
.append((compare
, suite
))
508 cases
.append((None, suite
))
512 for compare
, suite
in cases
:
513 print("after rev", compare
, suite
)
515 assert len(res
) == 0, "last case should be default"
518 if not isinstance(res
, list):
520 res
= ast
.If(compare
, suite
, res
)
523 def p_switches(self
, p
):
524 """switches : switch_list switch_default
532 def p_switch_list(self
, p
):
533 """switch_list : switch_case switch_list
541 def p_switch_case(self
, p
):
542 """switch_case : CASE LPAR atomlist RPAR COLON suite
545 if isinstance(p
[6][0], ast
.Name
) and p
[6][0].id == 'fallthrough':
549 def p_switch_default(self
, p
):
550 """switch_default : DEFAULT COLON suite
552 p
[0] = ('default', p
[3])
554 def p_atomlist(self
, p
):
555 """atomlist : atom COMMA atomlist
558 assert isinstance(p
[1], ast
.Constant
), "case must be numbers"
560 p
[0] = [p
[1].value
] + p
[3]
564 def p_if_stmt(self
, p
):
565 """if_stmt : IF test COLON suite ELSE COLON if_stmt
566 | IF test COLON suite ELSE COLON suite
567 | IF test COLON suite
569 if len(p
) == 8 and isinstance(p
[7], ast
.If
):
570 p
[0] = ast
.If(p
[2], p
[4], [p
[7]])
572 p
[0] = ast
.If(p
[2], p
[4], [])
574 p
[0] = ast
.If(p
[2], p
[4], p
[7])
576 def p_suite(self
, p
):
577 """suite : simple_stmt
578 | NEWLINE INDENT stmts DEDENT"""
584 def p_stmts(self
, p
):
585 """stmts : stmts stmt
592 def p_comparison(self
, p
):
593 """comparison : comparison PLUS comparison
594 | comparison MINUS comparison
595 | comparison MULT comparison
596 | comparison DIV comparison
597 | comparison MOD comparison
598 | comparison EQ comparison
599 | comparison NE comparison
600 | comparison LE comparison
601 | comparison GE comparison
602 | comparison LTU comparison
603 | comparison GTU comparison
604 | comparison LT comparison
605 | comparison GT comparison
606 | comparison BITOR comparison
607 | comparison BITXOR comparison
608 | comparison BITAND comparison
612 | comparison APPEND comparison
617 p
[0] = ast
.Call(ast
.Name("ltu", ast
.Load()), (p
[1], p
[3]), [])
619 p
[0] = ast
.Call(ast
.Name("gtu", ast
.Load()), (p
[1], p
[3]), [])
621 l
= check_concat(p
[1]) + check_concat(p
[3])
622 p
[0] = ast
.Call(ast
.Name("concat", ast
.Load()), l
, [])
623 elif p
[2] in ['/', '%']:
624 # bad hack: if % or / used anywhere other than div/mod ops,
625 # do % or /. however if the argument names are "dividend"
626 # we must call the special trunc_divs and trunc_rems functions
628 # actual call will be "dividend / divisor" - just check
630 # XXX DISABLE BAD HACK (False)
631 if False and isinstance(l
, ast
.Name
) and l
.id == 'dividend':
636 # return "function trunc_xxx(l, r)"
637 p
[0] = ast
.Call(ast
.Name(fn
, ast
.Load()), (l
, r
), [])
639 # return "l {binop} r"
640 p
[0] = ast
.BinOp(p
[1], binary_ops
[p
[2]], p
[3])
641 elif p
[2] in ['<', '>', '=', '<=', '>=', '!=']:
642 p
[0] = binary_ops
[p
[2]]((p
[1], p
[3]))
643 elif identify_sint_mul_pattern(p
):
644 keywords
= [ast
.keyword(arg
='repeat', value
=p
[3])]
646 p
[0] = ast
.Call(ast
.Name("concat", ast
.Load()), l
, keywords
)
648 p
[0] = ast
.BinOp(p
[1], binary_ops
[p
[2]], p
[3])
650 if isinstance(p
[2], str) and p
[2] == '-':
651 p
[0] = ast
.UnaryOp(unary_ops
[p
[2]], p
[1])
653 p
[0] = ast
.UnaryOp(unary_ops
[p
[1]], p
[2])
657 # power: atom trailer* ['**' factor]
658 # trailers enables function calls (and subscripts).
659 # so this is 'trailerlist'
660 def p_power(self
, p
):
662 | atom trailerlist"""
666 print("power dump atom")
667 print(astor
.dump_tree(p
[1]))
668 print("power dump trailerlist")
669 print(astor
.dump_tree(p
[2]))
670 p
[0] = apply_trailer(p
[1], p
[2])
671 if isinstance(p
[1], ast
.Name
):
673 if name
in ['RA', 'RS', 'RB', 'RC']:
674 self
.read_regs
.add(name
)
676 def p_atom_name(self
, p
):
679 if name
in self
.available_op_fields
:
680 self
.op_fields
.add(name
)
681 if name
== 'overflow':
682 self
.write_regs
.add(name
)
683 if self
.include_ca_in_write
:
684 if name
in ['CA', 'CA32']:
685 self
.write_regs
.add(name
)
686 if name
in ['CR', 'LR', 'CTR', 'TAR', 'FPSCR', 'MSR']:
687 self
.special_regs
.add(name
)
688 self
.write_regs
.add(name
) # and add to list to write
689 p
[0] = ast
.Name(id=name
, ctx
=ast
.Load())
691 def p_atom_number(self
, p
):
696 p
[0] = ast
.Constant(p
[1])
698 # '[' [listmaker] ']' |
700 def p_atom_listmaker(self
, p
):
701 """atom : LBRACK listmaker RBRACK"""
704 def p_listmaker(self
, p
):
705 """listmaker : test COMMA listmaker
709 p
[0] = ast
.List([p
[1]], ast
.Load())
711 p
[0] = ast
.List([p
[1]] + p
[3].nodes
, ast
.Load())
713 def p_atom_tuple(self
, p
):
714 """atom : LPAR testlist RPAR"""
717 print(astor
.dump_tree(p
[2]))
719 if isinstance(p
[2], ast
.Name
):
721 print("tuple name", name
)
722 if name
in self
.gprs
:
723 self
.read_regs
.add(name
) # add to list of regs to read
724 #p[0] = ast.Subscript(ast.Name("GPR", ast.Load()), ast.Str(p[2].id))
727 elif isinstance(p
[2], ast
.BinOp
):
728 if isinstance(p
[2].left
, ast
.Name
) and \
729 isinstance(p
[2].right
, ast
.Constant
) and \
730 p
[2].right
.value
== 0 and \
731 p
[2].left
.id in self
.gprs
:
733 self
.read_regs
.add(rid
) # add to list of regs to read
734 # create special call to GPR.getz
735 gprz
= ast
.Name("GPR", ast
.Load())
736 # get testzero function
737 gprz
= ast
.Attribute(gprz
, "getz", ast
.Load())
738 # *sigh* see class GPR. we need index itself not reg value
739 ridx
= ast
.Name("_%s" % rid
, ast
.Load())
740 p
[0] = ast
.Call(gprz
, [ridx
], [])
741 print("tree", astor
.dump_tree(p
[0]))
747 def p_trailerlist(self
, p
):
748 """trailerlist : trailer trailerlist
754 p
[0] = ("TLIST", p
[1], p
[2])
756 # trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
757 def p_trailer(self
, p
):
758 """trailer : trailer_arglist
763 def p_trailer_arglist(self
, p
):
764 "trailer_arglist : LPAR arglist RPAR"
765 p
[0] = ("CALL", p
[2])
767 def p_trailer_subscript(self
, p
):
768 "trailer_subscript : LBRACK subscript RBRACK"
769 p
[0] = ("SUBS", p
[2])
771 # subscript: '.' '.' '.' | test | [test] ':' [test]
773 def p_subscript(self
, p
):
774 """subscript : test COLON test
779 if isinstance(p
[3], ast
.Constant
):
780 end
= ast
.Constant(p
[3].value
+1)
782 end
= ast
.BinOp(p
[3], ast
.Add(), ast
.Constant(1))
787 # testlist: test (',' test)* [',']
788 # Contains shift/reduce error
790 def p_testlist(self
, p
):
791 """testlist : testlist_multi COMMA
796 # May need to promote singleton to tuple
797 if isinstance(p
[1], list):
801 # Convert into a tuple?
802 if isinstance(p
[0], list):
803 p
[0] = ast
.Tuple(p
[0])
805 def p_testlist_multi(self
, p
):
806 """testlist_multi : testlist_multi COMMA test
812 if isinstance(p
[1], list):
818 # test: or_test ['if' or_test 'else' test] | lambdef
819 # as I don't support 'and', 'or', and 'not' this works down to 'comparison'
825 # arglist: (argument ',')* (argument [',']| '*' test [',' '**' test]
827 # XXX INCOMPLETE: this doesn't allow the trailing comma
829 def p_arglist(self
, p
):
830 """arglist : arglist COMMA argument
837 # argument: test [gen_for] | test '=' test # Really [keyword '='] test
838 def p_argument(self
, p
):
842 def p_error(self
, p
):
843 # print "Error!", repr(p)
847 class GardenSnakeParser(PowerParser
):
848 def __init__(self
, lexer
=None, debug
=False, form
=None, incl_carry
=False):
849 self
.sd
= create_pdecode()
850 PowerParser
.__init
__(self
, form
, incl_carry
)
853 lexer
= IndentLexer(debug
=0)
855 self
.tokens
= lexer
.tokens
856 self
.parser
= yacc
.yacc(module
=self
, start
="file_input_end",
857 debug
=debug
, write_tables
=False)
859 def parse(self
, code
):
860 # self.lexer.input(code)
861 result
= self
.parser
.parse(code
, lexer
=self
.lexer
, debug
=self
.debug
)
862 return ast
.Module(result
)
865 ###### Code generation ######
867 #from compiler import misc, syntax, pycodegen
870 _CACHE_PARSERS
= True
873 class GardenSnakeCompiler(object):
874 def __init__(self
, debug
=False, form
=None, incl_carry
=False):
877 parser
= _CACHED_PARSERS
[debug
, form
, incl_carry
]
879 parser
= GardenSnakeParser(debug
=debug
, form
=form
,
880 incl_carry
=incl_carry
)
881 _CACHED_PARSERS
[debug
, form
, incl_carry
] = parser
883 self
.parser
= deepcopy(parser
)
885 self
.parser
= GardenSnakeParser(debug
=debug
, form
=form
,
886 incl_carry
=incl_carry
)
888 def compile(self
, code
, mode
="exec", filename
="<string>"):
889 tree
= self
.parser
.parse(code
)
893 #misc.set_filename(filename, tree)
894 return compile(tree
, mode
="exec", filename
="<string>")
896 gen
= pycodegen
.ModuleCodeGenerator(tree
)