1 # Based on GardenSnake - a parser generator demonstration program
2 # GardenSnake was released into the Public Domain by Andrew Dalke.
4 # Portions of this work are derived from Python's Grammar definition
5 # and may be covered under the Python copyright and license
7 # Andrew Dalke / Dalke Scientific Software, LLC
8 # 30 August 2006 / Cape Town, South Africa
10 # Modifications for inclusion in PLY distribution
import ast
from pprint import pprint

from ply import lex, yacc
from nmigen import Module, Signal
from nmigen.back.pysim import Simulator, Delay

from soc.decoder.power_decoder import create_pdecode
from soc.decoder.pseudo.lexer import IndentLexer
23 # I use the Python AST
24 #from compiler import ast
def Assign(left, right):
    """Build an ``ast.Assign`` node that assigns *right* to *left*.

    *left* may be a single ``ast.Name`` or an ``ast.Tuple`` of Names
    (multiple-target assignment).  Any other target raises SyntaxError.
    """
    if isinstance(left, ast.Name):
        # Single assignment on left
        return ast.Assign([ast.Name(left.id, ast.Store())], right)
    elif isinstance(left, ast.Tuple):
        # List of things - make sure they are Name nodes.
        # NOTE(review): the original used the long-removed ``compiler.ast``
        # API (getChildren / .name / AssName / AssTuple), which raises
        # AttributeError under the stdlib ``ast`` module; rewritten to use
        # .elts / .id and Name/Tuple nodes with a Store context.
        names = []
        for child in left.elts:
            if not isinstance(child, ast.Name):
                raise SyntaxError("that assignment not supported")
            names.append(child.id)
        ass_list = [ast.Name(name, ast.Store()) for name in names]
        return ast.Assign([ast.Tuple(ass_list, ast.Store())], right)
    else:
        raise SyntaxError("Can't do that yet")
46 ## I implemented INDENT / DEDENT generation as a post-processing filter
48 # The original lex token stream contains WS and NEWLINE characters.
49 # WS will only occur before any other tokens on a line.
51 # I have three filters. One tags tokens by adding two attributes.
52 # "must_indent" is True if the token must be indented from the
53 # previous code. The other is "at_line_start" which is True for WS
# and the first non-WS/non-NEWLINE token on a line.  It is used to check
# whether the new line has changed indentation level.
# Not using Python's approach because Ply supports precedence
60 # comparison: expr (comp_op expr)*
61 # arith_expr: term (('+'|'-') term)*
62 # term: factor (('*'|'/'|'%'|'//') factor)*
63 # factor: ('+'|'-'|'~') factor | power
64 # comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
def make_lt_compare(arg):
    """Return an ``ast.Compare`` node for ``left < right``.

    *arg* is a ``(left, right)`` pair of AST expression nodes.
    NOTE(review): the original body referenced unbound names
    ``left``/``right``; the tuple argument is now unpacked first.
    """
    (left, right) = arg
    return ast.Compare(left, [ast.Lt()], [right])
def make_gt_compare(arg):
    """Return an ``ast.Compare`` node for ``left > right``.

    *arg* is a ``(left, right)`` pair of AST expression nodes.
    NOTE(review): the original body referenced unbound names
    ``left``/``right``; the tuple argument is now unpacked first.
    """
    (left, right) = arg
    return ast.Compare(left, [ast.Gt()], [right])
def make_eq_compare(arg):
    """Return an ``ast.Compare`` node for ``left == right``.

    *arg* is a ``(left, right)`` pair of AST expression nodes.
    NOTE(review): the original body referenced unbound names
    ``left``/``right``; the tuple argument is now unpacked first.
    """
    (left, right) = arg
    return ast.Compare(left, [ast.Eq()], [right])
def check_concat(node):
    """Flatten *node* into a list of arguments for a ``concat()`` call.

    If *node* is already a call to ``concat``, return its argument list so
    nested concats collapse into a single call; otherwise wrap the node in
    a one-element list.
    NOTE(review): the original subscripted the Call node (``node[0].id``),
    which raises TypeError; the callee is ``node.func``.
    """
    # checks if the comparison is already a concat
    if not isinstance(node, ast.Call):
        return [node]
    if node.func.id != 'concat':
        return [node]
    return node.args
99 ########## Parser (tokens -> AST) ######
107 ("left", "EQ", "GT", "LT"),
108 ("left", "PLUS", "MINUS"),
109 ("left", "MULT", "DIV"),
114 for rname
in ['RA', 'RB', 'RC', 'RT', 'RS']:
115 self
.gprs
[rname
] = None
119 # The grammar comments come from Python's Grammar/Grammar file
121 ## NB: compound_stmt in single_input is followed by extra NEWLINE!
122 # file_input: (NEWLINE | stmt)* ENDMARKER
124 def p_file_input_end(self
, p
):
125 """file_input_end : file_input ENDMARKER"""
129 def p_file_input(self
, p
):
130 """file_input : file_input NEWLINE
134 if isinstance(p
[len(p
)-1], str):
138 p
[0] = [] # p == 2 --> only a blank line
146 # funcdef: [decorators] 'def' NAME parameters ':' suite
147 # ignoring decorators
148 def p_funcdef(self
, p
):
149 "funcdef : DEF NAME parameters COLON suite"
150 p
[0] = ast
.FunctionDef(p
[2], p
[3], p
[5], ())
152 # parameters: '(' [varargslist] ')'
153 def p_parameters(self
, p
):
154 """parameters : LPAR RPAR
155 | LPAR varargslist RPAR"""
160 p
[0] = ast
.arguments(args
=args
, vararg
=None, kwarg
=None, defaults
=[])
163 # varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] |
166 def p_varargslist(self
, p
):
167 """varargslist : varargslist COMMA NAME
174 # stmt: simple_stmt | compound_stmt
175 def p_stmt_simple(self
, p
):
176 """stmt : simple_stmt"""
177 # simple_stmt is a list
180 def p_stmt_compound(self
, p
):
181 """stmt : compound_stmt"""
184 # simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
185 def p_simple_stmt(self
, p
):
186 """simple_stmt : small_stmts NEWLINE
187 | small_stmts SEMICOLON NEWLINE"""
190 def p_small_stmts(self
, p
):
191 """small_stmts : small_stmts SEMICOLON small_stmt
198 # small_stmt: expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
199 # import_stmt | global_stmt | exec_stmt | assert_stmt
200 def p_small_stmt(self
, p
):
201 """small_stmt : flow_stmt
204 if isinstance(p
[1], ast
.Call
):
205 p
[0] = ast
.Expr(p
[1])
209 # expr_stmt: testlist (augassign (yield_expr|testlist) |
210 # ('=' (yield_expr|testlist))*)
211 # augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
212 # '<<=' | '>>=' | '**=' | '//=')
213 def p_expr_stmt(self
, p
):
214 """expr_stmt : testlist ASSIGN testlist
217 # a list of expressions
218 #p[0] = ast.Discard(p[1])
221 if p
[1].id in self
.gprs
:
222 self
.write_regs
.append(p
[1].id) # add to list of regs to write
223 p
[0] = Assign(p
[1], p
[3])
225 def p_flow_stmt(self
, p
):
226 "flow_stmt : return_stmt"
229 # return_stmt: 'return' [testlist]
230 def p_return_stmt(self
, p
):
231 "return_stmt : RETURN testlist"
232 p
[0] = ast
.Return(p
[2])
235 def p_compound_stmt(self
, p
):
236 """compound_stmt : if_stmt
243 def p_break_stmt(self
, p
):
244 """break_stmt : BREAK
248 def p_for_stmt(self
, p
):
249 """for_stmt : FOR test EQ test TO test COLON suite
251 p
[0] = ast
.While(p
[2], p
[4], [])
252 # auto-add-one (sigh) due to python range
254 end
= ast
.BinOp(p
[6], ast
.Add(), ast
.Constant(1))
255 it
= ast
.Call(ast
.Name("range"), [start
, end
], [])
256 p
[0] = ast
.For(p
[2], it
, p
[8], [])
258 def p_while_stmt(self
, p
):
259 """while_stmt : DO WHILE test COLON suite ELSE COLON suite
260 | DO WHILE test COLON suite
263 p
[0] = ast
.While(p
[3], p
[5], [])
265 p
[0] = ast
.While(p
[3], p
[5], p
[8])
267 def p_if_stmt(self
, p
):
268 """if_stmt : IF test COLON suite ELSE COLON suite
269 | IF test COLON suite
272 p
[0] = ast
.If(p
[2], p
[4], [])
274 p
[0] = ast
.If(p
[2], p
[4], p
[7])
276 def p_suite(self
, p
):
277 """suite : simple_stmt
278 | NEWLINE INDENT stmts DEDENT"""
285 def p_stmts(self
, p
):
286 """stmts : stmts stmt
293 def p_comparison(self
, p
):
294 """comparison : comparison PLUS comparison
295 | comparison MINUS comparison
296 | comparison MULT comparison
297 | comparison DIV comparison
298 | comparison LT comparison
299 | comparison EQ comparison
300 | comparison GT comparison
303 | comparison APPEND comparison
308 l
= check_concat(p
[1]) + check_concat(p
[3])
309 p
[0] = ast
.Call(ast
.Name("concat"), l
, [])
310 elif p
[2] in ['<', '>', '=']:
311 p
[0] = binary_ops
[p
[2]]((p
[1],p
[3]))
313 p
[0] = ast
.BinOp(p
[1], binary_ops
[p
[2]], p
[3])
315 p
[0] = unary_ops
[p
[1]](p
[2])
319 # power: atom trailer* ['**' factor]
320 # trailers enables function calls (and subscripts).
321 # I only allow one level of calls
322 # so this is 'trailer'
323 def p_power(self
, p
):
329 if p
[2][0] == "CALL":
330 #p[0] = ast.Expr(ast.Call(p[1], p[2][1], []))
331 p
[0] = ast
.Call(p
[1], p
[2][1], [])
332 #if p[1].id == 'print':
333 # p[0] = ast.Printnl(ast.Tuple(p[2][1]), None, None)
335 # p[0] = ast.CallFunc(p[1], p[2][1], None, None)
338 #raise AssertionError("not implemented %s" % p[2][0])
343 idx
= ast
.Slice(subs
[0], subs
[1], None)
344 p
[0] = ast
.Subscript(p
[1], idx
)
346 def p_atom_name(self
, p
):
348 p
[0] = ast
.Name(p
[1], ctx
=ast
.Load())
350 def p_atom_number(self
, p
):
354 p
[0] = ast
.Constant(p
[1])
356 #'[' [listmaker] ']' |
358 def p_atom_listmaker(self
, p
):
359 """atom : LBRACK listmaker RBRACK"""
362 def p_listmaker(self
, p
):
363 """listmaker : test COMMA listmaker
367 p
[0] = ast
.List([p
[1]])
369 p
[0] = ast
.List([p
[1]] + p
[3].nodes
)
371 def p_atom_tuple(self
, p
):
372 """atom : LPAR testlist RPAR"""
373 print ("tuple", p
[2])
374 if isinstance(p
[2], ast
.Name
):
375 print ("tuple name", p
[2].id)
376 if p
[2].id in self
.gprs
:
377 self
.read_regs
.append(p
[2].id) # add to list of regs to read
378 #p[0] = ast.Subscript(ast.Name("GPR"), ast.Str(p[2].id))
382 # trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
383 def p_trailer(self
, p
):
384 """trailer : trailer_arglist
389 def p_trailer_arglist(self
, p
):
390 "trailer_arglist : LPAR arglist RPAR"
391 p
[0] = ("CALL", p
[2])
393 def p_trailer_subscript(self
, p
):
394 "trailer_subscript : LBRACK subscript RBRACK"
395 p
[0] = ("SUBS", p
[2])
397 #subscript: '.' '.' '.' | test | [test] ':' [test]
399 def p_subscript(self
, p
):
400 """subscript : test COLON test
409 # testlist: test (',' test)* [',']
410 # Contains shift/reduce error
411 def p_testlist(self
, p
):
412 """testlist : testlist_multi COMMA
417 # May need to promote singleton to tuple
418 if isinstance(p
[1], list):
422 # Convert into a tuple?
423 if isinstance(p
[0], list):
424 p
[0] = ast
.Tuple(p
[0])
426 def p_testlist_multi(self
, p
):
427 """testlist_multi : testlist_multi COMMA test
433 if isinstance(p
[1], list):
440 # test: or_test ['if' or_test 'else' test] | lambdef
441 # as I don't support 'and', 'or', and 'not' this works down to 'comparison'
448 # arglist: (argument ',')* (argument [',']| '*' test [',' '**' test]
450 # XXX INCOMPLETE: this doesn't allow the trailing comma
451 def p_arglist(self
, p
):
452 """arglist : arglist COMMA argument
459 # argument: test [gen_for] | test '=' test # Really [keyword '='] test
460 def p_argument(self
, p
):
464 def p_error(self
, p
):
465 #print "Error!", repr(p)
class GardenSnakeParser(PowerParser):
    """PowerParser wired to an IndentLexer and a ply-yacc LALR parser.

    Also constructs the power decoder (``create_pdecode``) so callers can
    reach the instruction signal-forms via ``self.sd``.
    """

    def __init__(self, lexer=None):
        PowerParser.__init__(self)
        # NOTE(review): as previously written, ``self.lexer`` was never
        # assigned even though parse() reads it, and the ``lexer=None``
        # default was ignored; honour a caller-supplied lexer and fall
        # back to a fresh IndentLexer.
        if lexer is None:
            lexer = IndentLexer(debug=1)
        self.lexer = lexer
        self.tokens = lexer.tokens
        self.parser = yacc.yacc(module=self, start="file_input_end",
                                debug=False, write_tables=False)
        self.sd = create_pdecode()

    def parse(self, code):
        """Tokenise and parse *code*, returning an ``ast.Module``."""
        self.lexer.input(code)
        result = self.parser.parse(lexer=self.lexer, debug=False)
        return ast.Module(result)
487 ###### Code generation ######
489 #from compiler import misc, syntax, pycodegen
class GardenSnakeCompiler(object):
    """Compile GardenSnake pseudocode source to a Python code object."""

    def __init__(self):
        self.parser = GardenSnakeParser()

    def compile(self, code, mode="exec", filename="<string>"):
        """Parse *code* and compile the resulting AST.

        *mode* and *filename* are accepted for interface compatibility,
        but the compile step always uses mode="exec" and
        filename="<string>", exactly as before.
        """
        tree = self.parser.parse(code)
        #misc.set_filename(filename, tree)
        # NOTE(review): removed dead statements that followed this return;
        # they referenced ``pycodegen`` (from the removed ``compiler``
        # package) and could never execute.
        return compile(tree, mode="exec", filename="<string>")
506 ####### Test code #######
511 index <- (RS)[8*i:8*i+7]
512 RA <- [0]*56 || perm[0:7]
517 if index < 64 then index <- 0
528 index <- (RS)[8*i:8*i+7]
533 RA <- [0]*56|| perm[0:7]
539 if (RS)[63-n] = 0b1 then
550 lexer
= IndentLexer(debug
=1)
551 # Give the lexer some input
560 break # No more input
568 l
.append(1 if (num
& (1<<i
)) else 0)
def get_reg_hex(reg):
    """Render a register (an iterable of bit values, MSB first) as a
    hex string, e.g. ``[1, 0, 1, 0]`` -> ``'0xa'``."""
    bits = ''.join(str(bit) for bit in reg)
    return hex(int(bits, 2))
578 gsc
= GardenSnakeCompiler()
580 def __init__(self
, sd
, regfile
):
583 self
.regfile
= regfile
587 def set_form(self
, form
):
590 def ___getitem__(self
, attr
):
591 print ("GPR getitem", attr
)
592 getform
= self
.sd
.sigforms
[self
.form
]
593 rnum
= getattr(getform
, attr
)
594 print ("GPR get", rnum
, rnum
, dir(rnum
))
596 print (l
[0]._as
_const
())
598 #print (x, x.value, dir(x))
599 #print (x.value, dir(x.value))
601 return self
.regfile
[rnum
]
607 gsc
.gpr
= GPR(gsc
.parser
.sd
, gsc
.regfile
)
609 _compile
= gsc
.compile
612 tree
= _compile(code
, mode
="single", filename
="string")
614 tree
= ast
.fix_missing_locations(tree
)
615 print ( ast
.dump(tree
) )
618 print (astor
.dump_tree(tree
))
620 source
= astor
.to_source(tree
)
625 # Set up the GardenSnake run-time environment
628 print ("-->", " ".join(map(str,args
)))
630 def listconcat(l1
, l2
):
633 from soc
.decoder
.helpers
import (EXTS64
, EXTZ64
, ROTL64
, ROTL32
, MASK
,)
639 d
["concat"] = listconcat
643 gsc
.gpr
.set_form(form
)
644 getform
= gsc
.parser
.sd
.sigforms
[form
]._asdict
()
645 #print ("getform", form)
646 #for k, f in getform.items():
650 compiled_code
= compile(source
, mode
="exec", filename
="<string>")
654 instruction
= Signal(32)
656 m
.submodules
.decode
= decode
= gsc
.parser
.sd
657 comb
+= decode
.raw_opcode_in
.eq(instruction
)
664 print("0x{:X}".format(ins
& 0xffffffff))
666 # ask the decoder to decode this binary data (endian'd)
667 yield decode
.bigendian
.eq(0) # little / big?
668 yield instruction
.eq(ins
) # raw binary instr.
671 # read regs, drop them into dict for function
672 for rname
in gsc
.parser
.read_regs
:
673 regidx
= yield getattr(decode
.sigforms
['X'], rname
)
674 d
[rname
] = gsc
.gpr
[regidx
]
675 print ("read reg", rname
, regidx
, get_reg_hex(d
[rname
]))
677 exec (compiled_code
, d
)
682 print (decode
.sigforms
['X'])
683 x
= yield decode
.sigforms
['X'].RS
684 ra
= yield decode
.sigforms
['X'].RA
685 print ("RA", ra
, d
['RA'])
688 for wname
in gsc
.parser
.write_regs
:
690 print ("write regs", wname
, d
[wname
], reg
)
692 gsc
.gpr
[regidx
] = tolist(d
[wname
])
694 sim
.add_process(process
)
695 with sim
.write_vcd("simulator.vcd", "simulator.gtkw",
696 traces
=[decode
.ports()]):
699 for i
in range(len(gsc
.gpr
)):
700 print ("regfile", i
, get_reg_hex(gsc
.gpr
[i
]))
703 if __name__
== '__main__':