#code = addpcis
#code = bpermd
+
def tolist(num):
l = []
for i in range(64):
- l.append(1 if (num & (1<<i)) else 0)
+ l.append(1 if (num & (1 << i)) else 0)
l.reverse()
return l
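
A quick sanity check of tolist(), illustrative only and not part of the patch: the reverse() leaves the list MSB-first, which matches the MSB0 bit numbering used by the PowerISA pseudocode.

bits = tolist(1)
assert bits[63] == 1 and sum(bits) == 1   # LSB ends up at index 63
bits = tolist(1 << 63)
assert bits[0] == 1 and sum(bits) == 1    # MSB ends up at index 0
assert len(tolist(0)) == 64
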
self.form = form
def ___getitem__(self, attr):
- print ("GPR getitem", attr)
+ print("GPR getitem", attr)
getform = self.sd.sigforms[self.form]
rnum = getattr(getform, attr)
- print ("GPR get", rnum, rnum, dir(rnum))
+ print("GPR get", rnum, rnum, dir(rnum))
l = list(rnum)
- print (l[0]._as_const())
- #for x in rnum:
- #print (x, x.value, dir(x))
- #print (x.value, dir(x.value))
- print (list(rnum))
+ print(l[0]._as_const())
+ # for x in rnum:
+ #print (x, x.value, dir(x))
+ #print (x.value, dir(x.value))
+ print(list(rnum))
return self.regfile[rnum]
tree = _compile(code, mode="single", filename="string")
tree = ast.fix_missing_locations(tree)
- print ( ast.dump(tree) )
+ print(ast.dump(tree))
- print ("astor dump")
- print (astor.dump_tree(tree))
- print ("to source")
+ print("astor dump")
+ print(astor.dump_tree(tree))
+ print("to source")
source = astor.to_source(tree)
- print (source)
+ print(source)
- #sys.exit(0)
+ # sys.exit(0)
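
For readers unfamiliar with astor, here is a minimal sketch of the same dump / to_source round trip on a hand-built tree (assumes Python 3.8+ and the astor package; illustrative only, not part of the patch):

import ast
import astor

# hand-built AST equivalent to "x = 1 + 2"
tree = ast.Module(body=[ast.Assign(targets=[ast.Name("x", ast.Store())],
                                   value=ast.BinOp(ast.Constant(1), ast.Add(),
                                                   ast.Constant(2)))],
                  type_ignores=[])
tree = ast.fix_missing_locations(tree)
print(ast.dump(tree))           # raw node structure
print(astor.to_source(tree))    # regenerated source: "x = 1 + 2"
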
# Set up the GardenSnake run-time environment
def print_(*args):
- print ("args", args)
- print ("-->", " ".join(map(str,args)))
+ print("args", args)
+ print("-->", " ".join(map(str, args)))
from soc.decoder.helpers import (EXTS64, EXTZ64, ROTL64, ROTL32, MASK,)
gsc.gpr.set_form(form)
getform = gsc.parser.sd.sigforms[form]._asdict()
#print ("getform", form)
- #for k, f in getform.items():
- #print (k, f)
- #d[k] = getform[k]
+ # for k, f in getform.items():
+ #print (k, f)
+ #d[k] = getform[k]
compiled_code = compile(source, mode="exec", filename="<string>")
comb = m.d.comb
instruction = Signal(32)
- m.submodules.decode = decode = gsc.parser.sd
+ m.submodules.decode = decode = gsc.parser.sd
comb += decode.raw_opcode_in.eq(instruction)
sim = Simulator(m)
# uninitialised regs, drop them into dict for function
for rname in gsc.parser.uninit_regs:
- d[rname] = SelectableInt(0, 64) # uninitialised (to zero)
- print ("uninitialised", rname, get_reg_hex(d[rname]))
+ d[rname] = SelectableInt(0, 64) # uninitialised (to zero)
+ print("uninitialised", rname, get_reg_hex(d[rname]))
# read regs, drop them into dict for function
for rname in gsc.parser.read_regs:
regidx = yield getattr(decode.sigforms['X'], rname)
d[rname] = gsc.gpr[regidx]
- print ("read reg", rname, regidx, get_reg_hex(d[rname]))
+ print("read reg", rname, regidx, get_reg_hex(d[rname]))
- exec (compiled_code, d) # code gets executed here in dict "d"
- print ("Done")
+ exec(compiled_code, d) # code gets executed here in dict "d"
+ print("Done")
- print (d.keys()) # shows the variables that may have been created
+ print(d.keys()) # shows the variables that may have been created
- print (decode.sigforms['X'])
+ print(decode.sigforms['X'])
x = yield decode.sigforms['X'].RS
ra = yield decode.sigforms['X'].RA
- print ("RA", ra, d['RA'])
- print ("RS", x)
+ print("RA", ra, d['RA'])
+ print("RS", x)
for wname in gsc.parser.write_regs:
reg = getform[wname]
- print ("write regs", wname, d[wname], reg)
+ print("write regs", wname, d[wname], reg)
regidx = yield reg
gsc.gpr[regidx] = d[wname]
sim.run()
for i in range(len(gsc.gpr)):
- print ("regfile", i, get_reg_hex(gsc.gpr[i]))
+ print("regfile", i, get_reg_hex(gsc.gpr[i]))
if __name__ == '__main__':
import ast
# Helper function
+
+
def Assign(left, right):
names = []
- print ("Assign", left, right)
+ print("Assign", left, right)
if isinstance(left, ast.Name):
# Single assignment on left
# XXX when doing IntClass, which will have an "eq" function,
ls = left.slice
if isinstance(ls, ast.Slice):
lower, upper, step = ls.lower, ls.upper, ls.step
- print ("slice assign", lower, upper, step)
+ print("slice assign", lower, upper, step)
if step is None:
ls = (lower, upper, None)
else:
return ast.Call(ast.Name("selectassign"),
[left.value, ls, right], [])
else:
- print ("Assign fail")
+ print("Assign fail")
raise SyntaxError("Can't do that yet")
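
Assign() rewrites a slice on the left-hand side into a call to a runtime selectassign helper rather than a plain ast.Assign. A rough sketch of the contract that rewrite assumes, with the call shape (target, indices, value) taken from the ast.Call above; the real helper is supplied by the runtime elsewhere in the tree, so this version is hypothetical:

def selectassign(lhs, idx, rhs):
    # idx is either a single index or a (lower, upper, step) tuple
    if isinstance(idx, tuple):
        lower, upper, step = idx
        lhs[lower:upper:step] = rhs   # delegate to SelectableInt-style slicing
    else:
        lhs[idx] = rhs
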
-## I implemented INDENT / DEDENT generation as a post-processing filter
+# I implemented INDENT / DEDENT generation as a post-processing filter
# The original lex token stream contains WS and NEWLINE characters.
# WS will only occur before any other tokens on a line.
# see if the new line has changed indentation level.
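
The comment block above describes the usual Ply workaround: lex itself has no notion of indentation, so a post-processing pass over the token stream synthesises INDENT/DEDENT tokens. A bare-bones sketch of that idea (not the actual filter used here, which also has to track the WS and NEWLINE bookkeeping):

def indent_filter(lines):
    # lines is assumed to be an iterable of (indent_width, tokens_on_line)
    levels = [0]
    for width, toks in lines:
        if width > levels[-1]:        # deeper indent: open a block
            levels.append(width)
            yield "INDENT"
        while width < levels[-1]:     # shallower indent: close blocks
            levels.pop()
            yield "DEDENT"
        yield from toks
    while len(levels) > 1:            # close anything still open at EOF
        levels.pop()
        yield "DEDENT"
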
-## No using Python's approach because Ply supports precedence
+# Not using Python's approach because Ply supports precedence
# comparison: expr (comp_op expr)*
# arith_expr: term (('+'|'-') term)*
def make_le_compare(arg):
(left, right) = arg
return ast.Compare(left, [ast.LtE()], [right])
+
+
def make_ge_compare(arg):
(left, right) = arg
return ast.Compare(left, [ast.GtE()], [right])
+
+
def make_lt_compare(arg):
(left, right) = arg
return ast.Compare(left, [ast.Lt()], [right])
+
+
def make_gt_compare(arg):
(left, right) = arg
return ast.Compare(left, [ast.Gt()], [right])
+
+
def make_eq_compare(arg):
(left, right) = arg
return ast.Compare(left, [ast.Eq()], [right])
+
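
Each of these helpers wraps a single ast.Compare node. For instance (illustrative only, relying on the module's existing "import ast"), the pseudocode comparison "RA = 0" comes through make_eq_compare as:

node = make_eq_compare((ast.Name("RA", ast.Load()), ast.Constant(0)))
# the same Compare node that ast.parse("RA == 0") builds for its expression
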
binary_ops = {
    "&": ast.BitAnd(),
    "|": ast.BitOr(),
    "+": ast.Add(),
    "-": ast.Sub(),
    "<=": make_le_compare,
    ">=": make_ge_compare,
    "<": make_lt_compare,
    ">": make_gt_compare,
    "=": make_eq_compare,
    "¬": ast.Invert(),
- }
+}
+
-def check_concat(node): # checks if the comparison is already a concat
- print ("check concat", node)
+def check_concat(node): # checks if the comparison is already a concat
+ print("check concat", node)
if not isinstance(node, ast.Call):
return [node]
- print ("func", node.func.id)
+ print("func", node.func.id)
if node.func.id != 'concat':
return [node]
return node.args
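
check_concat() is what lets chained "||" concatenations collapse into one call; an illustrative trace (not part of the patch):

a, b, c = (ast.Name(n, ast.Load()) for n in "abc")
inner = ast.Call(ast.Name("concat", ast.Load()), [a, b], [])
args = check_concat(inner) + check_concat(c)                 # -> [a, b, c]
outer = ast.Call(ast.Name("concat", ast.Load()), args, [])   # concat(a, b, c)
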
class PowerParser:
precedence = (
- ("left", "BITOR", "BITAND"),
("left", "EQ", "GT", "LT", "LE", "GE", "LTU", "GTU"),
+ ("left", "BITOR"),
+ ("left", "BITAND"),
("left", "PLUS", "MINUS"),
("left", "MULT", "DIV"),
("left", "INVERT"),
- )
+ )
def __init__(self):
self.gprs = {}
# The grammar comments come from Python's Grammar/Grammar file
- ## NB: compound_stmt in single_input is followed by extra NEWLINE!
+ # NB: compound_stmt in single_input is followed by extra NEWLINE!
# file_input: (NEWLINE | stmt)* ENDMARKER
def p_file_input_end(self, p):
"""file_input_end : file_input ENDMARKER"""
- print ("end", p[1])
+ print("end", p[1])
p[0] = p[1]
def p_file_input(self, p):
if len(p) == 3:
p[0] = p[1]
else:
- p[0] = [] # p == 2 --> only a blank line
+ p[0] = [] # p == 2 --> only a blank line
else:
if len(p) == 3:
p[0] = p[1] + p[2]
else:
p[0] = p[1]
-
# funcdef: [decorators] 'def' NAME parameters ':' suite
# ignoring decorators
+
def p_funcdef(self, p):
"funcdef : DEF NAME parameters COLON suite"
p[0] = ast.FunctionDef(p[2], p[3], p[5], ())
"""parameters : LPAR RPAR
| LPAR varargslist RPAR"""
if len(p) == 3:
- args=[]
+ args = []
else:
args = p[2]
p[0] = ast.arguments(args=args, vararg=None, kwarg=None, defaults=[])
-
# varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] |
# '**' NAME) |
# highly simplified
+
def p_varargslist(self, p):
"""varargslist : varargslist COMMA NAME
| NAME"""
def p_expr_stmt(self, p):
"""expr_stmt : testlist ASSIGN testlist
| testlist """
- print ("expr_stmt", p)
+ print("expr_stmt", p)
if len(p) == 2:
# a list of expressions
#p[0] = ast.Discard(p[1])
elif isinstance(p[1], ast.Subscript):
name = p[1].value.id
if name in self.gprs:
- self.uninit_regs.append(name) # add to list of uninitialised
- print ("expr assign", name, p[1])
+ # add to list of uninitialised
+ self.uninit_regs.append(name)
+ print("expr assign", name, p[1])
if name in self.gprs:
- self.write_regs.append(name) # add to list of regs to write
+ self.write_regs.append(name) # add to list of regs to write
p[0] = Assign(p[1], p[3])
def p_flow_stmt(self, p):
"return_stmt : RETURN testlist"
p[0] = ast.Return(p[2])
-
def p_compound_stmt(self, p):
"""compound_stmt : if_stmt
| while_stmt
else:
p[0] = p[3]
-
def p_stmts(self, p):
"""stmts : stmts stmt
| stmt"""
| comparison APPEND comparison
| power"""
if len(p) == 4:
- print (list(p))
+ print(list(p))
if p[2] == '<u':
p[0] = ast.Call(ast.Name("ltu"), (p[1], p[3]), [])
elif p[2] == '>u':
l = check_concat(p[1]) + check_concat(p[3])
p[0] = ast.Call(ast.Name("concat"), l, [])
elif p[2] in ['<', '>', '=', '<=', '>=']:
- p[0] = binary_ops[p[2]]((p[1],p[3]))
+ p[0] = binary_ops[p[2]]((p[1], p[3]))
else:
p[0] = ast.BinOp(p[1], binary_ops[p[2]], p[3])
elif len(p) == 3:
if p[2][0] == "CALL":
#p[0] = ast.Expr(ast.Call(p[1], p[2][1], []))
p[0] = ast.Call(p[1], p[2][1], [])
- #if p[1].id == 'print':
+ # if p[1].id == 'print':
# p[0] = ast.Printnl(ast.Tuple(p[2][1]), None, None)
- #else:
+ # else:
# p[0] = ast.CallFunc(p[1], p[2][1], None, None)
else:
- print ("subscript atom", p[2][1])
+ print("subscript atom", p[2][1])
#raise AssertionError("not implemented %s" % p[2][0])
subs = p[2][1]
if len(subs) == 1:
idx = subs[0]
else:
idx = ast.Slice(subs[0], subs[1], None)
- p[0] = ast.Subscript(p[1], idx)
+ p[0] = ast.Subscript(p[1], idx, ast.Load())
def p_atom_name(self, p):
"""atom : NAME"""
- p[0] = ast.Name(p[1], ctx=ast.Load())
+ p[0] = ast.Name(id=p[1], ctx=ast.Load())
def p_atom_number(self, p):
"""atom : BINARY
| STRING"""
p[0] = ast.Constant(p[1])
- #'[' [listmaker] ']' |
+ # '[' [listmaker] ']' |
def p_atom_listmaker(self, p):
"""atom : LBRACK listmaker RBRACK"""
def p_atom_tuple(self, p):
"""atom : LPAR testlist RPAR"""
- print ("tuple", p[2])
+ print("tuple", p[2])
if isinstance(p[2], ast.Name):
- print ("tuple name", p[2].id)
+ print("tuple name", p[2].id)
if p[2].id in self.gprs:
- self.read_regs.append(p[2].id) # add to list of regs to read
+ self.read_regs.append(p[2].id) # add to list of regs to read
#p[0] = ast.Subscript(ast.Name("GPR"), ast.Str(p[2].id))
- #return
+ # return
p[0] = p[2]
# trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
"trailer_subscript : LBRACK subscript RBRACK"
p[0] = ("SUBS", p[2])
- #subscript: '.' '.' '.' | test | [test] ':' [test]
+ # subscript: '.' '.' '.' | test | [test] ':' [test]
def p_subscript(self, p):
"""subscript : test COLON test
else:
p[0] = [p[1]]
-
# testlist: test (',' test)* [',']
# Contains shift/reduce error
+
def p_testlist(self, p):
"""testlist : testlist_multi COMMA
| testlist_multi """
# singleton -> tuple
p[0] = [p[1], p[3]]
-
# test: or_test ['if' or_test 'else' test] | lambdef
# as I don't support 'and', 'or', and 'not' this works down to 'comparison'
+
def p_test(self, p):
"test : comparison"
p[0] = p[1]
-
-
# arglist: (argument ',')* (argument [',']| '*' test [',' '**' test]
# | '**' test)
# XXX INCOMPLETE: this doesn't allow the trailing comma
+
def p_arglist(self, p):
"""arglist : arglist COMMA argument
| argument"""
p[0] = p[1]
def p_error(self, p):
- #print "Error!", repr(p)
+ # print "Error!", repr(p)
raise SyntaxError(p)
class GardenSnakeParser(PowerParser):
- def __init__(self, lexer = None):
+ def __init__(self, lexer=None):
PowerParser.__init__(self)
if lexer is None:
lexer = IndentLexer(debug=0)
self.sd = create_pdecode()
def parse(self, code):
- #self.lexer.input(code)
+ # self.lexer.input(code)
result = self.parser.parse(code, lexer=self.lexer, debug=False)
return ast.Module(result)
def compile(self, code, mode="exec", filename="<string>"):
tree = self.parser.parse(code)
- print ("snake")
+ print("snake")
pprint(tree)
return tree
#misc.set_filename(filename, tree)
return compile(tree, mode="exec", filename="<string>")
- #syntax.check(tree)
+ # syntax.check(tree)
gen = pycodegen.ModuleCodeGenerator(tree)
code = gen.getCode()
return code
-
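
A hypothetical usage sketch to close with: the "<-" assignment form is the PowerISA pseudocode syntax this grammar targets, but the exact spelling accepted depends on the lexer, which is not shown in these hunks.

gsp = GardenSnakeParser()
module = gsp.parse("RT <- RA + RB\n")   # returns an ast.Module
print(ast.dump(module))
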