"""
import gc
-from collections import namedtuple
-from nmigen import Module, Elaboratable, Signal, Cat, Mux
-from nmigen.cli import rtlil
+from collections import namedtuple, OrderedDict
+from nmigen import Module, Elaboratable, Signal, Cat, Mux, Const
+from nmigen.cli import rtlil, verilog
from openpower.decoder.power_enums import (Function, Form, MicrOp,
- In1Sel, In2Sel, In3Sel, OutSel,
- SVEXTRA, SVEtype, SVPtype, # Simple-V
- RC, LdstLen, LDSTMode, CryIn,
- single_bit_flags, CRInSel,
- CROutSel, get_signal_name,
- default_values, insns, asmidx)
+ In1Sel, In2Sel, In3Sel, OutSel,
+ SVEXTRA, SVEtype, SVPtype, # Simple-V
+ RC, LdstLen, LDSTMode, CryIn,
+ single_bit_flags, CRInSel,
+ CROutSel, get_signal_name,
+ default_values, insns, asmidx,
+ asmlen)
from openpower.decoder.power_fields import DecodeFields
from openpower.decoder.power_fieldsn import SigDecode, SignalBitRange
from openpower.decoder.power_svp64 import SVP64RM
power_op_types = {'function_unit': Function,
'internal_op': MicrOp,
'form': Form,
- 'asmcode': 8,
+ 'asmcode': asmlen,
'SV_Etype': SVEtype,
'SV_Ptype': SVPtype,
'in1_sel': In1Sel,
which generates an awful lot of wires, hence the subsetting
"""
- def __init__(self, incl_asm=True, name=None, subset=None):
+ def __init__(self, incl_asm=True, name=None, subset=None, fields=None):
+ self.name = name
self.subset = subset
+ if fields is not None:
+ for k, v in fields.items():
+ setattr(self, k, v)
+ return
debug_report = set()
fields = set()
for field, ptype in power_op_types.items():
debug_report.add(field)
fname = get_pname(field, name)
setattr(self, field, Signal(reset_less=True, name=fname))
+ self._fields = fields
# comment out, bit too high debug level
#print("PowerOp debug", name, debug_report)
#print(" fields", fields)
+ @staticmethod
+ def like(other):
+ """PowerOp.like: creates a duplicate of a given PowerOp instance
+ """
+ fields = {}
+ for fname in other._fields:
+ sig = getattr(other, fname, None)
+ if sig is not None:
+ fields[fname] = sig.__class__.like(sig)
+ return PowerOp(subset=other.subset, fields=fields)
+
def _eq(self, row=None):
if row is None:
row = default_values
if field not in power_op_csvmap:
continue
csvname = power_op_csvmap[field]
- #print(field, ptype, csvname, row)
+ # log(field, ptype, csvname, row)
val = row[csvname]
if csvname == 'upd' and isinstance(val, int): # LDSTMode different
val = ptype(val)
# process the comment field, strip out "equals" for FP
if "=" in asmcode:
asmcode = asmcode.split("=")[-1]
- log ("asmcode stripping =", asmcode,
- asmcode in asmidx, hasattr(self, "asmcode"))
+ log("asmcode stripping =", asmcode,
+ asmcode in asmidx, hasattr(self, "asmcode"))
if hasattr(self, "asmcode") and asmcode in asmidx:
res.append(self.asmcode.eq(asmidx[asmcode]))
for bit in single_bit_flags:
the constructor is called. all quite messy.
"""
- def __init__(self, width, dec, name=None, col_subset=None, row_subset=None):
+ def __init__(self, width, dec, name=None, col_subset=None,
+ row_subset=None, conditions=None):
+ if conditions is None:
+ # XXX conditions = {}
+ conditions = {'SVP64BREV': Const(0, 1),
+ 'SVP64FFT': Const(0, 1),
+ }
self.actually_does_something = False
self.pname = name
+ self.conditions = conditions
self.col_subset = col_subset
self.row_subsetfn = row_subset
if not isinstance(dec, list):
for d in dec:
if d.suffix is not None and d.suffix >= width:
d.suffix = None
+
self.width = width
+ # create some case statement condition patterns for matching
+ # a single condition. "1----" for the first condition,
+ # "-1----" for the 2nd etc.
+ # also create a matching ordered list of conditions, for the switch,
+ # which will Cat() them together
+ self.ccases = {}
+ self.ckeys = list(conditions.keys())
+ self.ckeys.sort()
+
+ def find_conditions(self, opcodes):
+ # look for conditions, create dictionary entries for them
+ # sorted by opcode
+ rows = OrderedDict() # start as a dictionary, get as list (after)
+ for row in opcodes:
+ condition = row['CONDITIONS']
+ opcode = row['opcode']
+ if condition:
+ # check it's expected
+ assert (condition in self.conditions or
+ (condition[0] == '~' and
+ condition[1:] in self.conditions)), \
+ "condition %s not in %s" % (condition, str(conditions))
+ if opcode not in rows:
+ rows[opcode] = {}
+ rows[opcode][condition] = row
+ else:
+ # check it's unique
+ assert opcode not in rows, \
+ "opcode %s already in rows for %s" % \
+ (opcode, self.pname)
+ rows[opcode] = row
+ # after checking for conditions, get just the values (ordered)
+ return list(rows.values())
+
def suffix_mask(self, d):
return ((1 << d.suffix) - 1)
reset_less=True)
eq = []
case_does_something = False
- eq.append(opcode_switch.eq(
- self.opcode_in[d.bitsel[0]:d.bitsel[1]]))
+ look_for = self.opcode_in[d.bitsel[0]:d.bitsel[1]]
+ eq.append(opcode_switch.eq(look_for))
if d.suffix:
opcodes = self.divide_opcodes(d)
+ # TODO opcodes = self.find_conditions(opcodes)
opc_in = Signal(d.suffix, reset_less=True)
eq.append(opc_in.eq(opcode_switch[:d.suffix]))
# begin the dynamic Switch statement here
subdecoder = PowerDecoder(width=32, dec=sd,
name=mname,
col_subset=self.col_subset,
- row_subset=self.row_subsetfn)
+ row_subset=self.row_subsetfn,
+ conditions=self.conditions)
if not subdecoder.tree_analyse():
del subdecoder
continue
if seqs:
case_does_something = True
eq += seqs
- for row in d.opcodes:
- opcode = row['opcode']
+ opcodes = self.find_conditions(d.opcodes)
+ for row in opcodes:
+ # urrr this is an awful hack. if "conditions" are active
+ # get the FIRST item (will be the same opcode), and it
+ # had BETTER have the same unit and also pass other
+ # row subset conditions.
+ if 'opcode' not in row: # must be a "CONDITIONS" dict...
+ is_conditions = True
+ _row = row[list(row.keys())[0]]
+ else:
+ is_conditions = False
+ _row = row
+ opcode = _row['opcode']
if d.opint and '-' not in opcode:
opcode = int(opcode, 0)
- if not row['unit']:
+ if not _row['unit']:
continue
if self.row_subsetfn:
- if not self.row_subsetfn(opcode, row):
+ if not self.row_subsetfn(opcode, _row):
continue
# add in the dynamic Case statement here
- switch_case[opcode] = self.op._eq(row)
+ if is_conditions:
+ switch_case[opcode] = {}
+ for k, crow in row.items():
+ # log("ordered", k, crow)
+ switch_case[opcode][k] = self.op._eq(crow)
+ else:
+ switch_case[opcode] = self.op._eq(row)
self.actually_does_something = True
case_does_something = True
subdecoder = PowerDecoder(self.width, dec,
name=mname,
col_subset=self.col_subset,
- row_subset=self.row_subsetfn)
- log ("subdecoder", mname, subdecoder)
+ row_subset=self.row_subsetfn,
+ conditions=self.conditions)
+ log("subdecoder", mname, subdecoder)
if not subdecoder.tree_analyse(): # doesn't do anything
- log ("analysed, DELETING", mname)
+ log("analysed, DELETING", mname)
del subdecoder
continue # skip
submodules[mname] = subdecoder
with m.Switch(switch):
for key, eqs in cases.items():
with m.Case(key):
- comb += eqs
+ # "conditions" are a further switch statement
+ if isinstance(eqs, dict):
+ self.condition_switch(m, eqs)
+ else:
+ comb += eqs
return m
+ def condition_switch(self, m, cases):
+ """against the global list of "conditions", having matched against
+ bits of the opcode, we FINALLY now have to match against some
+ additional "conditions". this is because there can be **MULTIPLE**
+ entries for a given opcode match. here we discern them.
+ """
+ comb = m.d.comb
+ cswitch = []
+ ccases = []
+ for casekey, eqs in cases.items():
+ if casekey.startswith('~'):
+ with m.If(~self.conditions[casekey[1:]]):
+ comb += eqs
+ else:
+ with m.If(self.conditions[casekey]):
+ comb += eqs
+
def ports(self):
return [self.opcode_in] + self.op.ports()
(reverses byte order). See V3.0B p44 1.11.2
"""
- def __init__(self, width, dec, name=None, col_subset=None, row_subset=None):
- PowerDecoder.__init__(self, width, dec, name, col_subset, row_subset)
+ def __init__(self, width, dec, name=None, col_subset=None,
+ row_subset=None, conditions=None):
+ PowerDecoder.__init__(self, width, dec, name,
+ col_subset, row_subset, conditions)
self.fields = df = DecodeFields(SignalBitRange, [self.opcode_in])
self.fields.create_specs()
self.raw_opcode_in = Signal.like(self.opcode_in, reset_less=True)
return m
def ports(self):
- return [self.raw_opcode_in, self.bigendian] + PowerDecoder.ports(self)
+ res = [self.raw_opcode_in, self.bigendian] + PowerDecoder.ports(self)
+ for condition in self.conditions.values():
+ res.append(condition)
+ return res
+
+
+#############################################################
+# PRIMARY FUNCTION SPECIFYING ALTERNATIVE SVP64 POWER DECODER
+
def create_pdecode_svp64_ldst(name=None, col_subset=None, row_subset=None,
                              include_fp=False, conditions=None):
    """create_pdecode_svp64_ldst - creates a hierarchical SVP64 LD/ST decoder

    subsetting of the PowerOp decoding is possible by setting col_subset.
    conditions (optional, default None) is passed through to
    TopPowerDecoder, for consistency with create_pdecode.
    """
    log("create_pdecode_svp64_ldst", name, col_subset, row_subset, include_fp)

    # some alteration to the CSV files is required for SV so we use
    # a class to do it
    isa = SVP64RM()
    get_csv = isa.get_svp64_csv

    # minor opcodes.
    pminor = [
        Subdecoder(pattern=58, opcodes=get_csv("svldst_minor_58.csv"),
                   opint=True, bitsel=(0, 2), suffix=None, subdecoders=[]),
        # nope - needs 4-in regs
        # Subdecoder(pattern=62, opcodes=get_csv("svldst_minor_62.csv"),
        #           opint=True, bitsel=(0, 2), suffix=None, subdecoders=[]),
    ]

    # FP 63L/H decoders, deliberately disabled for now (hence "False and").
    # TODO: move mffsfamily to separate subdecoder
    if False and include_fp:
        pminor.append(
            Subdecoder(pattern=63, opcodes=get_csv("minor_63.csv"),
                       opint=False, bitsel=(1, 11), suffix=None,
                       subdecoders=[]),
        )
        pminor.append(
            Subdecoder(pattern=59, opcodes=get_csv("minor_59.csv"),
                       opint=False, bitsel=(1, 11), suffix=None,
                       subdecoders=[]),
        )

    # top level: extra merged with major
    dec = []
    opcodes = get_csv("svldst_major.csv")
    dec.append(Subdecoder(pattern=None, opint=True, opcodes=opcodes,
                          bitsel=(26, 32), suffix=None, subdecoders=pminor))

    return TopPowerDecoder(32, dec, name=name, col_subset=col_subset,
                           row_subset=row_subset,
                           conditions=conditions)
####################################################
# PRIMARY FUNCTION SPECIFYING THE FULL POWER DECODER
def create_pdecode(name=None, col_subset=None, row_subset=None,
- include_fp=False):
+ include_fp=False, conditions=None):
"""create_pdecode - creates a cascading hierarchical POWER ISA decoder
subsetting of the PowerOp decoding is possible by setting col_subset
"""
- log ("create_pdecode", name, col_subset, row_subset, include_fp)
+ log("create_pdecode", name, col_subset, row_subset, include_fp)
# some alteration to the CSV files is required for SV so we use
# a class to do it
subdecoders=[]))
# XXX problem with sub-decoders (can only handle one),
# sort this another time
- #m19.append(Subdecoder(pattern=19, opcodes=get_csv("minor_19_00000.csv"),
+ # m19.append(Subdecoder(pattern=19, opcodes=get_csv("minor_19_00000.csv"),
# opint=True, bitsel=(1, 6), suffix=None,
# subdecoders=[]))
opint=True, bitsel=(0, 2), suffix=None, subdecoders=[]),
Subdecoder(pattern=22, opcodes=get_csv("minor_22.csv"),
opint=True, bitsel=(1, 5), suffix=None, subdecoders=[]),
+ Subdecoder(pattern=5, opcodes=get_csv("minor_5.csv"),
+ opint=True, bitsel=(0, 11), suffix=None, subdecoders=[]),
]
# FP 63L/H decoders. TODO: move mffsfamily to separate subdecoder
if include_fp:
pminor.append(
Subdecoder(pattern=63, opcodes=get_csv("minor_63.csv"),
- opint=False, bitsel=(1, 11), suffix=None,
- subdecoders=[]),
- )
+ opint=False, bitsel=(1, 11), suffix=None,
+ subdecoders=[]),
+ )
pminor.append(
Subdecoder(pattern=59, opcodes=get_csv("minor_59.csv"),
- opint=False, bitsel=(1, 11), suffix=None,
- subdecoders=[]),
- )
+ opint=False, bitsel=(1, 11), suffix=None,
+ subdecoders=[]),
+ )
# top level: extra merged with major
dec = []
bitsel=(0, 32), suffix=None, subdecoders=[]))
return TopPowerDecoder(32, dec, name=name, col_subset=col_subset,
- row_subset=row_subset)
+ row_subset=row_subset,
+ conditions=conditions)
+
+# test function from
+# https://github.com/apertus-open-source-cinema/naps/blob/9ebbc0/naps/soc/cli.py#L17
+
+
def fragment_repr(original):
    """Render a Fragment (and its subfragments, recursively) as a readable
    multi-line pseudo-repr string, for debug comparison of elaborations.
    """
    from textwrap import indent
    body = "\n"
    for attr in ['ports', 'drivers', 'statements', 'attrs',
                 'generated', 'flatten']:
        body += f"{attr}={getattr(original, attr)!r},\n"

    domains = "\n"
    for name, domain in original.domains.items():
        # TODO: this is not really sound because domains could be non local
        domains += f"{name}: {domain.name}\n"
    body += f"domains={{{indent(domains, '  ')}}},\n"

    children = "\n"
    for child, name in original.subfragments:
        children += f"[{name}, {fragment_repr(child)}]\n"
    body += f"children=[{indent(children, '  ')}],\n"

    return f"Fragment({indent(body, '  ')})"
if __name__ == '__main__':
    # demo/debug driver: build several subsetted decoders and dump them
    # to ilang (and verilog) for inspection.
    def rowsubsetfn(opcode, row):
        # row-subset predicate: keep only LDST and FPU rows
        log("row_subset", opcode, row)
        return row['unit'] in ['LDST', 'FPU']

    conditions = {'SVP64BREV': Signal(name="svp64brev", reset_less=True),
                  'SVP64FFT': Signal(name="svp64fft", reset_less=True),
                  }
    pdecode = create_pdecode(name="rowsub",
                             col_subset={'opcode', 'function_unit',
                                         'asmcode',
                                         'in2_sel', 'in3_sel'},
                             row_subset=rowsubsetfn,
                             include_fp=True,
                             conditions=conditions)
    vl = rtlil.convert(pdecode, ports=pdecode.ports())
    with open("row_subset_decoder.il", "w") as f:
        f.write(vl)
    vl = verilog.convert(pdecode, ports=pdecode.ports())
    with open("row_subset_decoder.v", "w") as f:
        f.write(vl)

    # col subset
    pdecode = create_pdecode(name="fusubset", col_subset={'function_unit'},
                             conditions=conditions)
    vl = rtlil.convert(pdecode, ports=pdecode.ports())
    with open("col_subset_decoder.il", "w") as f:
        f.write(vl)

    from nmigen.hdl.ir import Fragment
    elaborated = Fragment.get(pdecode, platform=None)
    elaborated_repr = fragment_repr(elaborated)
    print(elaborated_repr)

    # NOTE: deliberately stop here for now; the full-decoder builds below
    # are skipped (was a duplicated "exit(0)" -- one is enough)
    exit(0)

    # full decoder
    pdecode = create_pdecode(include_fp=True)
    vl = rtlil.convert(pdecode, ports=pdecode.ports())
    with open("decoder.il", "w") as f:
        f.write(vl)

    # full SVP64 decoder
    pdecode = create_pdecode_svp64_ldst(include_fp=True)
    vl = rtlil.convert(pdecode, ports=pdecode.ports())
    with open("decoder_svp64.il", "w") as f:
        f.write(vl)