1 """Cascading Power ISA Decoder
5 # Copyright (C) 2020 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
6 # Copyright (C) 2020 Michael Nolan <mtnolan2640@gmail.com>
8 This module uses CSV tables in a hierarchical/peer cascading fashion,
9 to create a multi-level instruction decoder by recognising appropriate
10 patterns. The output is a wide, flattened (1-level) series of bitfields,
11 suitable for a simple RISC engine.
13 This is based on Anton Blanchard's excellent microwatt work:
14 https://github.com/antonblanchard/microwatt/blob/master/decode1.vhdl
The basic principle is that the python code does the heavy lifting
(reading the CSV files, constructing the hierarchy), creating the HDL
AST with for-loops that generate switch-case statements.

Where "normal" HDL would do this, in laborious, excruciating detail:

    switch (opcode & major_mask_bits):
        case opcode_2: decode_opcode_2()
            switch (opcode & minor_19_mask_bits)
                case minor_opcode_19_operation_X:
                case minor_opcode_19_operation_y:

we take *full* advantage of the decoupling between python and the
nmigen AST data structure, to do this:

    with m.Switch(opcode & self.mask):
        for case_bitmask in subcases:
            with m.If(opcode & case_bitmask): {do_something}

this includes specifying the information sufficient to perform subdecoding.
create_pdecode() specifies the full hierarchical tree for decoding POWER9;
subsetting is possible by specifying col_subset (row_subset TODO).

A PowerDecoder takes a *list* of CSV files with an associated bit-range
that it is requested to match against the "opcode" row of the CSV file.
This pattern can be either an integer, a binary number, *or* a
wildcard nmigen Case pattern of the form "001--1-100".

Subdecoders are *additional* cases with further decoding.  The "pattern"
argument is specified as one of the Case statements (a peer of the
opcode row in the CSV file), and thus further fields of the opcode
may be decoded, giving increasing levels of detail.
Example of the hierarchy (as specified in create_pdecode() below):

    [ (extra.csv: bit-fields, entire 32-bit range
        000000---------------01000000000 -> ILLEGAL instruction
        01100000000000000000000000000000 -> SIM_CONFIG instruction
        ................................ ->
      ),
      (major.csv: first 6 bits ONLY
        001100 -> ALU,OP_ADD (add)
        001101 -> ALU,OP_ADD (another type of add)

        001011 this must match *MAJOR*.CSV
        [ (minor_19.csv: bits 21 through 30 inclusive:
            0b0000000000 -> ALU,OP_MCRF
          ),
          (minor_19_00000.csv: bits 21 through 25 inclusive:
            0b00010 -> ALU,add_pcis
          )
        ]
      )
    ]
"""

from collections import namedtuple, OrderedDict
from nmigen import Module, Elaboratable, Signal, Cat, Mux, Const
from nmigen.cli import rtlil, verilog
from openpower.decoder.power_enums import (Function, Form, MicrOp,
                                           In1Sel, In2Sel, In3Sel, OutSel,
                                           SVEXTRA, SVEtype, SVPtype,  # Simple-V
                                           RC, LdstLen, LDSTMode, CryIn,
                                           single_bit_flags, CRInSel,
                                           CROutSel, get_signal_name,
                                           default_values, insns, asmidx,
                                           )
from openpower.decoder.power_fields import DecodeFields
from openpower.decoder.power_fieldsn import SigDecode, SignalBitRange
from openpower.decoder.power_svp64 import SVP64RM
from openpower.util import log

# key data structure in which the POWER decoder is specified,
# in a hierarchical fashion
Subdecoder = namedtuple(  # fix autoformatter
    "Subdecoder",
    ["pattern",     # the major pattern to search for (e.g. major opcode)
     "opcodes",     # a dictionary of minor patterns to find
     "opint",       # true => the pattern must not be in "10----11" format
     "bitsel",      # the bits (as a range) against which "pattern" matches
     "suffix",      # shift the opcode down before decoding
     "subdecoders"  # list of further subdecoders for *additional* matches,
     # *ONLY* after "pattern" has *ALSO* been matched against.
     ])
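
# Illustrative sketch (not part of the decoder specification proper): a
# two-level hierarchy in which major opcode 19 cascades into a minor decoder
# on bits 1..10.  The patterns, bit-ranges and CSV names are taken from
# create_pdecode() further below; get_csv stands for the CSV reader used there.
#
#   minor_19 = Subdecoder(pattern=19, opcodes=get_csv("minor_19.csv"),
#                         opint=True, bitsel=(1, 11), suffix=None,
#                         subdecoders=[])
#   major = Subdecoder(pattern=None, opint=True, opcodes=get_csv("major.csv"),
#                      bitsel=(26, 32), suffix=None, subdecoders=[minor_19])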

power_op_types = {'function_unit': Function,
                  'internal_op': MicrOp,
                  'sv_cr_out': SVEXTRA,
                  }

power_op_csvmap = {'function_unit': 'unit',
                   'internal_op': 'internal op',
                   'sv_out2': 'sv_out2',
                   'sv_cr_in': 'sv_cr_in',
                   'sv_cr_out': 'sv_cr_out',
                   'SV_Etype': 'SV_Etype',
                   'SV_Ptype': 'SV_Ptype',
                   'ldst_len': 'ldst len',
                   }


def get_pname(field, pname):
    return "%s_%s" % (pname, field)
178 """PowerOp - a dynamic class that stores (subsets of) CSV rows of data
179 about a PowerISA instruction. this is a "micro-code" expanded format
180 which generates an awful lot of wires, hence the subsetting
183 def __init__(self
, incl_asm
=True, name
=None, subset
=None, fields
=None):
186 if fields
is not None:
187 for k
, v
in fields
.items():
192 for field
, ptype
in power_op_types
.items():
194 if subset
and field
not in subset
:
196 fname
= get_pname(field
, name
)
197 setattr(self
, field
, Signal(ptype
, reset_less
=True, name
=fname
))
198 debug_report
.add(field
)
199 for bit
in single_bit_flags
:
200 field
= get_signal_name(bit
)
202 if subset
and field
not in subset
:
204 debug_report
.add(field
)
205 fname
= get_pname(field
, name
)
206 setattr(self
, field
, Signal(reset_less
=True, name
=fname
))
207 self
._fields
= fields
208 # comment out, bit too high debug level
209 #print("PowerOp debug", name, debug_report)
210 #print(" fields", fields)
214 """PowerOp.like: creates a duplicate of a given PowerOp instance
217 for fname
in other
._fields
:
218 sig
= getattr(other
, fname
, None)
220 fields
[fname
] = sig
.__class
__.like(sig
)
221 return PowerOp(subset
=other
.subset
, fields
=fields
)

    def _eq(self, row=None):
        res = []
        # TODO: this conversion process from a dict to an object
        # should really be done using e.g. a namedtuple
        if False:  # debugging
            if row['CR in'] == '1':
                ...
            if row['CR out'] == '0':
                ...
        ldst_mode = row['upd']
        if ldst_mode.isdigit():
            row['upd'] = int(ldst_mode)
        for field, ptype in power_op_types.items():
            if not hasattr(self, field):
                continue
            if field not in power_op_csvmap:
                continue
            csvname = power_op_csvmap[field]
            # log(field, ptype, csvname, row)
            val = row[csvname]
            if csvname == 'upd' and isinstance(val, int):  # LDSTMode different
                val = ptype(val)
            else:
                val = ptype[val]
            res.append(getattr(self, field).eq(val))
        asmcode = row['comment']
        # process the comment field, strip out "equals" for FP
        asmcode = asmcode.split("=")[-1]
        log("asmcode stripping =", asmcode,
            asmcode in asmidx, hasattr(self, "asmcode"))
        if hasattr(self, "asmcode") and asmcode in asmidx:
            res.append(self.asmcode.eq(asmidx[asmcode]))
        for bit in single_bit_flags:
            field = get_signal_name(bit)
            if not hasattr(self, field):
                continue
            sig = getattr(self, field)
            res.append(sig.eq(int(row.get(bit, 0))))
        return res

    def _get_eq(self, res, field, otherop):
        copyfrom = getattr(otherop, field, None)
        copyto = getattr(self, field, None)
        if copyfrom is not None and copyto is not None:
            res.append(copyto.eq(copyfrom))

    def eq(self, otherop):
        res = []
        for field in power_op_types.keys():
            self._get_eq(res, field, otherop)
        for bit in single_bit_flags:
            self._get_eq(res, get_signal_name(bit), otherop)
        return res

    def ports(self):
        res = []
        for field in power_op_types.keys():
            if hasattr(self, field):
                res.append(getattr(self, field))
        if hasattr(self, "asmcode"):
            res.append(self.asmcode)
        for field in single_bit_flags:
            field = get_signal_name(field)
            if hasattr(self, field):
                res.append(getattr(self, field))
        return res
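
# Usage sketch (illustrative only): PowerDecoder, below, creates one PowerOp
# per (sub)decoder and drives it from CSV rows, roughly:
#
#   op = PowerOp(name="dec19", subset={'function_unit', 'internal_op'})
#   comb += op._eq(row)        # row: one dict per CSV line
#   comb += parent_op.eq(op)   # cascade the decoded result up to the parent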


class PowerDecoder(Elaboratable):
    """PowerDecoder - decodes an incoming opcode into the type of operation

    this is a recursive algorithm, creating Switch statements that can
    have further match-and-decode on other parts of the opcode field before
    finally landing at a "this CSV entry's details get returned" point.

    the complicating factor is the row and col subsetting.  column subsetting
    dynamically chooses only the CSV columns requested, whilst row subsetting
    allows a function to be called on the row to determine if the Case
    statement is to be generated for that row.  this not only generates
    completely different Decoders, it also means that some sub-decoders
    will turn up blank (empty switch statements).  if that happens we do
    not want the parent to include a Mux for an entirely blank switch
    statement, so we have to store the switch/case statements in a tree
    and only generate the actual nmigen AST later, in elaborate().

    the reason for the tree is that elaborate can only be called *after*
    the constructor is called.  all quite messy.
    """
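
    # Subsetting sketch (mirrors the __main__ demo at the bottom of this file):
    #
    #   def rowsubsetfn(opcode, row):
    #       return row['unit'] in ['LDST', 'FPU']
    #   pdecode = create_pdecode(name="rowsub",
    #                            col_subset={'function_unit', 'in2_sel'},
    #                            row_subset=rowsubsetfn)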

    def __init__(self, width, dec, name=None, col_subset=None,
                 row_subset=None, conditions=None):
        if conditions is None:
            # XXX conditions = {}
            conditions = {'SVP64BREV': Const(0, 1),
                          'SVP64FFT': Const(0, 1),
                          }
        self.actually_does_something = False
        self.pname = name
        self.conditions = conditions
        self.col_subset = col_subset
        self.row_subsetfn = row_subset
        if not isinstance(dec, list):
            dec = [dec]
        self.dec = dec
        self.opcode_in = Signal(width, reset_less=True)

        self.op = PowerOp(name=name, subset=col_subset)
        for d in dec:
            if d.suffix is not None and d.suffix >= width:
                ...
        self.width = width

        # create some case statement condition patterns for matching
        # a single condition. "1----" for the first condition,
        # "-1----" for the 2nd etc.
        # also create a matching ordered list of conditions, for the switch,
        # which will Cat() them together
        self.ckeys = list(conditions.keys())

    def find_conditions(self, opcodes):
        # look for conditions, create dictionary entries for them
        rows = OrderedDict()  # start as a dictionary, get as list (after)
        for row in opcodes:
            condition = row['CONDITIONS']
            opcode = row['opcode']
            if condition:
                # check it's expected
                assert (condition in self.conditions or
                        (condition[0] == '~' and
                         condition[1:] in self.conditions)), \
                    "condition %s not in %s" % (condition,
                                                str(self.conditions))
                if opcode not in rows:
                    rows[opcode] = {}
                rows[opcode][condition] = row
            else:
                assert opcode not in rows, \
                    "opcode %s already in rows for %s" % \
                    (opcode, self.pname)
                rows[opcode] = row
        # after checking for conditions, get just the values (ordered)
        return list(rows.values())

    def suffix_mask(self, d):
        return ((1 << d.suffix) - 1)

    def divide_opcodes(self, d):
        divided = {}
        mask = self.suffix_mask(d)
        #print("mask", hex(mask))
        for row in d.opcodes:
            opcode = row['opcode']
            if d.opint and '-' not in opcode:
                opcode = int(opcode, 0)
            key = opcode & mask
            opcode = opcode >> d.suffix
            if key not in divided:
                divided[key] = []
            r = row.copy()
            r['opcode'] = opcode
            divided[key].append(r)
        return divided
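
    # Worked example: create_pdecode() registers minor_31.csv with
    # bitsel=(1, 11) and suffix=0b00101 (i.e. 5).  divide_opcodes() groups
    # the 10-bit XO values by their low 5 bits; tree_analyse() then switches
    # on those 5 bits and hands each group to a sub-decoder matching the
    # remaining bits, i.e. bitsel=(suffix + 1, 11) == (6, 11).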

    def tree_analyse(self):
        self.decs = decs = []
        self.submodules = submodules = {}
        self.eqs = eqs = []

        # go through the list of CSV decoders first
        for d in self.dec:
            cases = []
            opcode_switch = Signal(d.bitsel[1] - d.bitsel[0],
                                   reset_less=True)
            eq = []
            case_does_something = False
            look_for = self.opcode_in[d.bitsel[0]:d.bitsel[1]]
            eq.append(opcode_switch.eq(look_for))
            if d.suffix:
                opcodes = self.divide_opcodes(d)
                # TODO opcodes = self.find_conditions(opcodes)
                opc_in = Signal(d.suffix, reset_less=True)
                eq.append(opc_in.eq(opcode_switch[:d.suffix]))
                # begin the dynamic Switch statement here
                switch_case = {}
                cases.append([opc_in, switch_case])
                sub_eqs = []
                for key, row in opcodes.items():
                    bitsel = (d.suffix+d.bitsel[0], d.bitsel[1])
                    sd = Subdecoder(pattern=None, opcodes=row,
                                    bitsel=bitsel, suffix=None,
                                    opint=False, subdecoders=[])
                    mname = get_pname("dec_sub%d" % key, self.pname)
                    subdecoder = PowerDecoder(width=32, dec=sd,
                                              name=mname,
                                              col_subset=self.col_subset,
                                              row_subset=self.row_subsetfn,
                                              conditions=self.conditions)
                    if not subdecoder.tree_analyse():
                        del subdecoder
                        continue
                    submodules[mname] = subdecoder
                    sub_eqs.append(subdecoder.opcode_in.eq(self.opcode_in))
                    # add in the dynamic Case statement here
                    switch_case[key] = self.op.eq(subdecoder.op)
                    self.actually_does_something = True
                    case_does_something = True
                if case_does_something:
                    eq += sub_eqs
            else:
                # TODO: arguments, here (all of them) need to be a list.
                # a for-loop around the *list* of decoder args.
                switch_case = {}
                cases.append([opcode_switch, switch_case])
                seqs = self.handle_subdecoders(switch_case, submodules, d)
                if seqs:
                    case_does_something = True
                eq += seqs
                opcodes = self.find_conditions(d.opcodes)
                for row in opcodes:
                    # urrr this is an awful hack. if "conditions" are active
                    # get the FIRST item (will be the same opcode), and it
                    # had BETTER have the same unit and also pass other
                    # row subset conditions.
                    if 'opcode' not in row:  # must be a "CONDITIONS" dict...
                        is_conditions = True
                        _row = row[list(row.keys())[0]]
                    else:
                        is_conditions = False
                        _row = row
                    opcode = _row['opcode']
                    if d.opint and '-' not in opcode:
                        opcode = int(opcode, 0)
                    if self.row_subsetfn:
                        if not self.row_subsetfn(opcode, _row):
                            continue
                    # add in the dynamic Case statement here
                    if is_conditions:
                        switch_case[opcode] = {}
                        for k, crow in row.items():
                            # log("ordered", k, crow)
                            switch_case[opcode][k] = self.op._eq(crow)
                    else:
                        switch_case[opcode] = self.op._eq(row)
                    self.actually_does_something = True
                    case_does_something = True

            if case_does_something:
                eqs += eq
                decs.append(cases)

        #print("submodule eqs", self.pname, eq)
        #print("submodules", self.pname, submodules)
        return self.actually_does_something

    def handle_subdecoders(self, switch_case, submodules, d):
        eqs = []
        for dlist in d.subdecoders:
            if not isinstance(dlist, list):  # XXX HACK: take first pattern
                dlist = [dlist]
            for dec in dlist:
                #print("subdec", dec.pattern, self.pname)
                mname = get_pname("dec%d" % dec.pattern, self.pname)
                if mname in submodules:
                    log("subdecoder", mname, "already in submodules")
                assert mname not in submodules
                subdecoder = PowerDecoder(self.width, dec,
                                          name=mname,
                                          col_subset=self.col_subset,
                                          row_subset=self.row_subsetfn,
                                          conditions=self.conditions)
                log("subdecoder", mname, subdecoder)
                if not subdecoder.tree_analyse():  # doesn't do anything
                    log("analysed, DELETING", mname)
                    del subdecoder
                    continue
                submodules[mname] = subdecoder
                eqs.append(subdecoder.opcode_in.eq(self.opcode_in))
                switch_case[dec.pattern] = self.op.eq(subdecoder.op)
                self.actually_does_something = True
        return eqs

    def elaborate(self, platform):
        #print("decoder elaborate", self.pname, self.submodules)
        m = Module()
        comb = m.d.comb
        comb += self.eqs
        for mname, subdecoder in self.submodules.items():
            setattr(m.submodules, mname, subdecoder)

        for switch_case in self.decs:
            for (switch, cases) in switch_case:
                with m.Switch(switch):
                    for key, eqs in cases.items():
                        with m.Case(key):
                            # "conditions" are a further switch statement
                            if isinstance(eqs, dict):
                                self.condition_switch(m, eqs)
                            else:
                                comb += eqs
        return m

    def condition_switch(self, m, cases):
        """against the global list of "conditions", having matched against
        bits of the opcode, we FINALLY now have to match against some
        additional "conditions".  this is because there can be **MULTIPLE**
        entries for a given opcode match.  here we discern them.
        """
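        # e.g. with conditions = {'SVP64BREV': ..., 'SVP64FFT': ...} a CSV row
        # whose CONDITIONS column reads "~SVP64FFT" is selected only while the
        # SVP64FFT input signal is low.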
        comb = m.d.comb
        for casekey, eqs in cases.items():
            if casekey.startswith('~'):
                with m.If(~self.conditions[casekey[1:]]):
                    comb += eqs
            else:
                with m.If(self.conditions[casekey]):
                    comb += eqs

    def ports(self):
        return [self.opcode_in] + self.op.ports()


class TopPowerDecoder(PowerDecoder):
    """top-level hierarchical decoder for POWER ISA

    bigendian dynamically switches between big and little endian decoding
    (reverses byte order).  See V3.0B p44 1.11.2
    """
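
    # Worked example of the endian handling: the big-endian instruction word
    # 0x4E800020 (blr), read as a little-endian 32-bit value, arrives on
    # raw_opcode_in as 0x2000804E; with bigendian=1, elaborate() byte-reverses
    # it back to 0x4E800020 before field decoding.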

    def __init__(self, width, dec, name=None, col_subset=None,
                 row_subset=None, conditions=None):
        PowerDecoder.__init__(self, width, dec, name,
                              col_subset, row_subset, conditions)
        self.fields = df = DecodeFields(SignalBitRange, [self.opcode_in])
        self.fields.create_specs()
        self.raw_opcode_in = Signal.like(self.opcode_in, reset_less=True)
        self.bigendian = Signal(reset_less=True)

        for fname, value in self.fields.common_fields.items():
            signame = get_pname(fname, name)
            sig = Signal(value[0:-1].shape(), reset_less=True, name=signame)
            setattr(self, fname, sig)

        # create signals for all field forms
        forms = self.form_names
        self.sigforms = {}
        for form in forms:
            fields = self.fields.instrs[form]
            fk = fields.keys()
            Fields = namedtuple("Fields", fk)
            sigs = {}
            for k, value in fields.items():
                fname = "%s_%s" % (form, k)
                sig = Signal(value[0:-1].shape(), reset_less=True, name=fname)
                sigs[k] = sig
            instr = Fields(**sigs)
            setattr(self, "Form%s" % form, instr)
            self.sigforms[form] = instr

    @property
    def form_names(self):
        return self.fields.instrs.keys()

    def elaborate(self, platform):
        m = PowerDecoder.elaborate(self, platform)
        comb = m.d.comb

        # sigh duplicated in SVP64PowerDecoder
        # raw opcode in assumed to be in LE order: byte-reverse it to get BE
        raw_le = self.raw_opcode_in
        l = []
        for i in range(0, self.width, 8):
            l.append(raw_le[i:i+8])
        raw_be = Cat(*reversed(l))
        comb += self.opcode_in.eq(Mux(self.bigendian, raw_be, raw_le))

        # add all signals from commonly-used fields
        for fname, value in self.fields.common_fields.items():
            sig = getattr(self, fname)
            comb += sig.eq(value[0:-1])

        # link signals for all field forms
        forms = self.form_names
        for form in forms:
            sf = self.sigforms[form]
            fields = self.fields.instrs[form]
            for k, value in fields.items():
                sig = getattr(sf, k)
                comb += sig.eq(value[0:-1])

        return m

    def ports(self):
        res = [self.raw_opcode_in, self.bigendian] + PowerDecoder.ports(self)
        for condition in self.conditions.values():
            res.append(condition)
        return res


#############################################################
# PRIMARY FUNCTION SPECIFYING ALTERNATIVE SVP64 POWER DECODER

def create_pdecode_svp64_ldst(name=None, col_subset=None, row_subset=None,
                              include_fp=False):
    """create_pdecode_svp64_ldst - creates a cascading hierarchical
    POWER ISA decoder for the SVP64 LD/ST subset

    subsetting of the PowerOp decoding is possible by setting col_subset
    """
    log("create_pdecode_svp64_ldst", name, col_subset, row_subset, include_fp)

    # some alteration to the CSV files is required for SV so we use
    # SVP64RM to retrieve the (SV-augmented) versions
    isa = SVP64RM()
    get_csv = isa.get_svp64_csv

    pminor = [
        Subdecoder(pattern=58, opcodes=get_csv("svldst_minor_58.csv"),
                   opint=True, bitsel=(0, 2), suffix=None, subdecoders=[]),
        # nope - needs 4-in regs
        # Subdecoder(pattern=62, opcodes=get_csv("svldst_minor_62.csv"),
        #            opint=True, bitsel=(0, 2), suffix=None, subdecoders=[]),
    ]

    # FP 63L/H decoders. TODO: move mffsfamily to separate subdecoder
    if False and include_fp:
        pminor.append(
            Subdecoder(pattern=63, opcodes=get_csv("minor_63.csv"),
                       opint=False, bitsel=(1, 11), suffix=None,
                       subdecoders=[]))
        pminor.append(
            Subdecoder(pattern=59, opcodes=get_csv("minor_59.csv"),
                       opint=False, bitsel=(1, 11), suffix=None,
                       subdecoders=[]))

    # top level: extra merged with major
    dec = []
    opcodes = get_csv("svldst_major.csv")
    dec.append(Subdecoder(pattern=None, opint=True, opcodes=opcodes,
                          bitsel=(26, 32), suffix=None, subdecoders=pminor))

    return TopPowerDecoder(32, dec, name=name, col_subset=col_subset,
                           row_subset=row_subset)


####################################################
# PRIMARY FUNCTION SPECIFYING THE FULL POWER DECODER

def create_pdecode(name=None, col_subset=None, row_subset=None,
                   include_fp=False, conditions=None):
    """create_pdecode - creates a cascading hierarchical POWER ISA decoder

    subsetting of the PowerOp decoding is possible by setting col_subset
    """
    log("create_pdecode", name, col_subset, row_subset, include_fp)

    # some alteration to the CSV files is required for SV so we use
    # SVP64RM to retrieve the (SV-augmented) versions
    isa = SVP64RM()
    get_csv = isa.get_svp64_csv

    # minor 19 has extra patterns
    m19 = []
    m19.append(Subdecoder(pattern=19, opcodes=get_csv("minor_19.csv"),
                          opint=True, bitsel=(1, 11), suffix=None,
                          subdecoders=[]))
    # XXX problem with sub-decoders (can only handle one),
    # sort this another time
    # m19.append(Subdecoder(pattern=19, opcodes=get_csv("minor_19_00000.csv"),
    #                       opint=True, bitsel=(1, 6), suffix=None,
    #                       subdecoders=[]))

    pminor = [
        m19,
        Subdecoder(pattern=30, opcodes=get_csv("minor_30.csv"),
                   opint=True, bitsel=(1, 5), suffix=None, subdecoders=[]),
        Subdecoder(pattern=31, opcodes=get_csv("minor_31.csv"),
                   opint=True, bitsel=(1, 11), suffix=0b00101, subdecoders=[]),
        Subdecoder(pattern=58, opcodes=get_csv("minor_58.csv"),
                   opint=True, bitsel=(0, 2), suffix=None, subdecoders=[]),
        Subdecoder(pattern=62, opcodes=get_csv("minor_62.csv"),
                   opint=True, bitsel=(0, 2), suffix=None, subdecoders=[]),
        Subdecoder(pattern=22, opcodes=get_csv("minor_22.csv"),
                   opint=True, bitsel=(1, 5), suffix=None, subdecoders=[]),
        Subdecoder(pattern=5, opcodes=get_csv("minor_5.csv"),
                   opint=True, bitsel=(0, 11), suffix=None, subdecoders=[]),
    ]

    # FP 63L/H decoders. TODO: move mffsfamily to separate subdecoder
    if include_fp:
        pminor.append(
            Subdecoder(pattern=63, opcodes=get_csv("minor_63.csv"),
                       opint=False, bitsel=(1, 11), suffix=None,
                       subdecoders=[]))
        pminor.append(
            Subdecoder(pattern=59, opcodes=get_csv("minor_59.csv"),
                       opint=False, bitsel=(1, 11), suffix=None,
                       subdecoders=[]))

    # top level: extra merged with major
    dec = []
    opcodes = get_csv("major.csv")
    dec.append(Subdecoder(pattern=None, opint=True, opcodes=opcodes,
                          bitsel=(26, 32), suffix=None, subdecoders=pminor))
    opcodes = get_csv("extra.csv")
    dec.append(Subdecoder(pattern=None, opint=False, opcodes=opcodes,
                          bitsel=(0, 32), suffix=None, subdecoders=[]))

    return TopPowerDecoder(32, dec, name=name, col_subset=col_subset,
                           row_subset=row_subset,
                           conditions=conditions)
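
# Usage sketch (mirrors the __main__ demo at the end of this file): build the
# full decoder and convert it to RTLIL.
#
#   pdecode = create_pdecode(include_fp=True)
#   vl = rtlil.convert(pdecode, ports=pdecode.ports())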


# https://github.com/apertus-open-source-cinema/naps/blob/9ebbc0/naps/soc/cli.py#L17
def fragment_repr(original):
    from textwrap import indent
    attrs_str = ""
    for attr in ['ports', 'drivers', 'statements', 'attrs',
                 'generated', 'flatten']:
        attrs_str += f"{attr}={repr(getattr(original, attr))},\n"

    domains_str = ""
    for name, domain in original.domains.items():
        # TODO: this is not really sound because domains could be non local
        domains_str += f"{name}: {domain.name}\n"
    attrs_str += f"domains={{{indent(domains_str, ' ')}}},\n"

    children_str = ""
    for child, name in original.subfragments:
        children_str += f"[{name}, {fragment_repr(child)}]\n"
    attrs_str += f"children=[{indent(children_str, ' ')}],\n"

    return f"Fragment({indent(attrs_str, ' ')})"


if __name__ == '__main__':
    def rowsubsetfn(opcode, row):
        log("row_subset", opcode, row)
        return row['unit'] in ['LDST', 'FPU']

    conditions = {'SVP64BREV': Signal(name="svp64brev", reset_less=True),
                  'SVP64FFT': Signal(name="svp64fft", reset_less=True),
                  }
    pdecode = create_pdecode(name="rowsub",
                             col_subset={'opcode', 'function_unit',
                                         'in2_sel', 'in3_sel'},
                             row_subset=rowsubsetfn,
                             conditions=conditions)
    vl = rtlil.convert(pdecode, ports=pdecode.ports())
    with open("row_subset_decoder.il", "w") as f:
        f.write(vl)

    vl = verilog.convert(pdecode, ports=pdecode.ports())
    with open("row_subset_decoder.v", "w") as f:
        f.write(vl)

    pdecode = create_pdecode(name="fusubset", col_subset={'function_unit'},
                             conditions=conditions)
    vl = rtlil.convert(pdecode, ports=pdecode.ports())
    with open("col_subset_decoder.il", "w") as f:
        f.write(vl)

    from nmigen.hdl.ir import Fragment
    elaborated = Fragment.get(pdecode, platform=None)
    elaborated_repr = fragment_repr(elaborated)
    print(elaborated_repr)

    pdecode = create_pdecode(include_fp=True)
    vl = rtlil.convert(pdecode, ports=pdecode.ports())
    with open("decoder.il", "w") as f:
        f.write(vl)

    pdecode = create_pdecode_svp64_ldst(include_fp=True)
    vl = rtlil.convert(pdecode, ports=pdecode.ports())
    with open("decoder_svp64.il", "w") as f:
        f.write(vl)