1 # SPDX-License-Identifier: LGPL-3-or-later
2 """Cascading Power ISA Decoder
6 # Copyright (C) 2020 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
7 # Copyright (C) 2020 Michael Nolan <mtnolan2640@gmail.com>
9 This module uses CSV tables in a hierarchical/peer cascading fashion,
10 to create a multi-level instruction decoder by recognising appropriate
11 patterns. The output is a wide, flattened (1-level) series of bitfields,
12 suitable for a simple RISC engine.
14 This is based on Anton Blanchard's excellent microwatt work:
15 https://github.com/antonblanchard/microwatt/blob/master/decode1.vhdl
17 The basic principle is that the python code does the heavy lifting
18 (reading the CSV files, constructing the hierarchy), creating the HDL
19 AST with for-loops generating switch-case statements.
21 Where "normal" HDL would do this, in laborious excruciating detail:
23 switch (opcode & major_mask_bits):
24 case opcode_2: decode_opcode_2()
26 switch (opcode & minor_19_mask_bits)
27 case minor_opcode_19_operation_X:
28 case minor_opcode_19_operation_y:
30 we take *full* advantage of the decoupling between python and the
31 nmigen AST data structure, to do this:
33 with m.Switch(opcode & self.mask):
34 for case_bitmask in subcases:
35 with m.If(opcode & case_bitmask): {do_something}
37 this includes specifying the information sufficient to perform subdecoding.
41 the full hierarchical tree for decoding POWER9 is specified here
42 subsetting is possible by specifying col_subset (row_subset TODO)
46 takes a *list* of CSV files with an associated bit-range that it
47 is requested to match against the "opcode" row of the CSV file.
48 This pattern can be either an integer, a binary number, *or* a
49 wildcard nmigen Case pattern of the form "001--1-100".
53 these are *additional* cases with further decoding. The "pattern"
54 argument is specified as one of the Case statements (a peer of the
55 opcode row in the CSV file), and thus further fields of the opcode
56 may be decoded giving increasing levels of detail.
60 [ (extra.csv: bit-fields entire 32-bit range
62 000000---------------01000000000 -> ILLEGAL instruction
63 01100000000000000000000000000000 -> SIM_CONFIG instruction
64 ................................ ->
66 (major.csv: first 6 bits ONLY
68 001100 -> ALU,OP_ADD (add)
69 001101 -> ALU,OP_ADD (another type of add)
73 001011 this must match *MAJOR*.CSV
74 [ (minor_19.csv: bits 21 through 30 inclusive:
76 0b0000000000 -> ALU,OP_MCRF
79 (minor_19_00000.csv: bits 21 through 25 inclusive:
81 0b00010 -> ALU,add_pcis
91 from collections
import namedtuple
, OrderedDict
92 from nmigen
import Module
, Elaboratable
, Signal
, Cat
, Mux
, Const
93 from nmigen
.cli
import rtlil
, verilog
94 from openpower
.decoder
.power_enums
import (Function
, Form
, MicrOp
,
95 In1Sel
, In2Sel
, In3Sel
, OutSel
,
96 SVEXTRA
, SVEtype
, SVPtype
, # Simple-V
97 RC
, LdstLen
, LDSTMode
, CryIn
,
98 single_bit_flags
, CRInSel
,
99 CROutSel
, get_signal_name
,
100 default_values
, insns
, asmidx
,
102 from openpower
.decoder
.power_fields
import DecodeFields
103 from openpower
.decoder
.power_fieldsn
import SigDecode
, SignalBitRange
104 from openpower
.decoder
.power_svp64
import SVP64RM
106 from openpower
.util
import log
108 # key data structure in which the POWER decoder is specified,
109 # in a hierarchical fashion
110 Subdecoder
= namedtuple( # fix autoformatter
112 ["pattern", # the major pattern to search for (e.g. major opcode)
113 "opcodes", # a dictionary of minor patterns to find
114 "opint", # true => the pattern must not be in "10----11" format
115 # the bits (as a range) against which "pattern" matches
117 "suffix", # shift the opcode down before decoding
118 "subdecoders" # list of further subdecoders for *additional* matches,
119 # *ONLY* after "pattern" has *ALSO* been matched against.
122 power_op_types
= {'function_unit': Function
,
123 'internal_op': MicrOp
,
140 'sv_cr_out': SVEXTRA
,
147 power_op_csvmap
= {'function_unit': 'unit',
149 'internal_op': 'internal op',
158 'sv_out2': 'sv_out2',
159 'sv_cr_in': 'sv_cr_in',
160 'sv_cr_out': 'sv_cr_out',
161 'SV_Etype': 'SV_Etype',
162 'SV_Ptype': 'SV_Ptype',
165 'ldst_len': 'ldst len',
167 'rsrv': 'rsrv', # atomic operation
def get_pname(field, pname):
    """get_pname - build a per-decoder signal name.

    Prefixes `field` with the (sub)decoder name, giving "pname_field".

    Fix: when no decoder name is given (pname is None - the default for
    PowerOp/PowerDecoder `name`), return the field name unchanged instead
    of producing a bogus "None_field" signal name.
    """
    if pname is None:
        return field
    return "%s_%s" % (pname, field)
180 """PowerOp - a dynamic class that stores (subsets of) CSV rows of data
181 about a PowerISA instruction. this is a "micro-code" expanded format
182 which generates an awful lot of wires, hence the subsetting
185 def __init__(self
, incl_asm
=True, name
=None, subset
=None, fields
=None):
188 if fields
is not None:
189 for k
, v
in fields
.items():
194 for field
, ptype
in power_op_types
.items():
196 if subset
and field
not in subset
:
198 fname
= get_pname(field
, name
)
199 setattr(self
, field
, Signal(ptype
, reset_less
=True, name
=fname
))
200 debug_report
.add(field
)
201 for bit
in single_bit_flags
:
202 field
= get_signal_name(bit
)
204 if subset
and field
not in subset
:
206 debug_report
.add(field
)
207 fname
= get_pname(field
, name
)
208 setattr(self
, field
, Signal(reset_less
=True, name
=fname
))
209 self
._fields
= fields
210 # comment out, bit too high debug level
211 #print("PowerOp debug", name, debug_report)
212 #print(" fields", fields)
216 """PowerOp.like: creates a duplicate of a given PowerOp instance
219 for fname
in other
._fields
:
220 sig
= getattr(other
, fname
, None)
222 fields
[fname
] = sig
.__class
__.like(sig
)
223 return PowerOp(subset
=other
.subset
, fields
=fields
)
225 def _eq(self
, row
=None):
228 # TODO: this conversion process from a dict to an object
229 # should really be done using e.g. namedtuple and then
231 if False: # debugging
232 if row
['CR in'] == '1':
236 if row
['CR out'] == '0':
241 ldst_mode
= row
['upd']
242 if ldst_mode
.isdigit():
243 row
['upd'] = int(ldst_mode
)
245 for field
, ptype
in power_op_types
.items():
246 if not hasattr(self
, field
):
248 if field
not in power_op_csvmap
:
250 csvname
= power_op_csvmap
[field
]
251 # log(field, ptype, csvname, row)
253 if csvname
== 'upd' and isinstance(val
, int): # LDSTMode different
257 res
.append(getattr(self
, field
).eq(val
))
260 asmcode
= row
['comment']
261 # process the comment field, strip out "equals" for FP
263 asmcode
= asmcode
.split("=")[-1]
264 log("asmcode stripping =", asmcode
,
265 asmcode
in asmidx
, hasattr(self
, "asmcode"))
266 if hasattr(self
, "asmcode") and asmcode
in asmidx
:
267 res
.append(self
.asmcode
.eq(asmidx
[asmcode
]))
268 for bit
in single_bit_flags
:
269 field
= get_signal_name(bit
)
270 if not hasattr(self
, field
):
272 sig
= getattr(self
, field
)
273 res
.append(sig
.eq(int(row
.get(bit
, 0))))
def _get_eq(self, res, field, otherop):
    """Append to `res` an assignment copying `field` from otherop into self.

    The copy is only generated when *both* PowerOp instances actually
    carry the field: column-subsetted decoders may omit it on either
    side, in which case nothing is appended.
    """
    src = getattr(otherop, field, None)
    dst = getattr(self, field, None)
    if src is None or dst is None:
        return
    res.append(dst.eq(src))
282 def eq(self
, otherop
):
284 for field
in power_op_types
.keys():
285 self
._get
_eq
(res
, field
, otherop
)
286 for bit
in single_bit_flags
:
287 self
._get
_eq
(res
, get_signal_name(bit
), otherop
)
292 for field
in power_op_types
.keys():
293 if hasattr(self
, field
):
294 res
.append(getattr(self
, field
))
295 if hasattr(self
, "asmcode"):
296 res
.append(self
.asmcode
)
297 for field
in single_bit_flags
:
298 field
= get_signal_name(field
)
299 if hasattr(self
, field
):
300 res
.append(getattr(self
, field
))
304 class PowerDecoder(Elaboratable
):
305 """PowerDecoder - decodes an incoming opcode into the type of operation
307 this is a recursive algorithm, creating Switch statements that can
308 have further match-and-decode on other parts of the opcode field before
309 finally landing at a "this CSV entry details gets returned" thing.
311 the complicating factor is the row and col subsetting. column subsetting
312 dynamically chooses only the CSV columns requested, whilst row subsetting
313 allows a function to be called on the row to determine if the Case
314 statement is to be generated for that row. this not only generates
315 completely different Decoders, it also means that some sub-decoders
316 will turn up blank (empty switch statements). if that happens we do
317 not want the parent to include a Mux for an entirely blank switch statement
318 so we have to store the switch/case statements in a tree, and
321 the reason for the tree is because elaborate can only be called *after*
322 the constructor is called. all quite messy.
325 def __init__(self
, width
, dec
, name
=None, col_subset
=None,
326 row_subset
=None, conditions
=None):
327 if conditions
is None:
328 # XXX conditions = {}
329 conditions
= {'SVP64BREV': Const(0, 1),
330 'SVP64FFT': Const(0, 1),
332 self
.actually_does_something
= False
334 self
.conditions
= conditions
335 self
.col_subset
= col_subset
336 self
.row_subsetfn
= row_subset
337 if not isinstance(dec
, list):
340 self
.opcode_in
= Signal(width
, reset_less
=True)
342 self
.op
= PowerOp(name
=name
, subset
=col_subset
)
344 if d
.suffix
is not None and d
.suffix
>= width
:
349 # create some case statement condition patterns for matching
350 # a single condition. "1----" for the first condition,
351 # "-1----" for the 2nd etc.
352 # also create a matching ordered list of conditions, for the switch,
353 # which will Cat() them together
355 self
.ckeys
= list(conditions
.keys())
358 def find_conditions(self
, opcodes
):
359 # look for conditions, create dictionary entries for them
361 rows
= OrderedDict() # start as a dictionary, get as list (after)
363 condition
= row
['CONDITIONS']
364 opcode
= row
['opcode']
366 # check it's expected
367 assert (condition
in self
.conditions
or
368 (condition
[0] == '~' and
369 condition
[1:] in self
.conditions
)), \
370 "condition %s not in %s" % (condition
, str(conditions
))
371 if opcode
not in rows
:
373 rows
[opcode
][condition
] = row
376 assert opcode
not in rows
, \
377 "opcode %s already in rows for %s" % \
380 # after checking for conditions, get just the values (ordered)
381 return list(rows
.values())
def suffix_mask(self, d):
    """Return a bitmask covering the low `d.suffix` bits of an opcode."""
    width = d.suffix
    return (1 << width) - 1
386 def divide_opcodes(self
, d
):
388 mask
= self
.suffix_mask(d
)
389 #print("mask", hex(mask))
390 for row
in d
.opcodes
:
391 opcode
= row
['opcode']
392 if d
.opint
and '-' not in opcode
:
393 opcode
= int(opcode
, 0)
395 opcode
= opcode
>> d
.suffix
396 if key
not in divided
:
400 divided
[key
].append(r
)
403 def tree_analyse(self
):
404 self
.decs
= decs
= []
405 self
.submodules
= submodules
= {}
408 # go through the list of CSV decoders first
411 opcode_switch
= Signal(d
.bitsel
[1] - d
.bitsel
[0],
414 case_does_something
= False
415 look_for
= self
.opcode_in
[d
.bitsel
[0]:d
.bitsel
[1]]
416 eq
.append(opcode_switch
.eq(look_for
))
418 opcodes
= self
.divide_opcodes(d
)
419 # TODO opcodes = self.find_conditions(opcodes)
420 opc_in
= Signal(d
.suffix
, reset_less
=True)
421 eq
.append(opc_in
.eq(opcode_switch
[:d
.suffix
]))
422 # begin the dynamic Switch statement here
424 cases
.append([opc_in
, switch_case
])
426 for key
, row
in opcodes
.items():
427 bitsel
= (d
.suffix
+d
.bitsel
[0], d
.bitsel
[1])
428 sd
= Subdecoder(pattern
=None, opcodes
=row
,
429 bitsel
=bitsel
, suffix
=None,
430 opint
=False, subdecoders
=[])
431 mname
= get_pname("dec_sub%d" % key
, self
.pname
)
432 subdecoder
= PowerDecoder(width
=32, dec
=sd
,
434 col_subset
=self
.col_subset
,
435 row_subset
=self
.row_subsetfn
,
436 conditions
=self
.conditions
)
437 if not subdecoder
.tree_analyse():
440 submodules
[mname
] = subdecoder
441 sub_eqs
.append(subdecoder
.opcode_in
.eq(self
.opcode_in
))
442 # add in the dynamic Case statement here
443 switch_case
[key
] = self
.op
.eq(subdecoder
.op
)
444 self
.actually_does_something
= True
445 case_does_something
= True
446 if case_does_something
:
449 # TODO: arguments, here (all of them) need to be a list.
450 # a for-loop around the *list* of decoder args.
452 cases
.append([opcode_switch
, switch_case
])
453 seqs
= self
.handle_subdecoders(switch_case
, submodules
, d
)
455 case_does_something
= True
457 opcodes
= self
.find_conditions(d
.opcodes
)
459 # urrr this is an awful hack. if "conditions" are active
460 # get the FIRST item (will be the same opcode), and it
461 # had BETTER have the same unit and also pass other
462 # row subset conditions.
463 if 'opcode' not in row
: # must be a "CONDITIONS" dict...
465 _row
= row
[list(row
.keys())[0]]
467 is_conditions
= False
469 opcode
= _row
['opcode']
470 if d
.opint
and '-' not in opcode
:
471 opcode
= int(opcode
, 0)
474 if self
.row_subsetfn
:
475 if not self
.row_subsetfn(opcode
, _row
):
477 # add in the dynamic Case statement here
479 switch_case
[opcode
] = {}
480 for k
, crow
in row
.items():
481 # log("ordered", k, crow)
482 switch_case
[opcode
][k
] = self
.op
._eq
(crow
)
484 switch_case
[opcode
] = self
.op
._eq
(row
)
485 self
.actually_does_something
= True
486 case_does_something
= True
490 if case_does_something
:
492 #print("submodule eqs", self.pname, eq)
494 #print("submodules", self.pname, submodules)
497 return self
.actually_does_something
499 def handle_subdecoders(self
, switch_case
, submodules
, d
):
501 for dlist
in d
.subdecoders
:
502 if not isinstance(dlist
, list): # XXX HACK: take first pattern
505 #print("subdec", dec.pattern, self.pname)
506 mname
= get_pname("dec%d" % dec
.pattern
, self
.pname
)
507 if mname
in submodules
:
510 assert mname
not in submodules
511 subdecoder
= PowerDecoder(self
.width
, dec
,
513 col_subset
=self
.col_subset
,
514 row_subset
=self
.row_subsetfn
,
515 conditions
=self
.conditions
)
516 log("subdecoder", mname
, subdecoder
)
517 if not subdecoder
.tree_analyse(): # doesn't do anything
518 log("analysed, DELETING", mname
)
521 submodules
[mname
] = subdecoder
522 eqs
.append(subdecoder
.opcode_in
.eq(self
.opcode_in
))
523 switch_case
[dec
.pattern
] = self
.op
.eq(subdecoder
.op
)
524 self
.actually_does_something
= True
528 def elaborate(self
, platform
):
529 #print("decoder elaborate", self.pname, self.submodules)
535 for mname
, subdecoder
in self
.submodules
.items():
536 setattr(m
.submodules
, mname
, subdecoder
)
538 for switch_case
in self
.decs
:
539 for (switch
, cases
) in switch_case
:
540 with m
.Switch(switch
):
541 for key
, eqs
in cases
.items():
543 # "conditions" are a further switch statement
544 if isinstance(eqs
, dict):
545 self
.condition_switch(m
, eqs
)
550 def condition_switch(self
, m
, cases
):
551 """against the global list of "conditions", having matched against
552 bits of the opcode, we FINALLY now have to match against some
553 additional "conditions". this is because there can be **MULTIPLE**
554 entries for a given opcode match. here we discern them.
559 for casekey
, eqs
in cases
.items():
560 if casekey
.startswith('~'):
561 with m
.If(~self
.conditions
[casekey
[1:]]):
564 with m
.If(self
.conditions
[casekey
]):
568 return [self
.opcode_in
] + self
.op
.ports()
571 class TopPowerDecoder(PowerDecoder
):
574 top-level hierarchical decoder for POWER ISA
575 bigendian dynamically switches between big and little endian decoding
576 (reverses byte order). See V3.0B p44 1.11.2
579 def __init__(self
, width
, dec
, name
=None, col_subset
=None,
580 row_subset
=None, conditions
=None):
581 PowerDecoder
.__init
__(self
, width
, dec
, name
,
582 col_subset
, row_subset
, conditions
)
583 self
.fields
= df
= DecodeFields(SignalBitRange
, [self
.opcode_in
])
584 self
.fields
.create_specs()
585 self
.raw_opcode_in
= Signal
.like(self
.opcode_in
, reset_less
=True)
586 self
.bigendian
= Signal(reset_less
=True)
588 for fname
, value
in self
.fields
.common_fields
.items():
589 signame
= get_pname(fname
, name
)
590 sig
= Signal(value
[0:-1].shape(), reset_less
=True, name
=signame
)
591 setattr(self
, fname
, sig
)
593 # create signals for all field forms
594 forms
= self
.form_names
597 fields
= self
.fields
.instrs
[form
]
599 Fields
= namedtuple("Fields", fk
)
601 for k
, value
in fields
.items():
602 fname
= "%s_%s" % (form
, k
)
603 sig
= Signal(value
[0:-1].shape(), reset_less
=True, name
=fname
)
606 setattr(self
, "Form%s" % form
, instr
)
607 self
.sigforms
[form
] = instr
def form_names(self):
    """Names of all the instruction-field "forms" known to this decoder.

    NOTE(review): callers access this without parentheses (e.g.
    `forms = self.form_names`), so upstream very likely decorates this
    with @property - the decorator line is not visible here; confirm.
    """
    instrs = self.fields.instrs
    return instrs.keys()
615 def elaborate(self
, platform
):
616 m
= PowerDecoder
.elaborate(self
, platform
)
618 # sigh duplicated in SVP64PowerDecoder
619 # raw opcode in assumed to be in LE order: byte-reverse it to get BE
620 raw_le
= self
.raw_opcode_in
622 for i
in range(0, self
.width
, 8):
623 l
.append(raw_le
[i
:i
+8])
626 comb
+= self
.opcode_in
.eq(Mux(self
.bigendian
, raw_be
, raw_le
))
628 # add all signal from commonly-used fields
629 for fname
, value
in self
.fields
.common_fields
.items():
630 sig
= getattr(self
, fname
)
631 comb
+= sig
.eq(value
[0:-1])
633 # link signals for all field forms
634 forms
= self
.form_names
636 sf
= self
.sigforms
[form
]
637 fields
= self
.fields
.instrs
[form
]
638 for k
, value
in fields
.items():
640 comb
+= sig
.eq(value
[0:-1])
645 res
= [self
.raw_opcode_in
, self
.bigendian
] + PowerDecoder
.ports(self
)
646 for condition
in self
.conditions
.values():
647 res
.append(condition
)
651 #############################################################
652 # PRIMARY FUNCTION SPECIFYING ALTERNATIVE SVP64 POWER DECODER
654 def create_pdecode_svp64_ldst(name
=None, col_subset
=None, row_subset
=None,
656 """create_pdecode - creates a cascading hierarchical POWER ISA decoder
658 subsetting of the PowerOp decoding is possible by setting col_subset
660 log("create_pdecode_svp64_ldst", name
, col_subset
, row_subset
, include_fp
)
662 # some alteration to the CSV files is required for SV so we use
665 get_csv
= isa
.get_svp64_csv
669 Subdecoder(pattern
=58, opcodes
=get_csv("svldst_minor_58.csv"),
670 opint
=True, bitsel
=(0, 2), suffix
=None, subdecoders
=[]),
671 # nope - needs 4-in regs
672 # Subdecoder(pattern=62, opcodes=get_csv("svldst_minor_62.csv"),
673 # opint=True, bitsel=(0, 2), suffix=None, subdecoders=[]),
676 # FP 63L/H decoders. TODO: move mffsfamily to separate subdecoder
677 if False and include_fp
:
679 Subdecoder(pattern
=63, opcodes
=get_csv("minor_63.csv"),
680 opint
=False, bitsel
=(1, 11), suffix
=None,
684 Subdecoder(pattern
=59, opcodes
=get_csv("minor_59.csv"),
685 opint
=False, bitsel
=(1, 11), suffix
=None,
689 # top level: extra merged with major
691 opcodes
= get_csv("svldst_major.csv")
692 dec
.append(Subdecoder(pattern
=None, opint
=True, opcodes
=opcodes
,
693 bitsel
=(26, 32), suffix
=None, subdecoders
=pminor
))
695 return TopPowerDecoder(32, dec
, name
=name
, col_subset
=col_subset
,
696 row_subset
=row_subset
)
699 ####################################################
700 # PRIMARY FUNCTION SPECIFYING THE FULL POWER DECODER
702 def create_pdecode(name
=None, col_subset
=None, row_subset
=None,
703 include_fp
=False, conditions
=None):
704 """create_pdecode - creates a cascading hierarchical POWER ISA decoder
706 subsetting of the PowerOp decoding is possible by setting col_subset
708 log("create_pdecode", name
, col_subset
, row_subset
, include_fp
)
710 # some alteration to the CSV files is required for SV so we use
713 get_csv
= isa
.get_svp64_csv
715 # minor 19 has extra patterns
717 m19
.append(Subdecoder(pattern
=19, opcodes
=get_csv("minor_19.csv"),
718 opint
=True, bitsel
=(1, 11), suffix
=None,
720 # XXX problem with sub-decoders (can only handle one),
721 # sort this another time
722 # m19.append(Subdecoder(pattern=19, opcodes=get_csv("minor_19_00000.csv"),
723 # opint=True, bitsel=(1, 6), suffix=None,
729 Subdecoder(pattern
=30, opcodes
=get_csv("minor_30.csv"),
730 opint
=True, bitsel
=(1, 5), suffix
=None, subdecoders
=[]),
731 Subdecoder(pattern
=31, opcodes
=get_csv("minor_31.csv"),
732 opint
=True, bitsel
=(1, 11), suffix
=0b00101, subdecoders
=[]),
733 Subdecoder(pattern
=58, opcodes
=get_csv("minor_58.csv"),
734 opint
=True, bitsel
=(0, 2), suffix
=None, subdecoders
=[]),
735 Subdecoder(pattern
=62, opcodes
=get_csv("minor_62.csv"),
736 opint
=True, bitsel
=(0, 2), suffix
=None, subdecoders
=[]),
737 Subdecoder(pattern
=22, opcodes
=get_csv("minor_22.csv"),
738 opint
=True, bitsel
=(0, 5), suffix
=None, subdecoders
=[]),
739 Subdecoder(pattern
=5, opcodes
=get_csv("minor_5.csv"),
740 opint
=True, bitsel
=(0, 11), suffix
=None, subdecoders
=[]),
743 # FP 63L/H decoders. TODO: move mffsfamily to separate subdecoder
746 Subdecoder(pattern
=63, opcodes
=get_csv("minor_63.csv"),
747 opint
=False, bitsel
=(1, 11), suffix
=None,
751 Subdecoder(pattern
=59, opcodes
=get_csv("minor_59.csv"),
752 opint
=False, bitsel
=(1, 11), suffix
=None,
756 # top level: extra merged with major
758 opcodes
= get_csv("major.csv")
759 dec
.append(Subdecoder(pattern
=None, opint
=True, opcodes
=opcodes
,
760 bitsel
=(26, 32), suffix
=None, subdecoders
=pminor
))
761 opcodes
= get_csv("extra.csv")
762 dec
.append(Subdecoder(pattern
=None, opint
=False, opcodes
=opcodes
,
763 bitsel
=(0, 32), suffix
=None, subdecoders
=[]))
765 return TopPowerDecoder(32, dec
, name
=name
, col_subset
=col_subset
,
766 row_subset
=row_subset
,
767 conditions
=conditions
)
770 # https://github.com/apertus-open-source-cinema/naps/blob/9ebbc0/naps/soc/cli.py#L17
773 def fragment_repr(original
):
774 from textwrap
import indent
776 for attr
in ['ports', 'drivers', 'statements', 'attrs',
777 'generated', 'flatten']:
778 attrs_str
+= f
"{attr}={repr(getattr(original, attr))},\n"
781 for name
, domain
in original
.domains
.items():
782 # TODO: this is not really sound because domains could be non local
783 domains_str
+= f
"{name}: {domain.name}\n"
784 attrs_str
+= f
"domains={{{indent(domains_str, ' ')}}},\n"
787 for child
, name
in original
.subfragments
:
788 children_str
+= f
"[{name}, {fragment_repr(child)}]\n"
789 attrs_str
+= f
"children=[{indent(children_str, ' ')}],\n"
791 return f
"Fragment({indent(attrs_str, ' ')})"
794 if __name__
== '__main__':
799 def rowsubsetfn(opcode
, row
):
800 log("row_subset", opcode
, row
)
801 return row
['unit'] in ['LDST', 'FPU']
803 conditions
= {'SVP64BREV': Signal(name
="svp64brev", reset_less
=True),
804 'SVP64FFT': Signal(name
="svp64fft", reset_less
=True),
806 pdecode
= create_pdecode(name
="rowsub",
807 col_subset
={'opcode', 'function_unit',
809 'in2_sel', 'in3_sel'},
810 row_subset
=rowsubsetfn
,
812 conditions
=conditions
)
813 vl
= rtlil
.convert(pdecode
, ports
=pdecode
.ports())
814 with
open("row_subset_decoder.il", "w") as f
:
817 vl
= verilog
.convert(pdecode
, ports
=pdecode
.ports())
818 with
open("row_subset_decoder.v", "w") as f
:
823 pdecode
= create_pdecode(name
="fusubset", col_subset
={'function_unit'},
824 conditions
=conditions
)
825 vl
= rtlil
.convert(pdecode
, ports
=pdecode
.ports())
826 with
open("col_subset_decoder.il", "w") as f
:
829 from nmigen
.hdl
.ir
import Fragment
830 elaborated
= Fragment
.get(pdecode
, platform
=None)
831 elaborated_repr
= fragment_repr(elaborated
)
832 print(elaborated_repr
)
839 pdecode
= create_pdecode(include_fp
=True)
840 vl
= rtlil
.convert(pdecode
, ports
=pdecode
.ports())
841 with
open("decoder.il", "w") as f
:
845 pdecode
= create_pdecode_svp64_ldst(include_fp
=True)
846 vl
= rtlil
.convert(pdecode
, ports
=pdecode
.ports())
847 with
open("decoder_svp64.il", "w") as f
: