5f9f577c59ff58df7c715bd19409093a61a2dd17
"""Cascading Power ISA Decoder

# Copyright (C) 2020 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
# Copyright (C) 2020 Michael Nolan <mtnolan2640@gmail.com>

This module uses CSV tables in a hierarchical/peer cascading fashion,
to create a multi-level instruction decoder by recognising appropriate
patterns. The output is a wide, flattened (1-level) series of bitfields,
suitable for a simple RISC engine.

This is based on Anton Blanchard's excellent microwatt work:
https://github.com/antonblanchard/microwatt/blob/master/decode1.vhdl

The basic principle is that the python code does the heavy lifting
(reading the CSV files, constructing the hierarchy), creating the HDL
AST with for-loops generating switch-case statements.

Where "normal" HDL would do this, in laborious excruciating detail:

    switch (opcode & major_mask_bits):
        case opcode_2: decode_opcode_2()
        switch (opcode & minor_19_mask_bits)
            case minor_opcode_19_operation_X:
            case minor_opcode_19_operation_y:

we take *full* advantage of the decoupling between python and the
nmigen AST data structure, to do this:

    with m.Switch(opcode & self.mask):
        for case_bitmask in subcases:
            with m.If(opcode & case_bitmask): {do_something}

this includes specifying the information sufficient to perform subdecoding.

The full hierarchical tree for decoding POWER9 is specified here;
subsetting is possible by specifying col_subset (row_subset TODO).

A PowerDecoder takes a *list* of CSV files with an associated bit-range
that it is requested to match against the "opcode" row of the CSV file.
This pattern can be either an integer, a binary number, *or* a
wildcard nmigen Case pattern of the form "001--1-100".

Subdecoders are *additional* cases with further decoding. The "pattern"
argument is specified as one of the Case statements (a peer of the
opcode row in the CSV file), and thus further fields of the opcode
may be decoded giving increasing levels of detail.

Example hierarchy:

    [ (extra.csv: bit-fields entire 32-bit range
        000000---------------01000000000 -> ILLEGAL instruction
        01100000000000000000000000000000 -> SIM_CONFIG instruction
        ................................ ->
      ),
      (major.csv: first 6 bits ONLY
        001100 -> ALU,OP_ADD (add)
        001101 -> ALU,OP_ADD (another type of add)
        ...
        001011 this must match *MAJOR*.CSV
        [ (minor_19.csv: bits 21 through 30 inclusive:
            0b0000000000 -> ALU,OP_MCRF
            ...
          ),
          (minor_19_00000.csv: bits 21 through 25 inclusive:
            0b00010 -> ALU,add_pcis
          )
        ]
      ),
    ]
"""
from collections import namedtuple, OrderedDict

from nmigen import Module, Elaboratable, Signal, Cat, Mux, Const
from nmigen.cli import rtlil, verilog

from openpower.decoder.power_enums import (Function, Form, MicrOp,
                                           In1Sel, In2Sel, In3Sel, OutSel,
                                           SVEXTRA, SVEtype, SVPtype,  # Simple-V
                                           RC, LdstLen, LDSTMode, CryIn,
                                           single_bit_flags, CRInSel,
                                           CROutSel, get_signal_name,
                                           default_values, insns, asmidx)
from openpower.decoder.power_fields import DecodeFields
from openpower.decoder.power_fieldsn import SigDecode, SignalBitRange
from openpower.decoder.power_svp64 import SVP64RM
from openpower.util import log
# key data structure in which the POWER decoder is specified,
# in a hierarchical fashion
Subdecoder = namedtuple(  # fix autoformatter
    "Subdecoder",
    ["pattern",    # the major pattern to search for (e.g. major opcode)
     "opcodes",    # a dictionary of minor patterns to find
     "opint",      # true => the pattern must not be in "10----11" format
     "bitsel",     # the bits (as a range) against which "pattern" matches
     "suffix",     # shift the opcode down before decoding
     "subdecoders"  # list of further subdecoders for *additional* matches,
     # *ONLY* after "pattern" has *ALSO* been matched against.
     ])
120 power_op_types
= {'function_unit': Function
,
121 'internal_op': MicrOp
,
138 'sv_cr_out': SVEXTRA
,
145 power_op_csvmap
= {'function_unit': 'unit',
147 'internal_op': 'internal op',
156 'sv_out2': 'sv_out2',
157 'sv_cr_in': 'sv_cr_in',
158 'sv_cr_out': 'sv_cr_out',
159 'SV_Etype': 'SV_Etype',
160 'SV_Ptype': 'SV_Ptype',
163 'ldst_len': 'ldst len',
def get_pname(field, pname):
    """Return the name for a per-decoder Signal.

    When a decoder prefix *pname* is given the result is "pname_field";
    when no prefix is supplied (pname is None - the default decoder
    name throughout this module) the bare field name is returned, so
    top-level decoders get plain, unprefixed signal names rather than
    a bogus "None_field".
    """
    if pname is None:
        return field
    return "%s_%s" % (pname, field)
177 """PowerOp - a dynamic class that stores (subsets of) CSV rows of data
178 about a PowerISA instruction. this is a "micro-code" expanded format
179 which generates an awful lot of wires, hence the subsetting
182 def __init__(self
, incl_asm
=True, name
=None, subset
=None, fields
=None):
185 if fields
is not None:
186 for k
, v
in fields
.items():
191 for field
, ptype
in power_op_types
.items():
193 if subset
and field
not in subset
:
195 fname
= get_pname(field
, name
)
196 setattr(self
, field
, Signal(ptype
, reset_less
=True, name
=fname
))
197 debug_report
.add(field
)
198 for bit
in single_bit_flags
:
199 field
= get_signal_name(bit
)
201 if subset
and field
not in subset
:
203 debug_report
.add(field
)
204 fname
= get_pname(field
, name
)
205 setattr(self
, field
, Signal(reset_less
=True, name
=fname
))
206 self
._fields
= fields
207 # comment out, bit too high debug level
208 #print("PowerOp debug", name, debug_report)
209 #print(" fields", fields)
213 """PowerOp.like: creates a duplicate of a given PowerOp instance
216 for fname
in other
._fields
:
217 sig
= getattr(other
, fname
, None)
219 fields
[fname
] = sig
.__class
__.like(sig
)
220 return PowerOp(subset
=other
.subset
, fields
=fields
)
222 def _eq(self
, row
=None):
225 # TODO: this conversion process from a dict to an object
226 # should really be done using e.g. namedtuple and then
228 if False: # debugging
229 if row
['CR in'] == '1':
233 if row
['CR out'] == '0':
238 ldst_mode
= row
['upd']
239 if ldst_mode
.isdigit():
240 row
['upd'] = int(ldst_mode
)
242 for field
, ptype
in power_op_types
.items():
243 if not hasattr(self
, field
):
245 if field
not in power_op_csvmap
:
247 csvname
= power_op_csvmap
[field
]
248 # log(field, ptype, csvname, row)
250 if csvname
== 'upd' and isinstance(val
, int): # LDSTMode different
254 res
.append(getattr(self
, field
).eq(val
))
257 asmcode
= row
['comment']
258 # process the comment field, strip out "equals" for FP
260 asmcode
= asmcode
.split("=")[-1]
261 log ("asmcode stripping =", asmcode
,
262 asmcode
in asmidx
, hasattr(self
, "asmcode"))
263 if hasattr(self
, "asmcode") and asmcode
in asmidx
:
264 res
.append(self
.asmcode
.eq(asmidx
[asmcode
]))
265 for bit
in single_bit_flags
:
266 field
= get_signal_name(bit
)
267 if not hasattr(self
, field
):
269 sig
= getattr(self
, field
)
270 res
.append(sig
.eq(int(row
.get(bit
, 0))))
273 def _get_eq(self
, res
, field
, otherop
):
274 copyfrom
= getattr(otherop
, field
, None)
275 copyto
= getattr(self
, field
, None)
276 if copyfrom
is not None and copyto
is not None:
277 res
.append(copyto
.eq(copyfrom
))
def eq(self, otherop):
    """Copy every decode output field of *otherop* into this PowerOp.

    Covers both the typed fields (power_op_types) and the single-bit
    flags. Returns the list of nmigen assignments, suitable for use
    as a Case/If body. The accumulator initialisation and the return
    were missing from the visible text and are restored here: callers
    (e.g. switch_case[key] = self.op.eq(subdecoder.op)) consume the
    returned list.
    """
    res = []
    for field in power_op_types.keys():
        self._get_eq(res, field, otherop)
    for bit in single_bit_flags:
        self._get_eq(res, get_signal_name(bit), otherop)
    return res
def ports(self):
    """Return the list of Signals this PowerOp carries.

    Only fields actually present on this instance are included
    (column subsetting may have omitted some). The def line,
    accumulator initialisation and return were missing from the
    visible text and are restored: PowerDecoder.ports() calls
    self.op.ports() and concatenates the result.
    """
    res = []
    for field in power_op_types.keys():
        if hasattr(self, field):
            res.append(getattr(self, field))
    if hasattr(self, "asmcode"):
        res.append(self.asmcode)
    for field in single_bit_flags:
        field = get_signal_name(field)
        if hasattr(self, field):
            res.append(getattr(self, field))
    return res
301 class PowerDecoder(Elaboratable
):
302 """PowerDecoder - decodes an incoming opcode into the type of operation
304 this is a recursive algorithm, creating Switch statements that can
305 have further match-and-decode on other parts of the opcode field before
306 finally landing at a "this CSV entry details gets returned" thing.
308 the complicating factor is the row and col subsetting. column subsetting
309 dynamically chooses only the CSV columns requested, whilst row subsetting
310 allows a function to be called on the row to determine if the Case
311 statement is to be generated for that row. this not only generates
312 completely different Decoders, it also means that some sub-decoders
313 will turn up blank (empty switch statements). if that happens we do
314 not want the parent to include a Mux for an entirely blank switch statement
315 so we have to store the switch/case statements in a tree, and
318 the reason for the tree is because elaborate can only be called *after*
319 the constructor is called. all quite messy.
322 def __init__(self
, width
, dec
, name
=None, col_subset
=None,
323 row_subset
=None, conditions
=None):
324 if conditions
is None:
325 # XXX conditions = {}
326 conditions
= {'SVP64BREV': Const(0, 1),
327 'SVP64FFT': Const(0, 1),
329 self
.actually_does_something
= False
331 self
.conditions
= conditions
332 self
.col_subset
= col_subset
333 self
.row_subsetfn
= row_subset
334 if not isinstance(dec
, list):
337 self
.opcode_in
= Signal(width
, reset_less
=True)
339 self
.op
= PowerOp(name
=name
, subset
=col_subset
)
341 if d
.suffix
is not None and d
.suffix
>= width
:
346 # create some case statement condition patterns for matching
347 # a single condition. "1----" for the first condition,
348 # "-1----" for the 2nd etc.
349 # also create a matching ordered list of conditions, for the switch,
350 # which will Cat() them together
352 self
.ckeys
= list(conditions
.keys())
355 def find_conditions(self
, opcodes
):
356 # look for conditions, create dictionary entries for them
358 rows
= OrderedDict() # start as a dictionary, get as list (after)
360 condition
= row
['CONDITIONS']
361 opcode
= row
['opcode']
363 # check it's expected
364 assert (condition
in self
.conditions
or
365 (condition
[0] == '~' and
366 condition
[1:] in self
.conditions
)), \
367 "condition %s not in %s" % (condition
, str(conditions
))
368 if opcode
not in rows
:
370 rows
[opcode
][condition
] = row
373 assert opcode
not in rows
, \
374 "opcode %s already in rows for %s" % \
377 # after checking for conditions, get just the values (ordered)
378 return list(rows
.values())
def suffix_mask(self, d):
    """Mask selecting the low d.suffix bits of the sub-opcode.

    These are the bits used as the key of the secondary switch when a
    Subdecoder specifies a "suffix" split.
    """
    nbits = d.suffix
    return (1 << nbits) - 1
383 def divide_opcodes(self
, d
):
385 mask
= self
.suffix_mask(d
)
386 #print("mask", hex(mask))
387 for row
in d
.opcodes
:
388 opcode
= row
['opcode']
389 if d
.opint
and '-' not in opcode
:
390 opcode
= int(opcode
, 0)
392 opcode
= opcode
>> d
.suffix
393 if key
not in divided
:
397 divided
[key
].append(r
)
400 def tree_analyse(self
):
401 self
.decs
= decs
= []
402 self
.submodules
= submodules
= {}
405 # go through the list of CSV decoders first
408 opcode_switch
= Signal(d
.bitsel
[1] - d
.bitsel
[0],
411 case_does_something
= False
412 look_for
= self
.opcode_in
[d
.bitsel
[0]:d
.bitsel
[1]]
413 eq
.append(opcode_switch
.eq(look_for
))
415 opcodes
= self
.divide_opcodes(d
)
416 # TODO opcodes = self.find_conditions(opcodes)
417 opc_in
= Signal(d
.suffix
, reset_less
=True)
418 eq
.append(opc_in
.eq(opcode_switch
[:d
.suffix
]))
419 # begin the dynamic Switch statement here
421 cases
.append([opc_in
, switch_case
])
423 for key
, row
in opcodes
.items():
424 bitsel
= (d
.suffix
+d
.bitsel
[0], d
.bitsel
[1])
425 sd
= Subdecoder(pattern
=None, opcodes
=row
,
426 bitsel
=bitsel
, suffix
=None,
427 opint
=False, subdecoders
=[])
428 mname
= get_pname("dec_sub%d" % key
, self
.pname
)
429 subdecoder
= PowerDecoder(width
=32, dec
=sd
,
431 col_subset
=self
.col_subset
,
432 row_subset
=self
.row_subsetfn
,
433 conditions
=self
.conditions
)
434 if not subdecoder
.tree_analyse():
437 submodules
[mname
] = subdecoder
438 sub_eqs
.append(subdecoder
.opcode_in
.eq(self
.opcode_in
))
439 # add in the dynamic Case statement here
440 switch_case
[key
] = self
.op
.eq(subdecoder
.op
)
441 self
.actually_does_something
= True
442 case_does_something
= True
443 if case_does_something
:
446 # TODO: arguments, here (all of them) need to be a list.
447 # a for-loop around the *list* of decoder args.
449 cases
.append([opcode_switch
, switch_case
])
450 seqs
= self
.handle_subdecoders(switch_case
, submodules
, d
)
452 case_does_something
= True
454 opcodes
= self
.find_conditions(d
.opcodes
)
456 # urrr this is an awful hack. if "conditions" are active
457 # get the FIRST item (will be the same opcode), and it
458 # had BETTER have the same unit and also pass other
459 # row subset conditions.
460 if 'opcode' not in row
: # must be a "CONDITIONS" dict...
462 _row
= row
[list(row
.keys())[0]]
464 is_conditions
= False
466 opcode
= _row
['opcode']
467 if d
.opint
and '-' not in opcode
:
468 opcode
= int(opcode
, 0)
471 if self
.row_subsetfn
:
472 if not self
.row_subsetfn(opcode
, _row
):
474 # add in the dynamic Case statement here
476 switch_case
[opcode
] = {}
477 for k
, crow
in row
.items():
478 # log("ordered", k, crow)
479 switch_case
[opcode
][k
] = self
.op
._eq
(crow
)
481 switch_case
[opcode
] = self
.op
._eq
(row
)
482 self
.actually_does_something
= True
483 case_does_something
= True
487 if case_does_something
:
489 #print("submodule eqs", self.pname, eq)
491 #print("submodules", self.pname, submodules)
494 return self
.actually_does_something
496 def handle_subdecoders(self
, switch_case
, submodules
, d
):
498 for dlist
in d
.subdecoders
:
499 if not isinstance(dlist
, list): # XXX HACK: take first pattern
502 #print("subdec", dec.pattern, self.pname)
503 mname
= get_pname("dec%d" % dec
.pattern
, self
.pname
)
504 if mname
in submodules
:
507 assert mname
not in submodules
508 subdecoder
= PowerDecoder(self
.width
, dec
,
510 col_subset
=self
.col_subset
,
511 row_subset
=self
.row_subsetfn
,
512 conditions
=self
.conditions
)
513 log ("subdecoder", mname
, subdecoder
)
514 if not subdecoder
.tree_analyse(): # doesn't do anything
515 log ("analysed, DELETING", mname
)
518 submodules
[mname
] = subdecoder
519 eqs
.append(subdecoder
.opcode_in
.eq(self
.opcode_in
))
520 switch_case
[dec
.pattern
] = self
.op
.eq(subdecoder
.op
)
521 self
.actually_does_something
= True
525 def elaborate(self
, platform
):
526 #print("decoder elaborate", self.pname, self.submodules)
532 for mname
, subdecoder
in self
.submodules
.items():
533 setattr(m
.submodules
, mname
, subdecoder
)
535 for switch_case
in self
.decs
:
536 for (switch
, cases
) in switch_case
:
537 with m
.Switch(switch
):
538 for key
, eqs
in cases
.items():
540 # "conditions" are a further switch statement
541 if isinstance(eqs
, dict):
542 self
.condition_switch(m
, eqs
)
547 def condition_switch(self
, m
, cases
):
548 """against the global list of "conditions", having matched against
549 bits of the opcode, we FINALLY now have to match against some
550 additional "conditions". this is because there can be **MULTIPLE**
551 entries for a given opcode match. here we discern them.
556 for casekey
, eqs
in cases
.items():
557 if casekey
.startswith('~'):
558 with m
.If(~self
.conditions
[casekey
[1:]]):
561 with m
.If(self
.conditions
[casekey
]):
def ports(self):
    """Decoder I/O list: the opcode input plus every PowerOp output
    Signal. The def line was missing from the visible text and is
    restored (TopPowerDecoder.ports calls PowerDecoder.ports(self)).
    """
    return [self.opcode_in] + self.op.ports()
568 class TopPowerDecoder(PowerDecoder
):
571 top-level hierarchical decoder for POWER ISA
572 bigendian dynamically switches between big and little endian decoding
573 (reverses byte order). See V3.0B p44 1.11.2
576 def __init__(self
, width
, dec
, name
=None, col_subset
=None,
577 row_subset
=None, conditions
=None):
578 PowerDecoder
.__init
__(self
, width
, dec
, name
,
579 col_subset
, row_subset
, conditions
)
580 self
.fields
= df
= DecodeFields(SignalBitRange
, [self
.opcode_in
])
581 self
.fields
.create_specs()
582 self
.raw_opcode_in
= Signal
.like(self
.opcode_in
, reset_less
=True)
583 self
.bigendian
= Signal(reset_less
=True)
585 for fname
, value
in self
.fields
.common_fields
.items():
586 signame
= get_pname(fname
, name
)
587 sig
= Signal(value
[0:-1].shape(), reset_less
=True, name
=signame
)
588 setattr(self
, fname
, sig
)
590 # create signals for all field forms
591 forms
= self
.form_names
594 fields
= self
.fields
.instrs
[form
]
596 Fields
= namedtuple("Fields", fk
)
598 for k
, value
in fields
.items():
599 fname
= "%s_%s" % (form
, k
)
600 sig
= Signal(value
[0:-1].shape(), reset_less
=True, name
=fname
)
603 setattr(self
, "Form%s" % form
, instr
)
604 self
.sigforms
[form
] = instr
@property
def form_names(self):
    """Names of all instruction Forms known to the field decoder.

    Restored as a @property: both __init__ and elaborate read it as
    an attribute (forms = self.form_names), not as a method call.
    """
    return self.fields.instrs.keys()
612 def elaborate(self
, platform
):
613 m
= PowerDecoder
.elaborate(self
, platform
)
615 # sigh duplicated in SVP64PowerDecoder
616 # raw opcode in assumed to be in LE order: byte-reverse it to get BE
617 raw_le
= self
.raw_opcode_in
619 for i
in range(0, self
.width
, 8):
620 l
.append(raw_le
[i
:i
+8])
623 comb
+= self
.opcode_in
.eq(Mux(self
.bigendian
, raw_be
, raw_le
))
625 # add all signal from commonly-used fields
626 for fname
, value
in self
.fields
.common_fields
.items():
627 sig
= getattr(self
, fname
)
628 comb
+= sig
.eq(value
[0:-1])
630 # link signals for all field forms
631 forms
= self
.form_names
633 sf
= self
.sigforms
[form
]
634 fields
= self
.fields
.instrs
[form
]
635 for k
, value
in fields
.items():
637 comb
+= sig
.eq(value
[0:-1])
642 res
= [self
.raw_opcode_in
, self
.bigendian
] + PowerDecoder
.ports(self
)
643 for condition
in self
.conditions
.values():
644 res
.append(condition
)
648 #############################################################
649 # PRIMARY FUNCTION SPECIFYING ALTERNATIVE SVP64 POWER DECODER
651 def create_pdecode_svp64_ldst(name
=None, col_subset
=None, row_subset
=None,
653 """create_pdecode - creates a cascading hierarchical POWER ISA decoder
655 subsetting of the PowerOp decoding is possible by setting col_subset
657 log ("create_pdecode_svp64_ldst", name
, col_subset
, row_subset
, include_fp
)
659 # some alteration to the CSV files is required for SV so we use
662 get_csv
= isa
.get_svp64_csv
666 Subdecoder(pattern
=58, opcodes
=get_csv("svldst_minor_58.csv"),
667 opint
=True, bitsel
=(0, 2), suffix
=None, subdecoders
=[]),
668 # nope - needs 4-in regs
669 #Subdecoder(pattern=62, opcodes=get_csv("svldst_minor_62.csv"),
670 # opint=True, bitsel=(0, 2), suffix=None, subdecoders=[]),
673 # FP 63L/H decoders. TODO: move mffsfamily to separate subdecoder
674 if False and include_fp
:
676 Subdecoder(pattern
=63, opcodes
=get_csv("minor_63.csv"),
677 opint
=False, bitsel
=(1, 11), suffix
=None,
681 Subdecoder(pattern
=59, opcodes
=get_csv("minor_59.csv"),
682 opint
=False, bitsel
=(1, 11), suffix
=None,
686 # top level: extra merged with major
688 opcodes
= get_csv("svldst_major.csv")
689 dec
.append(Subdecoder(pattern
=None, opint
=True, opcodes
=opcodes
,
690 bitsel
=(26, 32), suffix
=None, subdecoders
=pminor
))
692 return TopPowerDecoder(32, dec
, name
=name
, col_subset
=col_subset
,
693 row_subset
=row_subset
)
696 ####################################################
697 # PRIMARY FUNCTION SPECIFYING THE FULL POWER DECODER
699 def create_pdecode(name
=None, col_subset
=None, row_subset
=None,
700 include_fp
=False, conditions
=None):
701 """create_pdecode - creates a cascading hierarchical POWER ISA decoder
703 subsetting of the PowerOp decoding is possible by setting col_subset
705 log ("create_pdecode", name
, col_subset
, row_subset
, include_fp
)
707 # some alteration to the CSV files is required for SV so we use
710 get_csv
= isa
.get_svp64_csv
712 # minor 19 has extra patterns
714 m19
.append(Subdecoder(pattern
=19, opcodes
=get_csv("minor_19.csv"),
715 opint
=True, bitsel
=(1, 11), suffix
=None,
717 # XXX problem with sub-decoders (can only handle one),
718 # sort this another time
719 #m19.append(Subdecoder(pattern=19, opcodes=get_csv("minor_19_00000.csv"),
720 # opint=True, bitsel=(1, 6), suffix=None,
726 Subdecoder(pattern
=30, opcodes
=get_csv("minor_30.csv"),
727 opint
=True, bitsel
=(1, 5), suffix
=None, subdecoders
=[]),
728 Subdecoder(pattern
=31, opcodes
=get_csv("minor_31.csv"),
729 opint
=True, bitsel
=(1, 11), suffix
=0b00101, subdecoders
=[]),
730 Subdecoder(pattern
=58, opcodes
=get_csv("minor_58.csv"),
731 opint
=True, bitsel
=(0, 2), suffix
=None, subdecoders
=[]),
732 Subdecoder(pattern
=62, opcodes
=get_csv("minor_62.csv"),
733 opint
=True, bitsel
=(0, 2), suffix
=None, subdecoders
=[]),
734 Subdecoder(pattern
=22, opcodes
=get_csv("minor_22.csv"),
735 opint
=True, bitsel
=(1, 5), suffix
=None, subdecoders
=[]),
738 # FP 63L/H decoders. TODO: move mffsfamily to separate subdecoder
741 Subdecoder(pattern
=63, opcodes
=get_csv("minor_63.csv"),
742 opint
=False, bitsel
=(1, 11), suffix
=None,
746 Subdecoder(pattern
=59, opcodes
=get_csv("minor_59.csv"),
747 opint
=False, bitsel
=(1, 11), suffix
=None,
751 # top level: extra merged with major
753 opcodes
= get_csv("major.csv")
754 dec
.append(Subdecoder(pattern
=None, opint
=True, opcodes
=opcodes
,
755 bitsel
=(26, 32), suffix
=None, subdecoders
=pminor
))
756 opcodes
= get_csv("extra.csv")
757 dec
.append(Subdecoder(pattern
=None, opint
=False, opcodes
=opcodes
,
758 bitsel
=(0, 32), suffix
=None, subdecoders
=[]))
760 return TopPowerDecoder(32, dec
, name
=name
, col_subset
=col_subset
,
761 row_subset
=row_subset
,
762 conditions
=conditions
)
765 if __name__
== '__main__':
770 def rowsubsetfn(opcode
, row
):
771 log("row_subset", opcode
, row
)
772 return row
['unit'] in ['LDST', 'FPU']
774 conditions
= {'SVP64BREV': Signal(name
="svp64brev", reset_less
=True),
775 'SVP64FFT': Signal(name
="svp64fft", reset_less
=True),
777 pdecode
= create_pdecode(name
="rowsub",
778 col_subset
={'opcode', 'function_unit',
779 'in2_sel', 'in3_sel'},
780 row_subset
=rowsubsetfn
,
782 conditions
=conditions
)
783 vl
= rtlil
.convert(pdecode
, ports
=pdecode
.ports())
784 with
open("row_subset_decoder.il", "w") as f
:
787 vl
= verilog
.convert(pdecode
, ports
=pdecode
.ports())
788 with
open("row_subset_decoder.v", "w") as f
:
795 pdecode
= create_pdecode(name
="fusubset", col_subset
={'function_unit'})
796 vl
= rtlil
.convert(pdecode
, ports
=pdecode
.ports())
797 with
open("col_subset_decoder.il", "w") as f
:
801 pdecode
= create_pdecode(include_fp
=True)
802 vl
= rtlil
.convert(pdecode
, ports
=pdecode
.ports())
803 with
open("decoder.il", "w") as f
:
807 pdecode
= create_pdecode_svp64_ldst(include_fp
=True)
808 vl
= rtlil
.convert(pdecode
, ports
=pdecode
.ports())
809 with
open("decoder_svp64.il", "w") as f
: