e89060f59cc8d43d7b6959d9ee867c992459ebae
1 """Cascading Power ISA Decoder
5 # Copyright (C) 2020 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
6 # Copyright (C) 2020 Michael Nolan <mtnolan2640@gmail.com>
8 This module uses CSV tables in a hierarchical/peer cascading fashion,
9 to create a multi-level instruction decoder by recognising appropriate
10 patterns. The output is a wide, flattened (1-level) series of bitfields,
11 suitable for a simple RISC engine.
13 This is based on Anton Blanchard's excellent microwatt work:
14 https://github.com/antonblanchard/microwatt/blob/master/decode1.vhdl
16 The basic principle is that the python code does the heavy lifting
17 (reading the CSV files, constructing the hierarchy), creating the HDL
18 AST with for-loops generating switch-case statements.
20 Where "normal" HDL would do this, in laborious excruciating detail:
22 switch (opcode & major_mask_bits):
23 case opcode_2: decode_opcode_2()
25 switch (opcode & minor_19_mask_bits)
26 case minor_opcode_19_operation_X:
27 case minor_opcode_19_operation_y:
29 we take *full* advantage of the decoupling between python and the
30 nmigen AST data structure, to do this:
32 with m.Switch(opcode & self.mask):
33 for case_bitmask in subcases:
34 with m.If(opcode & case_bitmask): {do_something}
36 this includes specifying the information sufficient to perform subdecoding.
40 the full hierarchical tree for decoding POWER9 is specified here
41 subsetting is possible by specifying col_subset (row_subset TODO)
45 takes a *list* of CSV files with an associated bit-range that it
46 is requested to match against the "opcode" row of the CSV file.
47 This pattern can be either an integer, a binary number, *or* a
48 wildcard nmigen Case pattern of the form "001--1-100".
52 these are *additional* cases with further decoding. The "pattern"
53 argument is specified as one of the Case statements (a peer of the
54 opcode row in the CSV file), and thus further fields of the opcode
55 may be decoded giving increasing levels of detail.
59 [ (extra.csv: bit-fields entire 32-bit range
61 000000---------------01000000000 -> ILLEGAL instruction
62 01100000000000000000000000000000 -> SIM_CONFIG instruction
63 ................................ ->
65 (major.csv: first 6 bits ONLY
67 001100 -> ALU,OP_ADD (add)
68 001101 -> ALU,OP_ADD (another type of add)
72 001011 this must match *MAJOR*.CSV
73 [ (minor_19.csv: bits 21 through 30 inclusive:
75 0b0000000000 -> ALU,OP_MCRF
78 (minor_19_00000.csv: bits 21 through 25 inclusive:
80 0b00010 -> ALU,add_pcis
90 from collections
import namedtuple
91 from nmigen
import Module
, Elaboratable
, Signal
, Cat
, Mux
92 from nmigen
.cli
import rtlil
93 from openpower
.decoder
.power_enums
import (Function
, Form
, MicrOp
,
94 In1Sel
, In2Sel
, In3Sel
, OutSel
,
95 SVEXTRA
, SVEtype
, SVPtype
, # Simple-V
96 RC
, LdstLen
, LDSTMode
, CryIn
,
97 single_bit_flags
, CRInSel
,
98 CROutSel
, get_signal_name
,
99 default_values
, insns
, asmidx
)
100 from openpower
.decoder
.power_fields
import DecodeFields
101 from openpower
.decoder
.power_fieldsn
import SigDecode
, SignalBitRange
102 from openpower
.decoder
.power_svp64
import SVP64RM
104 # key data structure in which the POWER decoder is specified,
105 # in a hierarchical fashion
Subdecoder = namedtuple(  # fix autoformatter
    "Subdecoder",
    ["pattern",     # the major pattern to search for (e.g. major opcode)
     "opcodes",     # a dictionary of minor patterns to find
     "opint",       # true => the pattern must not be in "10----11" format
     "bitsel",      # the bits (as a range) against which "pattern" matches
     "suffix",      # shift the opcode down before decoding
     "subdecoders"  # list of further subdecoders for *additional* matches,
                    # *ONLY* after "pattern" has *ALSO* been matched against.
     ])
118 power_op_types
= {'function_unit': Function
,
119 'internal_op': MicrOp
,
136 'sv_cr_out': SVEXTRA
,
143 power_op_csvmap
= {'function_unit': 'unit',
145 'internal_op': 'internal op',
154 'sv_out2': 'sv_out2',
155 'sv_cr_in': 'sv_cr_in',
156 'sv_cr_out': 'sv_cr_out',
157 'SV_Etype': 'SV_Etype',
158 'SV_Ptype': 'SV_Ptype',
161 'ldst_len': 'ldst len',
def get_pname(field, pname):
    """Return a prefixed signal name for *field*.

    When *pname* is None (PowerOp is constructed with the default
    name=None) the bare field name is returned instead of the
    misleading "None_<field>".
    """
    if pname is None:
        return field
    return "%s_%s" % (pname, field)
175 """PowerOp - a dynamic class that stores (subsets of) CSV rows of data
176 about a PowerISA instruction. this is a "micro-code" expanded format
177 which generates an awful lot of wires, hence the subsetting
180 def __init__(self
, incl_asm
=True, name
=None, subset
=None):
184 for field
, ptype
in power_op_types
.items():
186 if subset
and field
not in subset
:
188 fname
= get_pname(field
, name
)
189 setattr(self
, field
, Signal(ptype
, reset_less
=True, name
=fname
))
190 debug_report
.add(field
)
191 for bit
in single_bit_flags
:
192 field
= get_signal_name(bit
)
194 if subset
and field
not in subset
:
196 debug_report
.add(field
)
197 fname
= get_pname(field
, name
)
198 setattr(self
, field
, Signal(reset_less
=True, name
=fname
))
199 # comment out, bit too high debug level
200 #print("PowerOp debug", name, debug_report)
201 #print(" fields", fields)
203 def _eq(self
, row
=None):
206 # TODO: this conversion process from a dict to an object
207 # should really be done using e.g. namedtuple and then
209 if False: # debugging
210 if row
['CR in'] == '1':
214 if row
['CR out'] == '0':
219 ldst_mode
= row
['upd']
220 if ldst_mode
.isdigit():
221 row
['upd'] = int(ldst_mode
)
223 for field
, ptype
in power_op_types
.items():
224 if not hasattr(self
, field
):
226 if field
not in power_op_csvmap
:
228 csvname
= power_op_csvmap
[field
]
229 #print(field, ptype, csvname, row)
231 if csvname
== 'upd' and isinstance(val
, int): # LDSTMode different
235 res
.append(getattr(self
, field
).eq(val
))
238 asmcode
= row
['comment']
239 if hasattr(self
, "asmcode") and asmcode
in asmidx
:
240 res
.append(self
.asmcode
.eq(asmidx
[asmcode
]))
241 for bit
in single_bit_flags
:
242 field
= get_signal_name(bit
)
243 if not hasattr(self
, field
):
245 sig
= getattr(self
, field
)
246 res
.append(sig
.eq(int(row
.get(bit
, 0))))
249 def _get_eq(self
, res
, field
, otherop
):
250 copyfrom
= getattr(otherop
, field
, None)
251 copyto
= getattr(self
, field
, None)
252 if copyfrom
is not None and copyto
is not None:
253 res
.append(copyto
.eq(copyfrom
))
255 def eq(self
, otherop
):
257 for field
in power_op_types
.keys():
258 self
._get
_eq
(res
, field
, otherop
)
259 for bit
in single_bit_flags
:
260 self
._get
_eq
(res
, get_signal_name(bit
), otherop
)
265 for field
in power_op_types
.keys():
266 if hasattr(self
, field
):
267 res
.append(getattr(self
, field
))
268 if hasattr(self
, "asmcode"):
269 res
.append(self
.asmcode
)
270 for field
in single_bit_flags
:
271 field
= get_signal_name(field
)
272 if hasattr(self
, field
):
273 res
.append(getattr(self
, field
))
277 class PowerDecoder(Elaboratable
):
278 """PowerDecoder - decodes an incoming opcode into the type of operation
280 this is a recursive algorithm, creating Switch statements that can
281 have further match-and-decode on other parts of the opcode field before
282 finally landing at a "this CSV entry details gets returned" thing.
284 the complicating factor is the row and col subsetting. column subsetting
285 dynamically chooses only the CSV columns requested, whilst row subsetting
286 allows a function to be called on the row to determine if the Case
287 statement is to be generated for that row. this not only generates
288 completely different Decoders, it also means that some sub-decoders
289 will turn up blank (empty switch statements). if that happens we do
290 not want the parent to include a Mux for an entirely blank switch statement
291 so we have to store the switch/case statements in a tree, and
294 the reason for the tree is because elaborate can only be called *after*
295 the constructor is called. all quite messy.
298 def __init__(self
, width
, dec
, name
=None, col_subset
=None, row_subset
=None):
299 self
.actually_does_something
= False
301 self
.col_subset
= col_subset
302 self
.row_subsetfn
= row_subset
303 if not isinstance(dec
, list):
306 self
.opcode_in
= Signal(width
, reset_less
=True)
308 self
.op
= PowerOp(name
=name
, subset
=col_subset
)
310 if d
.suffix
is not None and d
.suffix
>= width
:
def suffix_mask(self, d):
    """Return a bitmask covering the lowest ``d.suffix`` bits."""
    return (1 << d.suffix) - 1
317 def divide_opcodes(self
, d
):
319 mask
= self
.suffix_mask(d
)
320 #print("mask", hex(mask))
321 for row
in d
.opcodes
:
322 opcode
= row
['opcode']
323 if d
.opint
and '-' not in opcode
:
324 opcode
= int(opcode
, 0)
326 opcode
= opcode
>> d
.suffix
327 if key
not in divided
:
331 divided
[key
].append(r
)
334 def tree_analyse(self
):
335 self
.decs
= decs
= []
336 self
.submodules
= submodules
= {}
339 # go through the list of CSV decoders first
342 opcode_switch
= Signal(d
.bitsel
[1] - d
.bitsel
[0],
345 case_does_something
= False
346 eq
.append(opcode_switch
.eq(
347 self
.opcode_in
[d
.bitsel
[0]:d
.bitsel
[1]]))
349 opcodes
= self
.divide_opcodes(d
)
350 opc_in
= Signal(d
.suffix
, reset_less
=True)
351 eq
.append(opc_in
.eq(opcode_switch
[:d
.suffix
]))
352 # begin the dynamic Switch statement here
354 cases
.append([opc_in
, switch_case
])
356 for key
, row
in opcodes
.items():
357 bitsel
= (d
.suffix
+d
.bitsel
[0], d
.bitsel
[1])
358 sd
= Subdecoder(pattern
=None, opcodes
=row
,
359 bitsel
=bitsel
, suffix
=None,
360 opint
=False, subdecoders
=[])
361 mname
= get_pname("dec_sub%d" % key
, self
.pname
)
362 subdecoder
= PowerDecoder(width
=32, dec
=sd
,
364 col_subset
=self
.col_subset
,
365 row_subset
=self
.row_subsetfn
)
366 if not subdecoder
.tree_analyse():
369 submodules
[mname
] = subdecoder
370 sub_eqs
.append(subdecoder
.opcode_in
.eq(self
.opcode_in
))
371 # add in the dynamic Case statement here
372 switch_case
[key
] = self
.op
.eq(subdecoder
.op
)
373 self
.actually_does_something
= True
374 case_does_something
= True
375 if case_does_something
:
378 # TODO: arguments, here (all of them) need to be a list.
379 # a for-loop around the *list* of decoder args.
381 cases
.append([opcode_switch
, switch_case
])
382 seqs
= self
.handle_subdecoders(switch_case
, submodules
, d
)
384 case_does_something
= True
386 for row
in d
.opcodes
:
387 opcode
= row
['opcode']
388 if d
.opint
and '-' not in opcode
:
389 opcode
= int(opcode
, 0)
392 if self
.row_subsetfn
:
393 if not self
.row_subsetfn(opcode
, row
):
395 # add in the dynamic Case statement here
396 switch_case
[opcode
] = self
.op
._eq
(row
)
397 self
.actually_does_something
= True
398 case_does_something
= True
402 if case_does_something
:
404 #print("submodule eqs", self.pname, eq)
406 #print("submodules", self.pname, submodules)
409 return self
.actually_does_something
411 def handle_subdecoders(self
, switch_case
, submodules
, d
):
413 for dec
in d
.subdecoders
:
414 if isinstance(dec
, list): # XXX HACK: take first pattern
416 #print("subdec", dec.pattern, self.pname)
417 mname
= get_pname("dec%d" % dec
.pattern
, self
.pname
)
418 subdecoder
= PowerDecoder(self
.width
, dec
,
420 col_subset
=self
.col_subset
,
421 row_subset
=self
.row_subsetfn
)
422 if not subdecoder
.tree_analyse(): # doesn't do anything
425 submodules
[mname
] = subdecoder
426 eqs
.append(subdecoder
.opcode_in
.eq(self
.opcode_in
))
427 switch_case
[dec
.pattern
] = self
.op
.eq(subdecoder
.op
)
428 self
.actually_does_something
= True
432 def elaborate(self
, platform
):
433 #print("decoder elaborate", self.pname, self.submodules)
439 for mname
, subdecoder
in self
.submodules
.items():
440 setattr(m
.submodules
, mname
, subdecoder
)
442 for switch_case
in self
.decs
:
443 for (switch
, cases
) in switch_case
:
444 with m
.Switch(switch
):
445 for key
, eqs
in cases
.items():
451 return [self
.opcode_in
] + self
.op
.ports()
454 class TopPowerDecoder(PowerDecoder
):
457 top-level hierarchical decoder for POWER ISA
458 bigendian dynamically switches between big and little endian decoding
459 (reverses byte order). See V3.0B p44 1.11.2
462 def __init__(self
, width
, dec
, name
=None, col_subset
=None, row_subset
=None):
463 PowerDecoder
.__init
__(self
, width
, dec
, name
, col_subset
, row_subset
)
464 self
.fields
= df
= DecodeFields(SignalBitRange
, [self
.opcode_in
])
465 self
.fields
.create_specs()
466 self
.raw_opcode_in
= Signal
.like(self
.opcode_in
, reset_less
=True)
467 self
.bigendian
= Signal(reset_less
=True)
469 for fname
, value
in self
.fields
.common_fields
.items():
470 signame
= get_pname(fname
, name
)
471 sig
= Signal(value
[0:-1].shape(), reset_less
=True, name
=signame
)
472 setattr(self
, fname
, sig
)
474 # create signals for all field forms
475 forms
= self
.form_names
478 fields
= self
.fields
.instrs
[form
]
480 Fields
= namedtuple("Fields", fk
)
482 for k
, value
in fields
.items():
483 fname
= "%s_%s" % (form
, k
)
484 sig
= Signal(value
[0:-1].shape(), reset_less
=True, name
=fname
)
487 setattr(self
, "Form%s" % form
, instr
)
488 self
.sigforms
[form
] = instr
def form_names(self):
    """Names of every instruction field form known to the decoder
    (the keys of the per-form field tables)."""
    return self.fields.instrs.keys()
496 def elaborate(self
, platform
):
497 m
= PowerDecoder
.elaborate(self
, platform
)
499 # sigh duplicated in SVP64PowerDecoder
500 # raw opcode in assumed to be in LE order: byte-reverse it to get BE
501 raw_le
= self
.raw_opcode_in
503 for i
in range(0, self
.width
, 8):
504 l
.append(raw_le
[i
:i
+8])
507 comb
+= self
.opcode_in
.eq(Mux(self
.bigendian
, raw_be
, raw_le
))
509 # add all signal from commonly-used fields
510 for fname
, value
in self
.fields
.common_fields
.items():
511 sig
= getattr(self
, fname
)
512 comb
+= sig
.eq(value
[0:-1])
514 # link signals for all field forms
515 forms
= self
.form_names
517 sf
= self
.sigforms
[form
]
518 fields
= self
.fields
.instrs
[form
]
519 for k
, value
in fields
.items():
521 comb
+= sig
.eq(value
[0:-1])
526 return [self
.raw_opcode_in
, self
.bigendian
] + PowerDecoder
.ports(self
)
529 ####################################################
530 # PRIMARY FUNCTION SPECIFYING THE FULL POWER DECODER
532 def create_pdecode(name
=None, col_subset
=None, row_subset
=None):
533 """create_pdecode - creates a cascading hierarchical POWER ISA decoder
535 subsetting of the PowerOp decoding is possible by setting col_subset
538 # some alteration to the CSV files is required for SV so we use
541 get_csv
= isa
.get_svp64_csv
543 # minor 19 has extra patterns
545 m19
.append(Subdecoder(pattern
=19, opcodes
=get_csv("minor_19.csv"),
546 opint
=True, bitsel
=(1, 11), suffix
=None,
548 m19
.append(Subdecoder(pattern
=19, opcodes
=get_csv("minor_19_00000.csv"),
549 opint
=True, bitsel
=(1, 6), suffix
=None,
555 Subdecoder(pattern
=30, opcodes
=get_csv("minor_30.csv"),
556 opint
=True, bitsel
=(1, 5), suffix
=None, subdecoders
=[]),
557 Subdecoder(pattern
=31, opcodes
=get_csv("minor_31.csv"),
558 opint
=True, bitsel
=(1, 11), suffix
=0b00101, subdecoders
=[]),
559 Subdecoder(pattern
=58, opcodes
=get_csv("minor_58.csv"),
560 opint
=True, bitsel
=(0, 2), suffix
=None, subdecoders
=[]),
561 Subdecoder(pattern
=62, opcodes
=get_csv("minor_62.csv"),
562 opint
=True, bitsel
=(0, 2), suffix
=None, subdecoders
=[]),
563 Subdecoder(pattern
=22, opcodes
=get_csv("minor_22.csv"),
564 opint
=True, bitsel
=(1, 5), suffix
=None, subdecoders
=[]),
567 # top level: extra merged with major
569 opcodes
= get_csv("major.csv")
570 dec
.append(Subdecoder(pattern
=None, opint
=True, opcodes
=opcodes
,
571 bitsel
=(26, 32), suffix
=None, subdecoders
=pminor
))
572 opcodes
= get_csv("extra.csv")
573 dec
.append(Subdecoder(pattern
=None, opint
=False, opcodes
=opcodes
,
574 bitsel
=(0, 32), suffix
=None, subdecoders
=[]))
576 return TopPowerDecoder(32, dec
, name
=name
, col_subset
=col_subset
,
577 row_subset
=row_subset
)
580 if __name__
== '__main__':
def rowsubsetfn(opcode, row):
    """Row-subset predicate for the demo decoder: keep only rows
    whose function unit is ALU.  Prints each row for debugging."""
    print("row_subset", opcode, row)
    keep = row['unit'] == 'ALU'
    return keep
589 pdecode
= create_pdecode(name
="rowsub",
590 col_subset
={'function_unit', 'in1_sel'},
591 row_subset
=rowsubsetfn
)
592 vl
= rtlil
.convert(pdecode
, ports
=pdecode
.ports())
593 with
open("row_subset_decoder.il", "w") as f
:
598 pdecode
= create_pdecode(name
="fusubset", col_subset
={'function_unit'})
599 vl
= rtlil
.convert(pdecode
, ports
=pdecode
.ports())
600 with
open("col_subset_decoder.il", "w") as f
:
605 pdecode
= create_pdecode()
606 vl
= rtlil
.convert(pdecode
, ports
=pdecode
.ports())
607 with
open("decoder.il", "w") as f
: