src/openpower/decoder/power_decoder.py
# SPDX-License-Identifier: LGPL-3-or-later
"""Cascading Power ISA Decoder

License: LGPLv3+

# Copyright (C) 2020 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
# Copyright (C) 2020 Michael Nolan <mtnolan2640@gmail.com>

This module uses CSV tables in a hierarchical/peer cascading fashion,
to create a multi-level instruction decoder by recognising appropriate
patterns.  The output is a wide, flattened (1-level) series of bitfields,
suitable for a simple RISC engine.

This is based on Anton Blanchard's excellent microwatt work:
https://github.com/antonblanchard/microwatt/blob/master/decode1.vhdl

The basic principle is that the python code does the heavy lifting
(reading the CSV files, constructing the hierarchy), creating the HDL
AST with for-loops generating switch-case statements.

Where "normal" HDL would do this, in laborious excruciating detail:

    switch (opcode & major_mask_bits):
        case opcode_2: decode_opcode_2()
        case opcode_19:
            switch (opcode & minor_19_mask_bits)
                case minor_opcode_19_operation_X:
                case minor_opcode_19_operation_y:

we take *full* advantage of the decoupling between python and the
nmigen AST data structure, to do this:

    with m.Switch(opcode & self.mask):
        for case_bitmask in subcases:
            with m.Case(case_bitmask): {do_something}

this includes specifying the information sufficient to perform subdecoding
(bit-ranges, patterns and nested subdecoders).

create_pdecode()

    the full hierarchical tree for decoding POWER9 is specified here.
    subsetting is possible by specifying col_subset (row_subset TODO)

PowerDecoder

    takes a *list* of CSV files with an associated bit-range that it
    is requested to match against the "opcode" row of the CSV file.
    This pattern can be either an integer, a binary number, *or* a
    wildcard nmigen Case pattern of the form "001--1-100".

Subdecoders

    these are *additional* cases with further decoding.  The "pattern"
    argument is specified as one of the Case statements (a peer of the
    opcode row in the CSV file), and thus further fields of the opcode
    may be decoded, giving increasing levels of detail.

Top Level:

    [ (extra.csv: bit-fields covering the entire 32-bit range
        opcode                           -> matches
        000000---------------01000000000 -> ILLEGAL instruction
        01100000000000000000000000000000 -> SIM_CONFIG instruction
        ................................ ->
      ),
      (major.csv: first 6 bits ONLY
        opcode  -> matches
        001100  -> ALU,OP_ADD (add)
        001101  -> ALU,OP_ADD (another type of add)
        ......  -> ...
        ......  -> ...
        subdecoders:
        010011 (decimal 19) - this must match an opcode row in *MAJOR*.CSV
            [ (minor_19.csv: bits 21 through 30 inclusive:
                opcode       -> matches
                0b0000000000 -> ALU,OP_MCRF
                ............ -> ....
              ),
              (minor_19_00000.csv: bits 21 through 25 inclusive:
                opcode  -> matches
                0b00010 -> ALU,add_pcis
              )
            ]
      ),
    ]

"""

import gc
from collections import namedtuple, OrderedDict
from nmigen import Module, Elaboratable, Signal, Cat, Mux, Const
from nmigen.cli import rtlil, verilog
from openpower.decoder.power_enums import (Function, Form, MicrOp,
                                           In1Sel, In2Sel, In3Sel, OutSel,
                                           SVEXTRA, SVMode,   # Simple-V
                                           SVEType, SVPType,  # Simple-V
                                           RCOE, LdstLen, LDSTMode, CryIn,
                                           single_bit_flags, CRInSel,
                                           CROutSel, get_signal_name,
                                           default_values, insns, asmidx,
                                           asmlen)
from openpower.decoder.power_fields import DecodeFields
from openpower.decoder.power_fieldsn import SigDecode, SignalBitRange
from openpower.decoder.power_svp64 import SVP64RM

from openpower.util import log

# key data structure in which the POWER decoder is specified,
# in a hierarchical fashion
Subdecoder = namedtuple(  # fix autoformatter
    "Subdecoder",
    ["pattern",     # the major pattern to search for (e.g. major opcode)
     "opcodes",     # a dictionary of minor patterns to find
     "opint",       # true => the pattern must not be in "10----11" format
     # the bits (as a range) against which "pattern" matches
     "bitsel",      # should be in MSB0 order but isn't! it's LSB0. um.
     "suffix",      # shift the opcode down before decoding
     "subdecoders"  # list of further subdecoders for *additional* matches,
     # *ONLY* after "pattern" has *ALSO* been matched against.
     ])
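
# An illustrative sketch (not executed here) of how one hierarchy entry is
# written; it mirrors the minor_19 entry constructed in create_pdecode() below:
#
#   Subdecoder(pattern=19,                       # major opcode to match first
#              opcodes=get_csv("minor_19.csv"),  # CSV rows of minor opcodes
#              opint=False,                      # opcode column may be wildcard
#              bitsel=(1, 11),                   # LSB0 bit-range of minor opcode
#              suffix=None,                      # no suffix-splitting here
#              subdecoders=[])                   # no deeper decoders below this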

power_op_types = {'function_unit': Function,
                  'internal_op': MicrOp,
                  'form': Form,
                  'asmcode': asmlen,
                  'SV_Etype': SVEType,
                  'SV_Ptype': SVPType,
                  'SV_mode': SVMode,
                  'in1_sel': In1Sel,
                  'in2_sel': In2Sel,
                  'in3_sel': In3Sel,
                  'out_sel': OutSel,
                  'cr_in': CRInSel,
                  'cr_out': CROutSel,
                  'sv_in1': SVEXTRA,
                  'sv_in2': SVEXTRA,
                  'sv_in3': SVEXTRA,
                  'sv_out': SVEXTRA,
                  'sv_out2': SVEXTRA,
                  'sv_cr_in': SVEXTRA,
                  'sv_cr_out': SVEXTRA,
                  'ldst_len': LdstLen,
                  'upd': LDSTMode,
                  'rc_sel': RCOE,
                  'cry_in': CryIn
                  }

power_op_csvmap = {'function_unit': 'unit',
                   'form': 'form',
                   'internal_op': 'internal op',
                   'in1_sel': 'in1',
                   'in2_sel': 'in2',
                   'in3_sel': 'in3',
                   'out_sel': 'out',
                   'sv_in1': 'sv_in1',
                   'sv_in2': 'sv_in2',
                   'sv_in3': 'sv_in3',
                   'sv_out': 'sv_out',
                   'sv_out2': 'sv_out2',
                   'sv_cr_in': 'sv_cr_in',
                   'sv_cr_out': 'sv_cr_out',
                   'SV_Etype': 'SV_Etype',
                   'SV_Ptype': 'SV_Ptype',
                   'SV_mode': 'SV_mode',
                   'cr_in': 'CR in',
                   'cr_out': 'CR out',
                   'ldst_len': 'ldst len',
                   'upd': 'upd',
                   'rsrv': 'rsrv',  # atomic operation
                   'rc_sel': 'rc',
                   'cry_in': 'cry in',
                   }


def get_pname(field, pname):
    if pname is None:
        return field
    return "%s_%s" % (pname, field)


class PowerOp:
    """PowerOp - a dynamic class that stores (subsets of) CSV rows of data
    about a PowerISA instruction.  this is a "micro-code" expanded format
    which generates an awful lot of wires, hence the subsetting
    """

    def __init__(self, incl_asm=True, name=None, subset=None, fields=None):
        self.name = name
        self.subset = subset
        if fields is not None:
            for k, v in fields.items():
                setattr(self, k, v)
            return
        debug_report = set()
        fields = set()
        for field, ptype in power_op_types.items():
            fields.add(field)
            if subset and field not in subset:
                continue
            fname = get_pname(field, name)
            setattr(self, field, Signal(ptype, reset_less=True, name=fname))
            debug_report.add(field)
        for bit in single_bit_flags:
            field = get_signal_name(bit)
            fields.add(field)
            if subset and field not in subset:
                continue
            debug_report.add(field)
            fname = get_pname(field, name)
            setattr(self, field, Signal(reset_less=True, name=fname))
        self._fields = fields
        # commented out, bit too high a debug level
        # log("PowerOp debug", name, debug_report)
        # log(" fields", fields)

    @staticmethod
    def like(other):
        """PowerOp.like: creates a duplicate of a given PowerOp instance
        """
        fields = {}
        for fname in other._fields:
            sig = getattr(other, fname, None)
            if sig is not None:
                fields[fname] = sig.__class__.like(sig)
        return PowerOp(subset=other.subset, fields=fields)

    def _eq(self, row=None):
        if row is None:
            row = default_values
        # TODO: this conversion process from a dict to an object
        # should really be done using e.g. namedtuple and then
        # call eq not _eq
        if False:  # debugging
            if row['CR in'] == '1':
                import pdb
                pdb.set_trace()
                log(row)
            if row['CR out'] == '0':
                import pdb
                pdb.set_trace()
                log(row)
            log(row)
        ldst_mode = row['upd']
        if ldst_mode.isdigit():
            row['upd'] = int(ldst_mode)
        res = []
        for field, ptype in power_op_types.items():
            if not hasattr(self, field):
                continue
            if field not in power_op_csvmap:
                continue
            csvname = power_op_csvmap[field]
            log("_eq", field, ptype, csvname, row)
            val = row[csvname]
            if csvname == 'upd' and isinstance(val, int):  # LDSTMode different
                val = ptype(val)
            else:
                val = ptype[val]
            res.append(getattr(self, field).eq(val))
        if False:
            log(row.keys())
        asmcode = row['comment']
        # process the comment field, strip out "equals" for FP
        if "=" in asmcode:
            asmcode = asmcode.split("=")[-1]
            log("asmcode stripping =", asmcode,
                asmcode in asmidx, hasattr(self, "asmcode"))
        if hasattr(self, "asmcode") and asmcode in asmidx:
            res.append(self.asmcode.eq(asmidx[asmcode]))
        for bit in single_bit_flags:
            field = get_signal_name(bit)
            if not hasattr(self, field):
                continue
            sig = getattr(self, field)
            res.append(sig.eq(int(row.get(bit, 0))))
        return res

    def _get_eq(self, res, field, otherop):
        copyfrom = getattr(otherop, field, None)
        copyto = getattr(self, field, None)
        if copyfrom is not None and copyto is not None:
            res.append(copyto.eq(copyfrom))

    def eq(self, otherop):
        res = []
        for field in power_op_types.keys():
            self._get_eq(res, field, otherop)
        for bit in single_bit_flags:
            self._get_eq(res, get_signal_name(bit), otherop)
        return res

    def ports(self):
        res = []
        for field in power_op_types.keys():
            if hasattr(self, field):
                res.append(getattr(self, field))
        if hasattr(self, "asmcode"):
            res.append(self.asmcode)
        for field in single_bit_flags:
            field = get_signal_name(field)
            if hasattr(self, field):
                res.append(getattr(self, field))
        return res

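# A usage sketch (illustrative only, not part of the module API): PowerDecoder
# drives a PowerOp from one parsed CSV row and then mirrors the result up the
# hierarchy with eq(); "row" and "parent_op" below are hypothetical names:
#
#   op = PowerOp(subset={'function_unit', 'internal_op'})
#   comb += op._eq(row)        # row: one dict parsed from e.g. minor_31.csv
#   comb += parent_op.eq(op)   # parent decoder copies the sub-decoder's result
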

class PowerDecoder(Elaboratable):
    """PowerDecoder - decodes an incoming opcode into the type of operation

    this is a recursive algorithm, creating Switch statements that can
    have further match-and-decode on other parts of the opcode field before
    finally landing at a point where a single CSV entry's details are
    returned.

    the complicating factor is the row and col subsetting.  column subsetting
    dynamically chooses only the CSV columns requested, whilst row subsetting
    allows a function to be called on each row to determine whether the Case
    statement is to be generated for that row.  this not only generates
    completely different Decoders, it also means that some sub-decoders
    will turn up blank (empty switch statements).  if that happens we do
    not want the parent to include a Mux for an entirely blank switch
    statement, so the switch/case statements are stored in a tree and
    post-analysed.

    the reason for the tree is that elaborate can only be called *after*
    the constructor has been called.  all quite messy.
    """

    def __init__(self, width, dec, name=None, col_subset=None,
                 row_subset=None, conditions=None):
        if conditions is None:
            # XXX conditions = {}
            conditions = {
                'SVP64FFT': Const(0, 1),
            }
        self.actually_does_something = False
        self.pname = name
        self.conditions = conditions
        self.col_subset = col_subset
        self.row_subsetfn = row_subset
        if not isinstance(dec, list):
            dec = [dec]
        self.dec = dec
        self.opcode_in = Signal(width, reset_less=True)

        self.op = PowerOp(name=name, subset=col_subset)
        for d in dec:
            if d.suffix is not None and d.suffix >= width:
                d.suffix = None

        self.width = width

        # create some case statement condition patterns for matching
        # a single condition: "1----" for the first condition,
        # "-1---" for the 2nd, etc.
        # also create a matching ordered list of conditions, for the switch,
        # which will Cat() them together
        self.ccases = {}
        self.ckeys = list(conditions.keys())
        self.ckeys.sort()

    def find_conditions(self, opcodes):
        # look for conditions, create dictionary entries for them
        # sorted by opcode
        rows = OrderedDict()  # start as a dictionary, get as list (after)
        for row in opcodes:
            condition = row['CONDITIONS']
            opcode = row['opcode']
            if condition:
                # check it's expected
                assert (condition in self.conditions or
                        (condition[0] == '~' and
                         condition[1:] in self.conditions)), \
                    "condition %s not in %s" % (condition,
                                                str(self.conditions))
                if opcode not in rows:
                    rows[opcode] = {}
                rows[opcode][condition] = row
            else:
                # check it's unique
                assert opcode not in rows, \
                    "opcode %s already in rows for %s" % \
                    (opcode, self.pname)
                rows[opcode] = row
        # after checking for conditions, get just the values (ordered)
        return list(rows.values())

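    # What find_conditions() returns (an illustrative sketch): unconditional
    # opcodes come back as their plain CSV row dict, whereas an opcode that
    # appears twice with CONDITIONS of e.g. "SVP64FFT" and "~SVP64FFT" comes
    # back as a nested dict keyed by those condition strings:
    #
    #   [ {'opcode': '0b0000000000', 'unit': 'ALU', ...},
    #     {'SVP64FFT': {...row a...}, '~SVP64FFT': {...row b...}} ]
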
    def suffix_mask(self, d):
        return ((1 << d.suffix) - 1)

    def divide_opcodes(self, d):
        divided = {}
        mask = self.suffix_mask(d)
        # log("mask", hex(mask))
        for row in d.opcodes:
            opcode = row['opcode']
            if d.opint and '-' not in opcode:
                opcode = int(opcode, 0)
            key = opcode & mask
            opcode = opcode >> d.suffix
            if key not in divided:
                divided[key] = []
            r = row.copy()
            r['opcode'] = opcode
            divided[key].append(r)
        return divided

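    # A worked example of the suffix-splitting above (illustrative numbers):
    # minor_31 is declared with suffix=0b00101 (5), so mask = (1 << 5) - 1.
    # an integer opcode of 0b1000010110 is then split into
    #   key    = 0b1000010110 & 0b11111 = 0b10110  (selects the outer Case)
    #   opcode = 0b1000010110 >> 5      = 0b10000  (decoded by the sub-decoder)
    # so all rows sharing the same low 5 bits end up grouped under one key.
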
    def tree_analyse(self):
        self.decs = decs = []
        self.submodules = submodules = {}
        self.eqs = eqs = []

        # go through the list of CSV decoders first
        for d in self.dec:
            cases = []
            opcode_switch = Signal(d.bitsel[1] - d.bitsel[0],
                                   reset_less=True)
            eq = []
            case_does_something = False
            look_for = self.opcode_in[d.bitsel[0]:d.bitsel[1]]
            eq.append(opcode_switch.eq(look_for))
            if d.suffix:
                opcodes = self.divide_opcodes(d)
                # TODO opcodes = self.find_conditions(opcodes)
                opc_in = Signal(d.suffix, reset_less=True)
                eq.append(opc_in.eq(opcode_switch[:d.suffix]))
                # begin the dynamic Switch statement here
                switch_case = {}
                cases.append([opc_in, switch_case])
                sub_eqs = []
                for key, row in opcodes.items():
                    bitsel = (d.suffix + d.bitsel[0], d.bitsel[1])
                    sd = Subdecoder(pattern=None, opcodes=row,
                                    bitsel=bitsel, suffix=None,
                                    opint=False, subdecoders=[])
                    mname = get_pname("dec_sub%d" % key, self.pname)
                    subdecoder = PowerDecoder(width=32, dec=sd,
                                              name=mname,
                                              col_subset=self.col_subset,
                                              row_subset=self.row_subsetfn,
                                              conditions=self.conditions)
                    if not subdecoder.tree_analyse():
                        del subdecoder
                        continue
                    submodules[mname] = subdecoder
                    sub_eqs.append(subdecoder.opcode_in.eq(self.opcode_in))
                    # add in the dynamic Case statement here
                    switch_case[key] = self.op.eq(subdecoder.op)
                    self.actually_does_something = True
                    case_does_something = True
                if case_does_something:
                    eq += sub_eqs
            else:
                # TODO: arguments, here (all of them) need to be a list.
                # a for-loop around the *list* of decoder args.
                switch_case = {}
                cases.append([opcode_switch, switch_case])
                seqs = self.handle_subdecoders(switch_case, submodules, d)
                if seqs:
                    case_does_something = True
                eq += seqs
                opcodes = self.find_conditions(d.opcodes)
                for row in opcodes:
                    # urrr this is an awful hack.  if "conditions" are active
                    # get the FIRST item (will be the same opcode), and it
                    # had BETTER have the same unit and also pass other
                    # row subset conditions.
                    if 'opcode' not in row:  # must be a "CONDITIONS" dict...
                        is_conditions = True
                        _row = row[list(row.keys())[0]]
                    else:
                        is_conditions = False
                        _row = row
                    opcode = _row['opcode']
                    if d.opint and '-' not in opcode:
                        opcode = int(opcode, 0)
                    if not _row['unit']:
                        continue
                    if self.row_subsetfn:
                        if not self.row_subsetfn(opcode, _row):
                            continue
                    # add in the dynamic Case statement here
                    if is_conditions:
                        switch_case[opcode] = {}
                        for k, crow in row.items():
                            # log("ordered", k, crow)
                            switch_case[opcode][k] = self.op._eq(crow)
                    else:
                        switch_case[opcode] = self.op._eq(row)
                    self.actually_does_something = True
                    case_does_something = True

            if cases:
                decs.append(cases)
            if case_does_something:
                eqs += eq
            # log("submodule eqs", self.pname, eq)

        # log("submodules", self.pname, submodules)

        # GC collection is really slow and shouldn't be needed
        # gc.collect()
        return self.actually_does_something

    def handle_subdecoders(self, switch_case, submodules, d):
        eqs = []
        for dlist in d.subdecoders:
            if not isinstance(dlist, list):  # XXX HACK: take first pattern
                dlist = [dlist]
            for dec in dlist:
                # log("subdec", dec.pattern, self.pname)
                mname = get_pname("dec%d" % dec.pattern, self.pname)
                if mname in submodules:
                    # sigh, HACK...
                    mname += "_1"
                    assert mname not in submodules
                subdecoder = PowerDecoder(self.width, dec,
                                          name=mname,
                                          col_subset=self.col_subset,
                                          row_subset=self.row_subsetfn,
                                          conditions=self.conditions)
                # log("subdecoder", mname, subdecoder)
                if not subdecoder.tree_analyse():  # doesn't do anything
                    # log("analysed, DELETING", mname)
                    del subdecoder
                    continue  # skip
                submodules[mname] = subdecoder
                eqs.append(subdecoder.opcode_in.eq(self.opcode_in))
                switch_case[dec.pattern] = self.op.eq(subdecoder.op)
                self.actually_does_something = True

        return eqs

    def elaborate(self, platform):
        # log("decoder elaborate", self.pname, self.submodules)
        m = Module()
        comb = m.d.comb

        comb += self.eqs

        for mname, subdecoder in self.submodules.items():
            setattr(m.submodules, mname, subdecoder)

        for switch_case in self.decs:
            for (switch, cases) in switch_case:
                with m.Switch(switch):
                    for key, eqs in cases.items():
                        with m.Case(key):
                            # "conditions" are a further switch statement
                            if isinstance(eqs, dict):
                                self.condition_switch(m, eqs)
                            else:
                                comb += eqs
        return m

    def condition_switch(self, m, cases):
        """against the global list of "conditions", having matched against
        bits of the opcode, we FINALLY now have to match against some
        additional "conditions".  this is because there can be **MULTIPLE**
        entries for a given opcode match.  here we discern them.
        """
        comb = m.d.comb
        for casekey, eqs in cases.items():
            if casekey.startswith('~'):
                with m.If(~self.conditions[casekey[1:]]):
                    comb += eqs
            else:
                with m.If(self.conditions[casekey]):
                    comb += eqs

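    # An illustrative sketch of what condition_switch() receives: for one
    # opcode the Case body can be a dict keyed by condition name, e.g.
    #
    #   {'SVP64FFT': [<eqs for the conditional variant>],
    #    '~SVP64FFT': [<eqs for the plain variant>]}
    #
    # and each entry becomes an m.If() on the corresponding condition Signal.
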
    def ports(self):
        return [self.opcode_in] + self.op.ports()


class TopPowerDecoder(PowerDecoder):
    """TopPowerDecoder

    top-level hierarchical decoder for POWER ISA.
    bigendian dynamically switches between big and little endian decoding
    (reverses byte order).  See V3.0B p44 1.11.2
    """

    def __init__(self, width, dec, name=None, col_subset=None,
                 row_subset=None, conditions=None):
        PowerDecoder.__init__(self, width, dec, name,
                              col_subset, row_subset, conditions)
        self.fields = df = DecodeFields(SignalBitRange, [self.opcode_in])
        self.fields.create_specs()
        self.raw_opcode_in = Signal.like(self.opcode_in, reset_less=True)
        self.bigendian = Signal(reset_less=True)

        for fname, value in self.fields.common_fields.items():
            signame = get_pname(fname, name)
            sig = Signal(value[0:-1].shape(), reset_less=True, name=signame)
            setattr(self, fname, sig)

        # create signals for all field forms
        forms = self.form_names
        self.sigforms = {}
        for form in forms:
            fields = self.fields.instrs[form]
            fk = fields.keys()
            Fields = namedtuple("Fields", fk)
            sf = {}
            for k, value in fields.items():
                fname = "%s_%s" % (form, k)
                sig = Signal(value[0:-1].shape(), reset_less=True, name=fname)
                sf[k] = sig
            instr = Fields(**sf)
            setattr(self, "Form%s" % form, instr)
            self.sigforms[form] = instr

        self.tree_analyse()

    @property
    def form_names(self):
        return self.fields.instrs.keys()

    def elaborate(self, platform):
        m = PowerDecoder.elaborate(self, platform)
        comb = m.d.comb
        # sigh, duplicated in SVP64PowerDecoder
        # raw opcode in is assumed to be in LE order: byte-reverse it to get BE
        raw_le = self.raw_opcode_in
        l = []
        for i in range(0, self.width, 8):
            l.append(raw_le[i:i+8])
        l.reverse()
        raw_be = Cat(*l)
        comb += self.opcode_in.eq(Mux(self.bigendian, raw_be, raw_le))
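        # e.g. (illustrative): with bigendian clear, a raw_opcode_in value of
        # 0x7C221A14 is used unchanged; with bigendian set, a raw value of
        # 0x141A227C is byte-reversed to 0x7C221A14, so opcode_in always ends
        # up holding the instruction word in the same numeric layout.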

        # add all signals from commonly-used fields
        for fname, value in self.fields.common_fields.items():
            sig = getattr(self, fname)
            comb += sig.eq(value[0:-1])

        # link signals for all field forms
        forms = self.form_names
        for form in forms:
            sf = self.sigforms[form]
            fields = self.fields.instrs[form]
            for k, value in fields.items():
                sig = getattr(sf, k)
                comb += sig.eq(value[0:-1])

        return m

    def ports(self):
        res = [self.raw_opcode_in, self.bigendian] + PowerDecoder.ports(self)
        for condition in self.conditions.values():
            res.append(condition)
        return res


#############################################################
# PRIMARY FUNCTION SPECIFYING ALTERNATIVE SVP64 POWER DECODER

def create_pdecode_svp64_ldst(name=None, col_subset=None, row_subset=None,
                              include_fp=False):
    """create_pdecode_svp64_ldst - creates a cascading hierarchical POWER ISA
    decoder for the SVP64 LD/ST subset

    subsetting of the PowerOp decoding is possible by setting col_subset
    """
    # log("create_pdecode_svp64_ldst", name, col_subset, row_subset, include_fp)

    # some alteration to the CSV files is required for SV so we use
    # a class to do it
    isa = SVP64RM()
    get_csv = isa.get_svp64_csv

    # minor opcodes.
    pminor = [
        Subdecoder(pattern=58, opcodes=get_csv("svldst_minor_58.csv"),
                   opint=True, bitsel=(0, 2), suffix=None, subdecoders=[]),
        # nope - needs 4-in regs
        # Subdecoder(pattern=62, opcodes=get_csv("svldst_minor_62.csv"),
        #            opint=True, bitsel=(0, 2), suffix=None, subdecoders=[]),
    ]

    # FP 63L/H decoders.  TODO: move mffsfamily to separate subdecoder
    if False and include_fp:
        pminor.append(
            Subdecoder(pattern=63, opcodes=get_csv("minor_63.csv"),
                       opint=False, bitsel=(1, 11), suffix=None,
                       subdecoders=[]),
        )
        pminor.append(
            Subdecoder(pattern=59, opcodes=get_csv("minor_59.csv"),
                       opint=False, bitsel=(1, 11), suffix=None,
                       subdecoders=[]),
        )

    # top level: extra merged with major
    dec = []
    opcodes = get_csv("svldst_major.csv")
    dec.append(Subdecoder(pattern=None, opint=True, opcodes=opcodes,
                          bitsel=(26, 32), suffix=None, subdecoders=pminor))

    return TopPowerDecoder(32, dec, name=name, col_subset=col_subset,
                           row_subset=row_subset)


####################################################
# PRIMARY FUNCTION SPECIFYING THE FULL POWER DECODER

def create_pdecode(name=None, col_subset=None, row_subset=None,
                   include_fp=False, conditions=None):
    """create_pdecode - creates a cascading hierarchical POWER ISA decoder

    subsetting of the PowerOp decoding is possible by setting col_subset

    NOTE (sigh) the bitsel patterns are in LSB0 order, they should be MSB0
    """
    # log("create_pdecode", name, col_subset, row_subset, include_fp)

    # some alteration to the CSV files is required for SV so we use
    # a class to do it
    isa = SVP64RM()
    get_csv = isa.get_svp64_csv

    # minor 19 has extra patterns
    m19 = []
    m19.append(Subdecoder(pattern=19, opcodes=get_csv("minor_19.csv"),
                          opint=False, bitsel=(1, 11), suffix=None,
                          subdecoders=[]))
    # XXX problem with sub-decoders (can only handle one),
    # sort this out another time
    # m19.append(Subdecoder(pattern=19, opcodes=get_csv("minor_19_00000.csv"),
    #                       opint=True, bitsel=(1, 6), suffix=None,
    #                       subdecoders=[]))

    # minor opcodes.
    pminor = [
        m19,
        Subdecoder(pattern=30, opcodes=get_csv("minor_30.csv"),
                   opint=False, bitsel=(1, 5), suffix=None, subdecoders=[]),
        Subdecoder(pattern=31, opcodes=get_csv("minor_31.csv"),
                   opint=True, bitsel=(1, 11), suffix=0b00101, subdecoders=[]),
        Subdecoder(pattern=58, opcodes=get_csv("minor_58.csv"),
                   opint=True, bitsel=(0, 2), suffix=None, subdecoders=[]),
        Subdecoder(pattern=62, opcodes=get_csv("minor_62.csv"),
                   opint=True, bitsel=(0, 2), suffix=None, subdecoders=[]),
        Subdecoder(pattern=22, opcodes=get_csv("minor_22.csv"),
                   opint=False, bitsel=(0, 11), suffix=None, subdecoders=[]),
        Subdecoder(pattern=5, opcodes=get_csv("minor_5.csv"),
                   opint=True, bitsel=(0, 11), suffix=None, subdecoders=[]),
        Subdecoder(pattern=4, opcodes=get_csv("minor_4.csv"),
                   opint=True, bitsel=(0, 6), suffix=None, subdecoders=[]),
    ]

    # FP 63L/H decoders.  TODO: move mffsfamily to separate subdecoder
    if include_fp:
        pminor.append(
            Subdecoder(pattern=63, opcodes=get_csv("minor_63.csv"),
                       opint=False, bitsel=(1, 11), suffix=None,
                       subdecoders=[]),
        )
        pminor.append(
            Subdecoder(pattern=59, opcodes=get_csv("minor_59.csv"),
                       opint=False, bitsel=(1, 11), suffix=None,
                       subdecoders=[]),
        )

    # top level: extra merged with major
    dec = []
    opcodes = get_csv("major.csv")
    dec.append(Subdecoder(pattern=None, opint=True, opcodes=opcodes,
                          bitsel=(26, 32), suffix=None, subdecoders=pminor))
    opcodes = get_csv("extra.csv")
    dec.append(Subdecoder(pattern=None, opint=False, opcodes=opcodes,
                          bitsel=(0, 32), suffix=None, subdecoders=[]))

    return TopPowerDecoder(32, dec, name=name, col_subset=col_subset,
                           row_subset=row_subset,
                           conditions=conditions)

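
# A minimal usage sketch (assumptions: nmigen's pysim simulator is available
# and the instruction CSV files are installed with the package).  The opcode
# value below is the word for "add r1, r2, r3"; with bigendian left at 0 the
# raw opcode is used as-is:
#
#   from nmigen.sim import Simulator, Settle
#
#   pdecode = create_pdecode(include_fp=True)
#   sim = Simulator(pdecode)
#
#   def process():
#       yield pdecode.raw_opcode_in.eq(0x7C221A14)  # add RT=1, RA=2, RB=3
#       yield Settle()
#       op = yield pdecode.op.internal_op           # expect MicrOp.OP_ADD
#       fu = yield pdecode.op.function_unit         # expect Function.ALU
#
#   sim.add_process(process)
#   sim.run()
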
# test function from
# https://github.com/apertus-open-source-cinema/naps/blob/9ebbc0/naps/soc/cli.py#L17


def fragment_repr(original):
    from textwrap import indent
    attrs_str = "\n"
    for attr in ['ports', 'drivers', 'statements', 'attrs',
                 'generated', 'flatten']:
        attrs_str += f"{attr}={repr(getattr(original, attr))},\n"

    domains_str = "\n"
    for name, domain in original.domains.items():
        # TODO: this is not really sound because domains could be non local
        domains_str += f"{name}: {domain.name}\n"
    attrs_str += f"domains={{{indent(domains_str, ' ')}}},\n"

    children_str = "\n"
    for child, name in original.subfragments:
        children_str += f"[{name}, {fragment_repr(child)}]\n"
    attrs_str += f"children=[{indent(children_str, ' ')}],\n"

    return f"Fragment({indent(attrs_str, ' ')})"


if __name__ == '__main__':

    if True:
        # row subset

        def rowsubsetfn(opcode, row):
            # log("row_subset", opcode, row)
            return row['unit'] in ['LDST', 'FPU']

        conditions = {
            'SVP64FFT': Signal(name="svp64fft", reset_less=True),
        }
        pdecode = create_pdecode(name="rowsub",
                                 col_subset={'opcode', 'function_unit',
                                             'asmcode',
                                             'in2_sel', 'in3_sel'},
                                 row_subset=rowsubsetfn,
                                 include_fp=True,
                                 conditions=conditions)
        vl = rtlil.convert(pdecode, ports=pdecode.ports())
        with open("row_subset_decoder.il", "w") as f:
            f.write(vl)

        vl = verilog.convert(pdecode, ports=pdecode.ports())
        with open("row_subset_decoder.v", "w") as f:
            f.write(vl)

        # col subset

        pdecode = create_pdecode(name="fusubset",
                                 col_subset={'function_unit'},
                                 conditions=conditions)
        vl = rtlil.convert(pdecode, ports=pdecode.ports())
        with open("col_subset_decoder.il", "w") as f:
            f.write(vl)

        from nmigen.hdl.ir import Fragment
        elaborated = Fragment.get(pdecode, platform=None)
        elaborated_repr = fragment_repr(elaborated)
        # log(elaborated_repr)

        exit(0)

    exit(0)

    # full decoder
    pdecode = create_pdecode(include_fp=True)
    vl = rtlil.convert(pdecode, ports=pdecode.ports())
    with open("decoder.il", "w") as f:
        f.write(vl)

    # full SVP64 decoder
    pdecode = create_pdecode_svp64_ldst(include_fp=True)
    vl = rtlil.convert(pdecode, ports=pdecode.ports())
    with open("decoder_svp64.il", "w") as f:
        f.write(vl)