3 from contextlib
import contextmanager
4 from vcd
import VCDWriter
5 from vcd
.gtkw
import GTKWSave
7 from ..tools
import flatten
8 from ..hdl
.ast
import *
9 from ..hdl
.xfrm
import ValueTransformer
, StatementTransformer
12 __all__
= ["Simulator", "Delay", "Tick", "Passive", "DeadlineError"]
15 class DeadlineError(Exception):
20 __slots__
= ("curr", "curr_dirty", "next", "next_dirty")
23 self
.curr
= ValueDict()
24 self
.next
= ValueDict()
25 self
.curr_dirty
= ValueSet()
26 self
.next_dirty
= ValueSet()
def set(self, signal, value):
    """Record *value* as the next-delta-cycle value of *signal*.

    Marks the signal as dirty only when the scheduled value actually
    differs from the currently pending one.
    """
    assert isinstance(value, int)
    if self.next[signal] == value:
        return
    self.next_dirty.add(signal)
    self.next[signal] = value
def commit(self, signal):
    """Promote the pending value of *signal* into the current delta cycle.

    If the value changed, move the signal from the "next" dirty set to the
    "current" dirty set and update the current value. Returns the
    ``(old_value, new_value)`` pair either way.
    """
    previous = self.curr[signal]
    pending = self.next[signal]
    if previous != pending:
        self.next_dirty.remove(signal)
        self.curr_dirty.add(signal)
        self.curr[signal] = pending
    return previous, pending
# Module-level alias: the compiled evaluator closures below call this to fit
# a raw Python integer to a value's shape (see Const.normalize in ..hdl.ast).
normalize = Const.normalize
47 class _RHSValueCompiler(ValueTransformer
):
def __init__(self, sensitivity=None):
    """Create an RHS compiler.

    When *sensitivity* is a set-like container, every signal read by the
    compiled expressions is recorded into it; ``None`` disables tracking.
    """
    self.sensitivity = sensitivity
def on_Const(self, value):
    """Compile a constant; the resulting closure ignores the simulator state."""
    def eval_const(state):
        return value.value
    return eval_const
def on_Signal(self, value):
    """Compile a signal read, registering it in the sensitivity set if tracking."""
    if self.sensitivity is not None:
        self.sensitivity.add(value)
    def eval_signal(state):
        return state.curr[value]
    return eval_signal
def on_ClockSignal(self, value):
    """Raw ClockSignal values are not compilable here (presumably resolved earlier)."""
    raise NotImplementedError # :nocov:
def on_ResetSignal(self, value):
    """Raw ResetSignal values are not compilable here (presumably resolved earlier)."""
    raise NotImplementedError # :nocov:
65 def on_Operator(self
, value
):
67 if len(value
.operands
) == 1:
68 arg
, = map(self
, value
.operands
)
70 return lambda state
: normalize(~
arg(state
), shape
)
72 return lambda state
: normalize(-arg(state
), shape
)
74 return lambda state
: normalize(bool(arg(state
)), shape
)
75 elif len(value
.operands
) == 2:
76 lhs
, rhs
= map(self
, value
.operands
)
78 return lambda state
: normalize(lhs(state
) + rhs(state
), shape
)
80 return lambda state
: normalize(lhs(state
) - rhs(state
), shape
)
82 return lambda state
: normalize(lhs(state
) & rhs(state
), shape
)
84 return lambda state
: normalize(lhs(state
) |
rhs(state
), shape
)
86 return lambda state
: normalize(lhs(state
) ^
rhs(state
), shape
)
89 return lhs
<< rhs
if rhs
>= 0 else lhs
>> -rhs
90 return lambda state
: normalize(sshl(lhs(state
), rhs(state
)), shape
)
93 return lhs
>> rhs
if rhs
>= 0 else lhs
<< -rhs
94 return lambda state
: normalize(sshr(lhs(state
), rhs(state
)), shape
)
96 return lambda state
: normalize(lhs(state
) == rhs(state
), shape
)
98 return lambda state
: normalize(lhs(state
) != rhs(state
), shape
)
100 return lambda state
: normalize(lhs(state
) < rhs(state
), shape
)
102 return lambda state
: normalize(lhs(state
) <= rhs(state
), shape
)
104 return lambda state
: normalize(lhs(state
) > rhs(state
), shape
)
106 return lambda state
: normalize(lhs(state
) >= rhs(state
), shape
)
107 elif len(value
.operands
) == 3:
109 sel
, val1
, val0
= map(self
, value
.operands
)
110 return lambda state
: val1(state
) if sel(state
) else val0(state
)
111 raise NotImplementedError("Operator '{}' not implemented".format(value
.op
)) # :nocov:
113 def on_Slice(self
, value
):
114 shape
= value
.shape()
115 arg
= self(value
.value
)
117 mask
= (1 << (value
.end
- value
.start
)) - 1
118 return lambda state
: normalize((arg(state
) >> shift
) & mask
, shape
)
def on_Part(self, value):
    """Compile a dynamically-offset part select.

    The source value is shifted right by the compiled offset, masked to the
    part width, and normalized to the part's shape.
    """
    shape = value.shape()
    source = self(value.value)
    offset = self(value.offset)
    width_mask = (1 << value.width) - 1
    def eval_part(state):
        return normalize((source(state) >> offset(state)) & width_mask, shape)
    return eval_part
127 def on_Cat(self
, value
):
128 shape
= value
.shape()
131 for opnd
in value
.operands
:
132 parts
.append((offset
, (1 << len(opnd
)) - 1, self(opnd
)))
136 for offset
, mask
, opnd
in parts
:
137 result |
= (opnd(state
) & mask
) << offset
138 return normalize(result
, shape
)
141 def on_Repl(self
, value
):
142 shape
= value
.shape()
143 offset
= len(value
.value
)
144 mask
= (1 << len(value
.value
)) - 1
146 opnd
= self(value
.value
)
149 for _
in range(count
):
151 result |
= opnd(state
)
152 return normalize(result
, shape
)
def on_ArrayProxy(self, value):
    """Compile an array access: evaluate the element picked by the compiled index."""
    shape = value.shape()
    compiled_elems = [self(elem) for elem in value.elems]
    compiled_index = self(value.index)
    def eval_proxy(state):
        return normalize(compiled_elems[compiled_index(state)](state), shape)
    return eval_proxy
class _LHSValueCompiler(ValueTransformer):
    """Compile lvalues into setter closures of the form ``f(state, arg)``.

    Only plain signals can currently be assigned. Value forms that can never
    be an lvalue raise TypeError; forms that could be supported but are not
    yet raise NotImplementedError.
    """

    def on_Const(self, value):
        raise TypeError # :nocov:

    def on_Signal(self, value):
        def assign(state, arg):
            return state.set(value, arg)
        return assign

    def on_ClockSignal(self, value):
        raise NotImplementedError # :nocov:

    def on_ResetSignal(self, value):
        raise NotImplementedError # :nocov:

    def on_Operator(self, value):
        raise TypeError # :nocov:

    def on_Slice(self, value):
        raise NotImplementedError

    def on_Part(self, value):
        raise NotImplementedError

    def on_Cat(self, value):
        raise NotImplementedError

    def on_Repl(self, value):
        raise TypeError # :nocov:

    def on_ArrayProxy(self, value):
        raise NotImplementedError
194 class _StatementCompiler(StatementTransformer
):
196 self
.sensitivity
= ValueSet()
197 self
.rhs_compiler
= _RHSValueCompiler(self
.sensitivity
)
198 self
.lhs_compiler
= _LHSValueCompiler()
200 def on_Assign(self
, stmt
):
201 shape
= stmt
.lhs
.shape()
202 lhs
= self
.lhs_compiler(stmt
.lhs
)
203 rhs
= self
.rhs_compiler(stmt
.rhs
)
205 lhs(state
, normalize(rhs(state
), shape
))
208 def on_Switch(self
, stmt
):
209 test
= self
.rhs_compiler(stmt
.test
)
211 for value
, stmts
in stmt
.cases
.items():
213 mask
= "".join("0" if b
== "-" else "1" for b
in value
)
214 value
= "".join("0" if b
== "-" else b
for b
in value
)
216 mask
= "1" * len(value
)
218 value
= int(value
, 2)
219 def make_test(mask
, value
):
220 return lambda test
: test
& mask
== value
221 cases
.append((make_test(mask
, value
), self
.on_statements(stmts
)))
223 test_value
= test(state
)
224 for check
, body
in cases
:
225 if check(test_value
):
230 def on_statements(self
, stmts
):
231 stmts
= [self
.on_statement(stmt
) for stmt
in stmts
]
239 def __init__(self
, fragment
, vcd_file
=None, gtkw_file
=None, traces
=()):
240 self
._fragment
= fragment
242 self
._domains
= dict() # str/domain -> ClockDomain
243 self
._domain
_triggers
= ValueDict() # Signal -> str/domain
244 self
._domain
_signals
= dict() # str/domain -> {Signal}
246 self
._signals
= ValueSet() # {Signal}
247 self
._comb
_signals
= ValueSet() # {Signal}
248 self
._sync
_signals
= ValueSet() # {Signal}
249 self
._user
_signals
= ValueSet() # {Signal}
251 self
._started
= False
254 self
._epsilon
= 1e-10
255 self
._fastest
_clock
= self
._epsilon
256 self
._state
= _State()
258 self
._processes
= set() # {process}
259 self
._process
_loc
= dict() # process -> str/loc
260 self
._passive
= set() # {process}
261 self
._suspended
= set() # {process}
262 self
._wait
_deadline
= dict() # process -> float/timestamp
263 self
._wait
_tick
= dict() # process -> str/domain
265 self
._funclets
= ValueDict() # Signal -> set(lambda)
267 self
._vcd
_file
= vcd_file
268 self
._vcd
_writer
= None
269 self
._vcd
_signals
= ValueDict() # signal -> set(vcd_signal)
270 self
._vcd
_names
= ValueDict() # signal -> str/name
271 self
._gtkw
_file
= gtkw_file
272 self
._traces
= traces
275 def _check_process(process
):
276 if inspect
.isgeneratorfunction(process
):
278 if not inspect
.isgenerator(process
):
279 raise TypeError("Cannot add a process '{!r}' because it is not a generator or"
280 "a generator function"
284 def _name_process(self
, process
):
285 if process
in self
._process
_loc
:
286 return self
._process
_loc
[process
]
288 frame
= process
.gi_frame
289 return "{}:{}".format(inspect
.getfile(frame
), inspect
.getlineno(frame
))
def add_process(self, process):
    """Register a simulator process after normalizing it via ``_check_process``."""
    checked = self._check_process(process)
    self._processes.add(checked)
295 def add_sync_process(self
, process
, domain
="sync"):
296 process
= self
._check
_process
(process
)
302 result
= Tick(domain
)
303 self
._process
_loc
[sync_process
] = self
._name
_process
(process
)
304 result
= process
.send((yield result
))
305 except StopIteration:
307 sync_process
= sync_process()
308 self
.add_process(sync_process
)
310 def add_clock(self
, period
, phase
=None, domain
="sync"):
311 if self
._fastest
_clock
== self
._epsilon
or period
< self
._fastest
_clock
:
312 self
._fastest
_clock
= period
314 half_period
= period
/ 2
317 clk
= self
._domains
[domain
].clk
323 yield Delay(half_period
)
325 yield Delay(half_period
)
326 self
.add_process(clk_process
)
330 self
._vcd
_writer
= VCDWriter(self
._vcd
_file
, timescale
="100 ps",
331 comment
="Generated by nMigen")
333 root_fragment
= self
._fragment
.prepare()
335 self
._domains
= root_fragment
.domains
336 for domain
, cd
in self
._domains
.items():
337 self
._domain
_triggers
[cd
.clk
] = domain
338 if cd
.rst
is not None:
339 self
._domain
_triggers
[cd
.rst
] = domain
340 self
._domain
_signals
[domain
] = ValueSet()
343 def add_fragment(fragment
, scope
=()):
344 hierarchy
[fragment
] = scope
345 for subfragment
, name
in fragment
.subfragments
:
346 add_fragment(subfragment
, (*scope
, name
))
347 add_fragment(root_fragment
)
349 for fragment
, fragment_scope
in hierarchy
.items():
350 for signal
in fragment
.iter_signals():
351 self
._signals
.add(signal
)
353 self
._state
.curr
[signal
] = self
._state
.next
[signal
] = \
354 normalize(signal
.reset
, signal
.shape())
355 self
._state
.curr_dirty
.add(signal
)
357 if not self
._vcd
_writer
:
360 if signal
not in self
._vcd
_signals
:
361 self
._vcd
_signals
[signal
] = set()
363 for subfragment
, name
in fragment
.subfragments
:
364 if signal
in subfragment
.ports
:
365 var_name
= "{}_{}".format(name
, signal
.name
)
368 var_name
= signal
.name
373 var_init
= signal
.decoder(signal
.reset
).replace(" ", "_")
376 var_size
= signal
.nbits
377 var_init
= signal
.reset
383 var_name_suffix
= var_name
385 var_name_suffix
= "{}${}".format(var_name
, suffix
)
386 self
._vcd
_signals
[signal
].add(self
._vcd
_writer
.register_var(
387 scope
=".".join(fragment_scope
), name
=var_name_suffix
,
388 var_type
=var_type
, size
=var_size
, init
=var_init
))
389 if signal
not in self
._vcd
_names
:
390 self
._vcd
_names
[signal
] = ".".join(fragment_scope
+ (var_name_suffix
,))
393 suffix
= (suffix
or 0) + 1
395 for domain
, signals
in fragment
.drivers
.items():
397 self
._comb
_signals
.update(signals
)
399 self
._sync
_signals
.update(signals
)
400 self
._domain
_signals
[domain
].update(signals
)
403 for signal
in fragment
.iter_comb():
404 statements
.append(signal
.eq(signal
.reset
))
405 for domain
, signal
in fragment
.iter_sync():
406 statements
.append(signal
.eq(signal
))
407 statements
+= fragment
.statements
409 compiler
= _StatementCompiler()
410 funclet
= compiler(statements
)
412 def add_funclet(signal
, funclet
):
413 if signal
not in self
._funclets
:
414 self
._funclets
[signal
] = set()
415 self
._funclets
[signal
].add(funclet
)
417 for signal
in compiler
.sensitivity
:
418 add_funclet(signal
, funclet
)
419 for domain
, cd
in fragment
.domains
.items():
420 add_funclet(cd
.clk
, funclet
)
421 if cd
.rst
is not None:
422 add_funclet(cd
.rst
, funclet
)
424 self
._user
_signals
= self
._signals
- self
._comb
_signals
- self
._sync
_signals
428 def _update_dirty_signals(self
):
429 """Perform the statement part of IR processes (aka RTLIL case)."""
430 # First, for all dirty signals, use sensitivity lists to determine the set of fragments
431 # that need their statements to be reevaluated because the signals changed at the previous
434 while self
._state
.curr_dirty
:
435 signal
= self
._state
.curr_dirty
.pop()
436 if signal
in self
._funclets
:
437 funclets
.update(self
._funclets
[signal
])
439 # Second, compute the values of all signals at the start of the next delta cycle, by
440 # running precompiled statements.
441 for funclet
in funclets
:
444 def _commit_signal(self
, signal
, domains
):
445 """Perform the driver part of IR processes (aka RTLIL sync), for individual signals."""
446 # Take the computed value (at the start of this delta cycle) of a signal (that could have
447 # come from an IR process that ran earlier, or modified by a simulator process) and update
448 # the value for this delta cycle.
449 old
, new
= self
._state
.commit(signal
)
451 # If the signal is a clock that triggers synchronous logic, record that fact.
452 if (old
, new
) == (0, 1) and signal
in self
._domain
_triggers
:
453 domains
.add(self
._domain
_triggers
[signal
])
455 if self
._vcd
_writer
and old
!= new
:
456 # Finally, dump the new value to the VCD file.
457 for vcd_signal
in self
._vcd
_signals
[signal
]:
459 var_value
= signal
.decoder(new
).replace(" ", "_")
462 vcd_timestamp
= (self
._timestamp
+ self
._delta
) / self
._epsilon
463 self
._vcd
_writer
.change(vcd_signal
, vcd_timestamp
, var_value
)
465 def _commit_comb_signals(self
, domains
):
466 """Perform the comb part of IR processes (aka RTLIL always)."""
467 # Take the computed value (at the start of this delta cycle) of every comb signal and
468 # update the value for this delta cycle.
469 for signal
in self
._state
.next_dirty
:
470 if signal
in self
._comb
_signals
:
471 self
._commit
_signal
(signal
, domains
)
473 def _commit_sync_signals(self
, domains
):
474 """Perform the sync part of IR processes (aka RTLIL posedge)."""
475 # At entry, `domains` contains a list of every simultaneously triggered sync update.
477 # Advance the timeline a bit (purely for observational purposes) and commit all of them
478 # at the same timestamp.
479 self
._delta
+= self
._epsilon
480 curr_domains
, domains
= domains
, set()
483 domain
= curr_domains
.pop()
485 # Take the computed value (at the start of this delta cycle) of every sync signal
486 # in this domain and update the value for this delta cycle. This can trigger more
487 # synchronous logic, so record that.
488 for signal
in self
._state
.next_dirty
:
489 if signal
in self
._domain
_signals
[domain
]:
490 self
._commit
_signal
(signal
, domains
)
492 # Wake up any simulator processes that wait for a domain tick.
493 for process
, wait_domain
in list(self
._wait
_tick
.items()):
494 if domain
== wait_domain
:
495 del self
._wait
_tick
[process
]
496 self
._suspended
.remove(process
)
498 # Unless handling synchronous logic above has triggered more synchronous logic (which
499 # can happen e.g. if a domain is clocked off a clock divisor in fabric), we're done.
500 # Otherwise, do one more round of updates.
502 def _run_process(self
, process
):
504 cmd
= process
.send(None)
506 if isinstance(cmd
, Delay
):
507 if cmd
.interval
is None:
508 interval
= self
._epsilon
510 interval
= cmd
.interval
511 self
._wait
_deadline
[process
] = self
._timestamp
+ interval
512 self
._suspended
.add(process
)
514 elif isinstance(cmd
, Tick
):
515 self
._wait
_tick
[process
] = cmd
.domain
516 self
._suspended
.add(process
)
518 elif isinstance(cmd
, Passive
):
519 self
._passive
.add(process
)
521 elif isinstance(cmd
, Value
):
522 compiler
= _RHSValueCompiler()
523 funclet
= compiler(cmd
)
524 cmd
= process
.send(funclet(self
._state
))
527 elif isinstance(cmd
, Assign
):
528 lhs_signals
= cmd
.lhs
._lhs
_signals
()
529 for signal
in lhs_signals
:
530 if not signal
in self
._signals
:
531 raise ValueError("Process '{}' sent a request to set signal '{!r}', "
532 "which is not a part of simulation"
533 .format(self
._name
_process
(process
), signal
))
534 if signal
in self
._comb
_signals
:
535 raise ValueError("Process '{}' sent a request to set signal '{!r}', "
536 "which is a part of combinatorial assignment in "
538 .format(self
._name
_process
(process
), signal
))
540 compiler
= _StatementCompiler()
541 funclet
= compiler(cmd
)
545 for signal
in lhs_signals
:
546 self
._commit
_signal
(signal
, domains
)
547 self
._commit
_sync
_signals
(domains
)
550 raise TypeError("Received unsupported command '{!r}' from process '{}'"
551 .format(cmd
, self
._name
_process
(process
)))
555 except StopIteration:
556 self
._processes
.remove(process
)
557 self
._passive
.discard(process
)
559 except Exception as e
:
562 def step(self
, run_passive
=False):
563 # Are there any delta cycles we should run?
564 if self
._state
.curr_dirty
:
565 # We might run some delta cycles, and we have simulator processes waiting on
566 # a deadline. Take care to not exceed the closest deadline.
567 if self
._wait
_deadline
and \
568 (self
._timestamp
+ self
._delta
) >= min(self
._wait
_deadline
.values()):
569 # Oops, we blew the deadline. We *could* run the processes now, but this is
570 # virtually certainly a logic loop and a design bug, so bail out instead.d
571 raise DeadlineError("Delta cycles exceeded process deadline; combinatorial loop?")
574 while self
._state
.curr_dirty
:
575 self
._update
_dirty
_signals
()
576 self
._commit
_comb
_signals
(domains
)
577 self
._commit
_sync
_signals
(domains
)
580 # Are there any processes that haven't had a chance to run yet?
581 if len(self
._processes
) > len(self
._suspended
):
582 # Schedule an arbitrary one.
583 process
= (self
._processes
- set(self
._suspended
)).pop()
584 self
._run
_process
(process
)
587 # All processes are suspended. Are any of them active?
588 if len(self
._processes
) > len(self
._passive
) or run_passive
:
589 # Are any of them suspended before a deadline?
590 if self
._wait
_deadline
:
591 # Schedule the one with the lowest deadline.
592 process
, deadline
= min(self
._wait
_deadline
.items(), key
=lambda x
: x
[1])
593 del self
._wait
_deadline
[process
]
594 self
._suspended
.remove(process
)
595 self
._timestamp
= deadline
597 self
._run
_process
(process
)
600 # No processes, or all processes are passive. Nothing to do!
607 def run_until(self
, deadline
, run_passive
=False):
608 while self
._timestamp
< deadline
:
609 if not self
.step(run_passive
):
614 def __exit__(self
, *args
):
616 vcd_timestamp
= (self
._timestamp
+ self
._delta
) / self
._epsilon
617 self
._vcd
_writer
.close(vcd_timestamp
)
619 if self
._vcd
_file
and self
._gtkw
_file
:
620 gtkw_save
= GTKWSave(self
._gtkw
_file
)
621 if hasattr(self
._vcd
_file
, "name"):
622 gtkw_save
.dumpfile(self
._vcd
_file
.name
)
623 if hasattr(self
._vcd
_file
, "tell"):
624 gtkw_save
.dumpfile_size(self
._vcd
_file
.tell())
626 gtkw_save
.treeopen("top")
627 gtkw_save
.zoom_markers(math
.log(self
._epsilon
/ self
._fastest
_clock
) - 14)
629 def add_trace(signal
, **kwargs
):
630 if signal
in self
._vcd
_names
:
632 suffix
= "[{}:0]".format(len(signal
) - 1)
635 gtkw_save
.trace(self
._vcd
_names
[signal
] + suffix
, **kwargs
)
637 for domain
, cd
in self
._domains
.items():
638 with gtkw_save
.group("d.{}".format(domain
)):
639 if cd
.rst
is not None:
643 for signal
in self
._traces
:
647 self
._vcd
_file
.close()
649 self
._gtkw
_file
.close()