3 from vcd
import VCDWriter
4 from vcd
.gtkw
import GTKWSave
6 from ..tools
import flatten
7 from ..fhdl
.ast
import *
8 from ..fhdl
.xfrm
import ValueTransformer
, StatementTransformer
11 __all__
= ["Simulator", "Delay", "Tick", "Passive", "DeadlineError"]
class DeadlineError(Exception):
    """Raised when delta cycles exceed a process deadline (see ``Simulator.step``);
    this almost always indicates a combinatorial loop in the design."""
19 __slots__
= ("curr", "curr_dirty", "next", "next_dirty")
22 self
.curr
= ValueDict()
23 self
.next
= ValueDict()
24 self
.curr_dirty
= ValueSet()
25 self
.next_dirty
= ValueSet()
27 def get(self
, signal
):
28 return self
.curr
[signal
]
30 def set(self
, signal
, value
):
31 assert isinstance(value
, int)
32 if self
.next
[signal
] != value
:
33 self
.next_dirty
.add(signal
)
34 self
.next
[signal
] = value
36 def commit(self
, signal
):
37 old_value
= self
.curr
[signal
]
38 if self
.curr
[signal
] != self
.next
[signal
]:
39 self
.next_dirty
.remove(signal
)
40 self
.curr_dirty
.add(signal
)
41 self
.curr
[signal
] = self
.next
[signal
]
42 new_value
= self
.curr
[signal
]
43 return old_value
, new_value
# Module-level shorthand for Const.normalize, used pervasively by the compiled
# evaluation lambdas below to coerce raw Python ints to a signal's shape.
normalize = Const.normalize
class _RHSValueCompiler(ValueTransformer):
    """Compile a right-hand-side value tree into a ``lambda state: int`` evaluator.

    Every Signal read during compilation is recorded into ``sensitivity`` so
    that callers can build sensitivity lists for the compiled statements.

    NOTE(review): the per-operator dispatch conditions were reconstructed from
    the surviving return expressions — confirm opcode spellings ("~", "-", "b",
    "+", "&", "|", "^", "==", "!=", "<", "<=", ">", ">=", "m") against the AST
    definitions in ..fhdl.ast.
    """
    def __init__(self, sensitivity):
        self.sensitivity = sensitivity

    def on_Const(self, value):
        return lambda state: value.value

    def on_Signal(self, value):
        # Record the read so the caller knows this evaluator depends on it.
        self.sensitivity.add(value)
        return lambda state: state.get(value)

    def on_ClockSignal(self, value):
        raise NotImplementedError # :nocov:

    def on_ResetSignal(self, value):
        raise NotImplementedError # :nocov:

    def on_Operator(self, value):
        shape = value.shape()
        if len(value.operands) == 1:
            arg, = map(self, value.operands)
            if value.op == "~":
                return lambda state: normalize(~arg(state), shape)
            if value.op == "-":
                return lambda state: normalize(-arg(state), shape)
            if value.op == "b":
                return lambda state: normalize(bool(arg(state)), shape)
        elif len(value.operands) == 2:
            lhs, rhs = map(self, value.operands)
            if value.op == "+":
                return lambda state: normalize(lhs(state) + rhs(state), shape)
            if value.op == "-":
                return lambda state: normalize(lhs(state) - rhs(state), shape)
            if value.op == "&":
                return lambda state: normalize(lhs(state) & rhs(state), shape)
            if value.op == "|":
                return lambda state: normalize(lhs(state) | rhs(state), shape)
            if value.op == "^":
                return lambda state: normalize(lhs(state) ^ rhs(state), shape)
            if value.op == "==":
                return lambda state: normalize(lhs(state) == rhs(state), shape)
            if value.op == "!=":
                return lambda state: normalize(lhs(state) != rhs(state), shape)
            if value.op == "<":
                return lambda state: normalize(lhs(state) < rhs(state), shape)
            if value.op == "<=":
                return lambda state: normalize(lhs(state) <= rhs(state), shape)
            if value.op == ">":
                return lambda state: normalize(lhs(state) > rhs(state), shape)
            if value.op == ">=":
                return lambda state: normalize(lhs(state) >= rhs(state), shape)
        elif len(value.operands) == 3:
            if value.op == "m":
                sel, val1, val0 = map(self, value.operands)
                return lambda state: val1(state) if sel(state) else val0(state)
        raise NotImplementedError("Operator '{!r}' not implemented".format(value.op)) # :nocov:

    def on_Slice(self, value):
        shape = value.shape()
        arg   = self(value.value)
        # Extract bits [start:end] of the evaluated operand.
        shift = value.start
        mask  = (1 << (value.end - value.start)) - 1
        return lambda state: normalize((arg(state) >> shift) & mask, shape)

    def on_Part(self, value):
        raise NotImplementedError

    def on_Cat(self, value):
        shape  = value.shape()
        # Precompute (offset, mask, evaluator) for every concatenated operand.
        parts  = []
        offset = 0
        for opnd in value.operands:
            parts.append((offset, (1 << len(opnd)) - 1, self(opnd)))
            offset += len(opnd)
        def eval(state):
            result = 0
            for offset, mask, opnd in parts:
                result |= (opnd(state) & mask) << offset
            return normalize(result, shape)
        return eval

    def on_Repl(self, value):
        shape  = value.shape()
        offset = len(value.value)
        mask   = (1 << len(value.value)) - 1
        count  = value.count
        opnd   = self(value.value)
        def eval(state):
            # Shift-and-or the operand value `count` times.
            result = 0
            for _ in range(count):
                result <<= offset
                result |= opnd(state)
            return normalize(result, shape)
        return eval
class _StatementCompiler(StatementTransformer):
    """Compile statements into ``run(state)`` closures that mutate the state.

    Signals read by compiled right-hand sides accumulate in ``sensitivity``.
    """
    def __init__(self):
        self.sensitivity  = ValueSet()
        self.rhs_compiler = _RHSValueCompiler(self.sensitivity)

    def lhs_compiler(self, value):
        # NOTE(review): only whole-signal assignment is supported here; there is
        # no bit-level LHS decomposition.
        return lambda state, arg: state.set(value, arg)

    def on_Assign(self, stmt):
        assert isinstance(stmt.lhs, Signal)
        shape = stmt.lhs.shape()
        lhs   = self.lhs_compiler(stmt.lhs)
        rhs   = self.rhs_compiler(stmt.rhs)
        def run(state):
            lhs(state, normalize(rhs(state), shape))
        return run

    def on_Switch(self, stmt):
        test  = self.rhs_compiler(stmt.test)
        cases = []
        for value, stmts in stmt.cases.items():
            # "-" marks don't-care bits: mask them out of both the pattern and
            # the comparison mask.
            if "-" in value:
                mask  = "".join("0" if b == "-" else "1" for b in value)
                value = "".join("0" if b == "-" else b   for b in value)
            else:
                mask  = "1" * len(value)
            mask  = int(mask, 2)
            value = int(value, 2)
            # Bind mask/value per iteration (avoids the late-binding pitfall).
            def make_test(mask, value):
                return lambda test: test & mask == value
            cases.append((make_test(mask, value), self.on_statements(stmts)))
        def run(state):
            test_value = test(state)
            # First matching case wins.
            for check, body in cases:
                if check(test_value):
                    body(state)
                    return
        return run

    def on_statements(self, stmts):
        stmts = [self.on_statement(stmt) for stmt in stmts]
        def run(state):
            for stmt in stmts:
                stmt(state)
        return run
def __init__(self, fragment, vcd_file=None, gtkw_file=None, traces=()):
    """Create a simulator for ``fragment``, optionally dumping VCD/GTKWave output.

    Parameters:
        fragment:  design to simulate; prepared lazily in ``__enter__``.
        vcd_file:  file object to receive a VCD dump, or None.
        gtkw_file: file object to receive a GTKWave save file, or None.
        traces:    signals to include in the GTKWave save file.
    """
    self._fragment        = fragment

    self._domains         = {}           # str/domain -> ClockDomain
    self._domain_triggers = ValueDict()  # Signal -> str/domain
    self._domain_signals  = {}           # str/domain -> {Signal}

    self._signals         = ValueSet()   # {Signal}
    self._comb_signals    = ValueSet()   # {Signal}
    self._sync_signals    = ValueSet()   # {Signal}
    self._user_signals    = ValueSet()   # {Signal}

    self._started         = False
    # Fix: _timestamp was never initialized in the visible code, yet it is
    # mutated with `+=` by _commit_sync_signals/step and read by run_until;
    # without this line the first step would raise AttributeError.
    self._timestamp       = 0.
    self._epsilon         = 1e-10
    self._fastest_clock   = self._epsilon
    self._state           = _State()

    self._processes       = set()        # {process}
    self._passive         = set()        # {process}
    self._suspended       = set()        # {process}
    self._wait_deadline   = {}           # process -> float/timestamp
    self._wait_tick       = {}           # process -> str/domain

    self._funclets        = ValueDict()  # Signal -> set(lambda)

    self._vcd_file        = vcd_file
    self._vcd_writer      = None
    self._vcd_signals     = ValueDict()  # signal -> set(vcd_signal)
    self._vcd_names       = ValueDict()  # signal -> str/name
    self._gtkw_file       = gtkw_file
    self._traces          = traces
227 def _check_process(self
, process
):
228 if inspect
.isgeneratorfunction(process
):
230 if not inspect
.isgenerator(process
):
231 raise TypeError("Cannot add a process '{!r}' because it is not a generator or"
232 "a generator function"
def add_process(self, process):
    """Validate ``process`` (see ``_check_process``) and schedule it to run."""
    checked = self._check_process(process)
    self._processes.add(checked)
def add_sync_process(self, process, domain="sync"):
    """Add ``process`` wrapped so that a bare ``yield`` waits for a tick of ``domain``."""
    process = self._check_process(process)
    def sync_process():
        # Prime the wrapped generator, then forward each command it yields,
        # substituting a Tick(domain) wait whenever it yields a falsy value.
        try:
            result = process.send(None)
            while True:
                result = process.send((yield (result or Tick(domain))))
        except StopIteration:
            pass
    self.add_process(sync_process())
def add_clock(self, period, domain="sync"):
    """Add a passive process that toggles the clock of ``domain`` every half ``period``."""
    # Track the fastest clock seen, for GTKWave zoom computation on exit.
    if self._fastest_clock == self._epsilon or period < self._fastest_clock:
        self._fastest_clock = period

    half_period = period / 2
    clk = self._domains[domain].clk
    def clk_process():
        # Passive: the clock alone must not keep the simulation alive.
        # NOTE(review): toggle scaffolding reconstructed around the three
        # surviving `yield Delay(half_period)` lines — confirm edge order.
        yield Passive()
        yield Delay(half_period)
        while True:
            yield clk.eq(1)
            yield Delay(half_period)
            yield clk.eq(0)
            yield Delay(half_period)
    self.add_process(clk_process())
269 self
._vcd
_writer
= VCDWriter(self
._vcd
_file
, timescale
="100 ps",
270 comment
="Generated by nMigen")
272 root_fragment
= self
._fragment
.prepare()
274 self
._domains
= root_fragment
.domains
275 for domain
, cd
in self
._domains
.items():
276 self
._domain
_triggers
[cd
.clk
] = domain
277 if cd
.rst
is not None:
278 self
._domain
_triggers
[cd
.rst
] = domain
279 self
._domain
_signals
[domain
] = ValueSet()
282 def add_fragment(fragment
, scope
=("top",)):
283 hierarchy
[fragment
] = scope
284 for subfragment
, name
in fragment
.subfragments
:
285 add_fragment(subfragment
, (*scope
, name
))
286 add_fragment(root_fragment
)
288 for fragment
, fragment_name
in hierarchy
.items():
289 for signal
in fragment
.iter_signals():
290 self
._signals
.add(signal
)
292 self
._state
.curr
[signal
] = self
._state
.next
[signal
] = \
293 normalize(signal
.reset
, signal
.shape())
294 self
._state
.curr_dirty
.add(signal
)
296 if not self
._vcd
_writer
:
299 if signal
not in self
._vcd
_signals
:
300 self
._vcd
_signals
[signal
] = set()
302 for subfragment
, name
in fragment
.subfragments
:
303 if signal
in subfragment
.ports
:
304 var_name
= "{}_{}".format(name
, signal
.name
)
307 var_name
= signal
.name
312 var_init
= signal
.decoder(signal
.reset
).replace(" ", "_")
315 var_size
= signal
.nbits
316 var_init
= signal
.reset
322 var_name_suffix
= var_name
324 var_name_suffix
= "{}${}".format(var_name
, suffix
)
325 self
._vcd
_signals
[signal
].add(self
._vcd
_writer
.register_var(
326 scope
=".".join(fragment_name
), name
=var_name_suffix
,
327 var_type
=var_type
, size
=var_size
, init
=var_init
))
328 if signal
not in self
._vcd
_names
:
329 self
._vcd
_names
[signal
] = ".".join(fragment_name
+ (var_name_suffix
,))
332 suffix
= (suffix
or 0) + 1
334 for domain
, signals
in fragment
.drivers
.items():
336 self
._comb
_signals
.update(signals
)
338 self
._sync
_signals
.update(signals
)
339 self
._domain
_signals
[domain
].update(signals
)
342 for signal
in fragment
.iter_comb():
343 statements
.append(signal
.eq(signal
.reset
))
344 statements
+= fragment
.statements
346 def add_funclet(signal
, funclet
):
347 if signal
not in self
._funclets
:
348 self
._funclets
[signal
] = set()
349 self
._funclets
[signal
].add(funclet
)
351 compiler
= _StatementCompiler()
352 funclet
= compiler(statements
)
353 for signal
in compiler
.sensitivity
:
354 add_funclet(signal
, funclet
)
355 for domain
, cd
in fragment
.domains
.items():
356 add_funclet(cd
.clk
, funclet
)
357 if cd
.rst
is not None:
358 add_funclet(cd
.rst
, funclet
)
360 self
._user
_signals
= self
._signals
- self
._comb
_signals
- self
._sync
_signals
364 def _update_dirty_signals(self
):
365 """Perform the statement part of IR processes (aka RTLIL case)."""
366 # First, for all dirty signals, use sensitivity lists to determine the set of fragments
367 # that need their statements to be reevaluated because the signals changed at the previous
370 while self
._state
.curr_dirty
:
371 signal
= self
._state
.curr_dirty
.pop()
372 if signal
in self
._funclets
:
373 funclets
.update(self
._funclets
[signal
])
375 # Second, compute the values of all signals at the start of the next delta cycle, by
376 # running precompiled statements.
377 for funclet
in funclets
:
380 def _commit_signal(self
, signal
, domains
):
381 """Perform the driver part of IR processes (aka RTLIL sync), for individual signals."""
382 # Take the computed value (at the start of this delta cycle) of a signal (that could have
383 # come from an IR process that ran earlier, or modified by a simulator process) and update
384 # the value for this delta cycle.
385 old
, new
= self
._state
.commit(signal
)
387 # If the signal is a clock that triggers synchronous logic, record that fact.
388 if (old
, new
) == (0, 1) and signal
in self
._domain
_triggers
:
389 domains
.add(self
._domain
_triggers
[signal
])
391 if self
._vcd
_writer
and old
!= new
:
392 # Finally, dump the new value to the VCD file.
393 for vcd_signal
in self
._vcd
_signals
[signal
]:
395 var_value
= signal
.decoder(new
).replace(" ", "_")
398 self
._vcd
_writer
.change(vcd_signal
, self
._timestamp
/ self
._epsilon
, var_value
)
400 def _commit_comb_signals(self
, domains
):
401 """Perform the comb part of IR processes (aka RTLIL always)."""
402 # Take the computed value (at the start of this delta cycle) of every comb signal and
403 # update the value for this delta cycle.
404 for signal
in self
._state
.next_dirty
:
405 if signal
in self
._comb
_signals
or signal
in self
._user
_signals
:
406 self
._commit
_signal
(signal
, domains
)
408 def _commit_sync_signals(self
, domains
):
409 """Perform the sync part of IR processes (aka RTLIL posedge)."""
410 # At entry, `domains` contains a list of every simultaneously triggered sync update.
412 # Advance the timeline a bit (purely for observational purposes) and commit all of them
413 # at the same timestamp.
414 self
._timestamp
+= self
._epsilon
415 curr_domains
, domains
= domains
, set()
418 domain
= curr_domains
.pop()
420 # Take the computed value (at the start of this delta cycle) of every sync signal
421 # in this domain and update the value for this delta cycle. This can trigger more
422 # synchronous logic, so record that.
423 for signal
in self
._state
.next_dirty
:
424 if signal
in self
._domain
_signals
[domain
]:
425 self
._commit
_signal
(signal
, domains
)
427 # Wake up any simulator processes that wait for a domain tick.
428 for process
, wait_domain
in list(self
._wait
_tick
.items()):
429 if domain
== wait_domain
:
430 del self
._wait
_tick
[process
]
431 self
._suspended
.remove(process
)
433 # Unless handling synchronous logic above has triggered more synchronous logic (which
434 # can happen e.g. if a domain is clocked off a clock divisor in fabric), we're done.
435 # Otherwise, do one more round of updates.
437 def _run_process(self
, process
):
438 def format_process(process
):
439 frame
= process
.gi_frame
440 return "{}:{}".format(inspect
.getfile(frame
), inspect
.getlineno(frame
))
443 cmd
= process
.send(None)
445 if isinstance(cmd
, Delay
):
446 if cmd
.interval
is None:
447 interval
= self
._epsilon
449 interval
= cmd
.interval
450 self
._wait
_deadline
[process
] = self
._timestamp
+ interval
451 self
._suspended
.add(process
)
453 elif isinstance(cmd
, Tick
):
454 self
._wait
_tick
[process
] = cmd
.domain
455 self
._suspended
.add(process
)
457 elif isinstance(cmd
, Passive
):
458 self
._passive
.add(process
)
460 elif isinstance(cmd
, Value
):
461 funclet
= _RHSValueCompiler(sensitivity
=ValueSet())(cmd
)
462 cmd
= process
.send(funclet(self
._state
))
465 elif isinstance(cmd
, Assign
):
466 lhs_signals
= cmd
.lhs
._lhs
_signals
()
467 for signal
in lhs_signals
:
468 if not signal
in self
._signals
:
469 raise ValueError("Process '{}' sent a request to set signal '{!r}', "
470 "which is not a part of simulation"
471 .format(format_process(process
), signal
))
472 if signal
in self
._comb
_signals
:
473 raise ValueError("Process '{}' sent a request to set signal '{!r}', "
474 "which is a part of combinatorial assignment in "
476 .format(format_process(process
), signal
))
478 funclet
= _StatementCompiler()(cmd
)
482 for signal
in lhs_signals
:
483 self
._commit
_signal
(signal
, domains
)
484 self
._commit
_sync
_signals
(domains
)
487 raise TypeError("Received unsupported command '{!r}' from process '{}'"
488 .format(cmd
, format_process(process
)))
492 except StopIteration:
493 self
._processes
.remove(process
)
494 self
._passive
.discard(process
)
496 except Exception as e
:
def step(self, run_passive=False):
    """Run one scheduling step; return True when there may be more work to do.

    NOTE(review): the `deadline = None` / `domains = set()` initializations and
    the True/False returns were reconstructed from their visible uses here and
    in run_until.
    """
    deadline = None
    if self._wait_deadline:
        # We might run some delta cycles, and we have simulator processes
        # waiting on a deadline. Take care to not exceed the closest deadline.
        deadline = min(self._wait_deadline.values())

    # Are there any delta cycles we should run?
    while self._state.curr_dirty:
        self._timestamp += self._epsilon
        if deadline is not None and self._timestamp >= deadline:
            # Oops, we blew the deadline. We *could* run the processes now, but
            # this is virtually certainly a logic loop and a design bug, so
            # bail out instead.
            raise DeadlineError("Delta cycles exceeded process deadline; combinatorial loop?")

        domains = set()
        self._update_dirty_signals()
        self._commit_comb_signals(domains)
        self._commit_sync_signals(domains)

    # Are there any processes that haven't had a chance to run yet?
    if len(self._processes) > len(self._suspended):
        # Schedule an arbitrary one.
        process = (self._processes - set(self._suspended)).pop()
        self._run_process(process)
        return True

    # All processes are suspended. Are any of them active?
    if len(self._processes) > len(self._passive) or run_passive:
        # Are any of them suspended before a deadline?
        if self._wait_deadline:
            # Schedule the one with the lowest deadline.
            process, deadline = min(self._wait_deadline.items(), key=lambda x: x[1])
            del self._wait_deadline[process]
            self._suspended.remove(process)
            self._timestamp = deadline
            self._run_process(process)
            return True

    # No processes, or all processes are passive. Nothing to do!
    return False
def run_until(self, deadline, run_passive=False):
    """Step the simulation until the timeline reaches ``deadline``.

    Returns True when the deadline was reached, False when the simulation ran
    out of work first.
    """
    while self._timestamp < deadline:
        if not self.step(run_passive):
            return False
    return True
552 def __exit__(self
, *args
):
554 self
._vcd
_writer
.close(self
._timestamp
/ self
._epsilon
)
556 if self
._vcd
_file
and self
._gtkw
_file
:
557 gtkw_save
= GTKWSave(self
._gtkw
_file
)
558 if hasattr(self
._vcd
_file
, "name"):
559 gtkw_save
.dumpfile(self
._vcd
_file
.name
)
560 if hasattr(self
._vcd
_file
, "tell"):
561 gtkw_save
.dumpfile_size(self
._vcd
_file
.tell())
563 gtkw_save
.treeopen("top")
564 gtkw_save
.zoom_markers(math
.log(self
._epsilon
/ self
._fastest
_clock
) - 14)
566 def add_trace(signal
, **kwargs
):
567 if signal
in self
._vcd
_names
:
569 suffix
= "[{}:0]".format(len(signal
) - 1)
572 gtkw_save
.trace(self
._vcd
_names
[signal
] + suffix
, **kwargs
)
574 for domain
, cd
in self
._domains
.items():
575 with gtkw_save
.group("d.{}".format(domain
)):
576 if cd
.rst
is not None:
580 for signal
in self
._traces
:
584 self
._vcd
_file
.close()
586 self
._gtkw
_file
.close()