3 // Copyright (c) 2015 Riscv Developers
4 // Copyright (c) 2016 The University of Virginia
5 // All rights reserved.
7 // Redistribution and use in source and binary forms, with or without
8 // modification, are permitted provided that the following conditions are
9 // met: redistributions of source code must retain the above copyright
10 // notice, this list of conditions and the following disclaimer;
11 // redistributions in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the distribution;
14 // neither the name of the copyright holders nor the names of its
15 // contributors may be used to endorse or promote products derived from
16 // this software without specific prior written permission.
18 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 // Declaration templates
// Declares the AMO macro-op class plus its nested RMW micro-op, which
// carries the memory-access entry points.
// NOTE(review): the embedded original line numbers skip (31, 33, 35, ...),
// so braces, access specifiers and doc-comment delimiters from the full
// template are not visible in this extract; code left byte-identical.
31 def template AtomicMemOpDeclare {{
33 * Static instruction class for an AtomicMemOp operation
// Macro-op wrapper: only a constructor is declared here; it builds the
// micro-op sequence (see AtomicMemOpMacroConstructor).
35 class %(class_name)s : public %(base_class)s
39 %(class_name)s(ExtMachInst machInst);
44 * The main RMW part of an AMO
// Micro-op performing the actual read-modify-write access; takes a back
// pointer to its parent macro-op.
46 class %(class_name)sRMW : public %(base_class)sMicro
50 %(class_name)sRMW(ExtMachInst machInst, %(class_name)s *_p);
// execute() plus the split-phase initiateAcc()/completeAcc() pair used
// for timing-mode memory accesses.
52 Fault execute(ExecContext *, Trace::InstRecord *) const override;
53 Fault initiateAcc(ExecContext *,
54 Trace::InstRecord *) const override;
55 Fault completeAcc(PacketPtr, ExecContext *,
56 Trace::InstRecord *) const override;
// Declares the LR/SC macro-op class plus its memory-access micro-op.
// Mirrors AtomicMemOpDeclare but names the micro-op "<class>Micro".
// NOTE(review): interior lines (braces, access specifiers) are missing
// from this extract — embedded numbering skips; code left byte-identical.
61 def template LRSCDeclare {{
63 * Static instruction class for a load-reserved/store-conditional operation
65 class %(class_name)s : public %(base_class)s
69 %(class_name)s(ExtMachInst machInst);
// Micro-op doing the LR or SC memory access; back pointer to parent.
73 class %(class_name)sMicro : public %(base_class)sMicro
77 %(class_name)sMicro(ExtMachInst machInst, %(class_name)s *_p);
// execute() plus the split-phase initiateAcc()/completeAcc() pair used
// for timing-mode memory accesses.
79 Fault execute(ExecContext *, Trace::InstRecord *) const override;
80 Fault initiateAcc(ExecContext *,
81 Trace::InstRecord *) const override;
82 Fault completeAcc(PacketPtr, ExecContext *,
83 Trace::InstRecord *) const override;
88 // Constructor templates
// Builds the LR/SC macro-op's micro-op sequence: an optional release
// fence, the LR/SC access micro-op, and an optional acquire fence,
// implementing RISC-V .aq/.rl ordering via MemFenceMicro barriers.
89 def template LRSCMacroConstructor {{
90 %(class_name)s::%(class_name)s(ExtMachInst machInst):
91 %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
95 StaticInstPtr rel_fence;
97 StaticInstPtr acq_fence;
99 // set up release fence
101 rel_fence = new MemFenceMicro(machInst, No_OpClass);
// First in the sequence; full read+write barrier, and IsDelayedCommit
// because it is not the last micro-op.
102 rel_fence->setFlag(IsFirstMicroop);
103 rel_fence->setFlag(IsReadBarrier);
104 rel_fence->setFlag(IsWriteBarrier);
105 rel_fence->setFlag(IsDelayedCommit);
108 // set up atomic rmw op
109 lrsc = new %(class_name)sMicro(machInst, this);
// NOTE(review): IsFirstMicroop/IsLastMicroop/IsDelayedCommit below are
// mutually contradictory if all executed unconditionally; the template
// conditionals selecting them per aq/rl bits sit on lines missing from
// this extract (numbering jumps 112 -> 116 -> 118) — confirm upstream.
112 lrsc->setFlag(IsFirstMicroop);
116 lrsc->setFlag(IsLastMicroop);
118 lrsc->setFlag(IsDelayedCommit);
121 // set up acquire fence
123 acq_fence = new MemFenceMicro(machInst, No_OpClass);
124 acq_fence->setFlag(IsLastMicroop);
125 acq_fence->setFlag(IsReadBarrier);
126 acq_fence->setFlag(IsWriteBarrier);
// NOTE(review): the three alternative microop lists below are clearly
// branches of an if/else over the aq/rl bits whose guard lines are
// missing here; the four-way case (neither fence) is also not visible.
130 microops = {rel_fence, lrsc, acq_fence};
132 microops = {rel_fence, lrsc};
134 microops = {lrsc, acq_fence};
// Constructor for the LR/SC access micro-op; delegates to the micro-op
// base class. Body (flag setup, if any) is on lines missing from this
// extract — code left byte-identical.
141 def template LRSCMicroConstructor {{
142 %(class_name)s::%(class_name)sMicro::%(class_name)sMicro(
143 ExtMachInst machInst, %(class_name)s *_p)
144 : %(base_class)sMicro("%(mnemonic)s", machInst, %(op_class)s)
// Builds the AMO macro-op's micro-op sequence: optional release fence,
// the RMW micro-op, optional acquire fence — same shape as
// LRSCMacroConstructor but using the <class>RMW micro-op.
150 def template AtomicMemOpMacroConstructor {{
151 %(class_name)s::%(class_name)s(ExtMachInst machInst)
152 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
156 StaticInstPtr rel_fence;
157 StaticInstPtr rmw_op;
158 StaticInstPtr acq_fence;
160 // set up release fence
162 rel_fence = new MemFenceMicro(machInst, No_OpClass);
// Full read+write barrier; delayed commit since it is not last.
163 rel_fence->setFlag(IsFirstMicroop);
164 rel_fence->setFlag(IsReadBarrier);
165 rel_fence->setFlag(IsWriteBarrier);
166 rel_fence->setFlag(IsDelayedCommit);
169 // set up atomic rmw op
170 rmw_op = new %(class_name)sRMW(machInst, this);
// NOTE(review): as in LRSCMacroConstructor, the guards choosing among
// these contradictory flags (per aq/rl) are on lines missing from this
// extract (numbering jumps 173 -> 177 -> 179) — confirm upstream.
173 rmw_op->setFlag(IsFirstMicroop);
177 rmw_op->setFlag(IsLastMicroop);
179 rmw_op->setFlag(IsDelayedCommit);
182 // set up acquire fence
184 acq_fence = new MemFenceMicro(machInst, No_OpClass);
185 acq_fence->setFlag(IsLastMicroop);
186 acq_fence->setFlag(IsReadBarrier);
187 acq_fence->setFlag(IsWriteBarrier);
// NOTE(review): alternative microop lists — if/else guard lines over the
// aq/rl bits are not visible in this extract.
191 microops = {rel_fence, rmw_op, acq_fence};
193 microops = {rel_fence, rmw_op};
195 microops = {rmw_op, acq_fence};
// Constructor for the AMO RMW micro-op. Mnemonic gets a "[l]" suffix to
// mark the micro-op in traces. Clears the load/store flags the base
// class may have set and marks the op atomic instead.
202 def template AtomicMemOpRMWConstructor {{
203 %(class_name)s::%(class_name)sRMW::%(class_name)sRMW(
204 ExtMachInst machInst, %(class_name)s *_p)
205 : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
209 // overwrite default flags: an AMO is a single atomic access, not a
// separate load and store.
210 flags[IsLoad] = false;
211 flags[IsStore] = false;
212 flags[IsAtomic] = true;
216 // execute() templates
// Atomic/functional-mode execute() for LR: compute EA (on a missing
// line — ea_code substitution not visible), do a little-endian atomic
// read, then run memacc_code on success. Fault short-circuits each step.
218 def template LoadReservedExecute {{
220 %(class_name)s::%(class_name)sMicro::execute(
221 ExecContext *xc, Trace::InstRecord *traceData) const
224 Fault fault = NoFault;
230 if (fault == NoFault) {
231 fault = readMemAtomicLE(xc, traceData, EA, Mem, memAccessFlags);
// On successful read, the %(memacc_code)s / writeback lines follow on
// lines missing from this extract.
235 if (fault == NoFault) {
// Atomic/functional-mode execute() for SC: run memacc_code, write the
// value conditionally, then invert the success flag for the RISC-V
// convention (0 = success) before postacc/writeback.
243 def template StoreCondExecute {{
244 Fault %(class_name)s::%(class_name)sMicro::execute(ExecContext *xc,
245 Trace::InstRecord *traceData) const
248 Fault fault = NoFault;
255 if (fault == NoFault) {
259 if (fault == NoFault) {
// Trailing arguments (the &result out-parameter) are on a line missing
// from this extract.
260 fault = writeMemAtomicLE(xc, traceData, Mem, EA, memAccessFlags,
262 // RISC-V has the opposite convention gem5 has for success flags,
263 // so we invert the result here.
267 if (fault == NoFault) {
271 if (fault == NoFault) {
// Atomic/functional-mode execute() for an AMO: issue the little-endian
// atomic RMW (the AtomicOpFunctor built from amoop_code is passed on a
// line missing from this extract), then postacc/writeback on success.
279 def template AtomicMemOpRMWExecute {{
280 Fault %(class_name)s::%(class_name)sRMW::execute(ExecContext *xc,
281 Trace::InstRecord *traceData) const
284 Fault fault = NoFault;
293 if (fault == NoFault) {
294 fault = amoMemAtomicLE(xc, traceData, Mem, EA, memAccessFlags,
299 if (fault == NoFault) {
303 if (fault == NoFault) {
311 // initiateAcc() templates
// Timing-mode initiate for LR: compute EA (ea_code lines missing from
// this extract) and issue the split-phase read; completion is handled
// in LoadReservedCompleteAcc.
313 def template LoadReservedInitiateAcc {{
315 %(class_name)s::%(class_name)sMicro::initiateAcc(ExecContext *xc,
316 Trace::InstRecord *traceData) const
319 Fault fault = NoFault;
325 if (fault == NoFault) {
326 fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
// Timing-mode initiate for SC: run memacc_code then issue the timing
// write. The nullptr result pointer defers the success/fail outcome to
// completeAcc (read from the packet's extra data there).
333 def template StoreCondInitiateAcc {{
335 %(class_name)s::%(class_name)sMicro::initiateAcc(ExecContext *xc,
336 Trace::InstRecord *traceData) const
339 Fault fault = NoFault;
345 if (fault == NoFault) {
349 if (fault == NoFault) {
350 fault = writeMemTimingLE(xc, traceData, Mem, EA,
351 memAccessFlags, nullptr);
354 if (fault == NoFault) {
// Timing-mode initiate for an AMO: issue the split-phase AMO request
// (the AtomicOpFunctor argument continues on a line missing from this
// extract); result is consumed in AtomicMemOpRMWCompleteAcc.
362 def template AtomicMemOpRMWInitiateAcc {{
364 %(class_name)s::%(class_name)sRMW::initiateAcc(ExecContext *xc,
365 Trace::InstRecord *traceData) const
368 Fault fault = NoFault;
377 if (fault == NoFault) {
378 fault = initiateMemAMO(xc, traceData, EA, Mem, memAccessFlags,
386 // completeAcc() templates
// Timing-mode completion for LR: unpack the little-endian payload from
// the response packet into Mem, then run memacc_code / writeback (those
// substitution lines are missing from this extract).
388 def template LoadReservedCompleteAcc {{
390 %(class_name)s::%(class_name)sMicro::completeAcc(PacketPtr pkt,
391 ExecContext *xc, Trace::InstRecord *traceData) const
393 Fault fault = NoFault;
398 getMemLE(pkt, Mem, traceData);
400 if (fault == NoFault) {
404 if (fault == NoFault) {
// Timing-mode completion for SC: the cache reports success in the
// request's extra data using gem5's convention (nonzero = stored);
// RISC-V wants 0 = success, hence the inversion.
412 def template StoreCondCompleteAcc {{
413 Fault %(class_name)s::%(class_name)sMicro::completeAcc(Packet *pkt,
414 ExecContext *xc, Trace::InstRecord *traceData) const
416 Fault fault = NoFault;
420 // RISC-V has the opposite convention gem5 has for success flags,
421 // so we invert the result here.
422 uint64_t result = !pkt->req->getExtraData();
// postacc_code / writeback follow on lines missing from this extract.
424 if (fault == NoFault) {
428 if (fault == NoFault) {
// Timing-mode completion for an AMO: unpack the old memory value (the
// AMO returns the pre-RMW data) from the packet, then run memacc /
// postacc code, which sits on lines missing from this extract.
436 def template AtomicMemOpRMWCompleteAcc {{
437 Fault %(class_name)s::%(class_name)sRMW::completeAcc(Packet *pkt,
438 ExecContext *xc, Trace::InstRecord *traceData) const
440 Fault fault = NoFault;
445 getMemLE(pkt, Mem, traceData);
447 if (fault == NoFault) {
451 if (fault == NoFault) {
459 // LR/SC/AMO decode formats
# Decode format for LR: emits the macro-op (declare + constructor +
# decode) and the micro-op (constructor + the three exec templates).
# NOTE(review): interior lines are missing from this extract (embedded
# numbering skips), e.g. macro_ea_code's definition and the inst_flags
# argument to the macro InstObjParams; code left byte-identical.
461 def format LoadReserved(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
462 mem_flags=[], inst_flags=[]) {{
464 macro_inst_flags = []
465 macro_iop = InstObjParams(name, Name, 'LoadReserved', macro_ea_code,
467 header_output = LRSCDeclare.subst(macro_iop)
468 decoder_output = LRSCMacroConstructor.subst(macro_iop)
469 decode_block = BasicDecode.subst(macro_iop)
# Normalize possibly-scalar flag arguments to lists for the join below.
473 mem_flags = makeList(mem_flags)
474 inst_flags = makeList(inst_flags)
475 iop = InstObjParams(name, Name, 'LoadReserved',
476 {'ea_code': ea_code, 'memacc_code': memacc_code,
477 'postacc_code': postacc_code}, inst_flags)
# OR the Request:: flags into memAccessFlags in the generated ctor.
# NOTE(review): with empty mem_flags the join is empty and this would
# emit "| ;" — presumably guarded by an `if mem_flags:` on a missing
# line; confirm upstream.
478 iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
479 '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'
481 decoder_output += LRSCMicroConstructor.subst(iop)
482 decode_block += BasicDecode.subst(iop)
483 exec_output += LoadReservedExecute.subst(iop) \
484 + LoadReservedInitiateAcc.subst(iop) \
485 + LoadReservedCompleteAcc.subst(iop)
# Decode format for SC: same structure as LoadReserved but substitutes
# the StoreCond exec templates. Interior lines (macro_ea_code, macro
# inst_flags argument) are missing from this extract.
488 def format StoreCond(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
489 mem_flags=[], inst_flags=[]) {{
491 macro_inst_flags = []
492 macro_iop = InstObjParams(name, Name, 'StoreCond', macro_ea_code,
494 header_output = LRSCDeclare.subst(macro_iop)
495 decoder_output = LRSCMacroConstructor.subst(macro_iop)
496 decode_block = BasicDecode.subst(macro_iop)
# Normalize possibly-scalar flag arguments to lists.
500 mem_flags = makeList(mem_flags)
501 inst_flags = makeList(inst_flags)
502 iop = InstObjParams(name, Name, 'StoreCond',
503 {'ea_code': ea_code, 'memacc_code': memacc_code,
504 'postacc_code': postacc_code}, inst_flags)
# NOTE(review): empty mem_flags would emit "| ;" — presumably guarded on
# a line missing from this extract; confirm upstream.
505 iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
506 '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'
508 decoder_output += LRSCMicroConstructor.subst(iop)
509 decode_block += BasicDecode.subst(iop)
510 exec_output += StoreCondExecute.subst(iop) \
511 + StoreCondInitiateAcc.subst(iop) \
512 + StoreCondCompleteAcc.subst(iop)
# Decode format for AMOs: takes an extra amoop_code snippet (the
# read-modify-write operation itself) alongside memacc/postacc code, and
# substitutes the AtomicMemOp declare/constructor/exec templates.
# Interior lines (macro_ea_code, the rmw InstObjParams ea_code entry and
# inst_flags argument) are missing from this extract.
515 def format AtomicMemOp(memacc_code, amoop_code, postacc_code={{ }},
516 ea_code={{EA = Rs1;}}, mem_flags=[], inst_flags=[]) {{
518 macro_inst_flags = []
519 macro_iop = InstObjParams(name, Name, 'AtomicMemOp', macro_ea_code,
521 header_output = AtomicMemOpDeclare.subst(macro_iop)
522 decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
523 decode_block = BasicDecode.subst(macro_iop)
# Normalize possibly-scalar flag arguments to lists for the RMW microop.
527 rmw_mem_flags = makeList(mem_flags)
528 rmw_inst_flags = makeList(inst_flags)
529 rmw_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
531 'memacc_code': memacc_code,
532 'postacc_code': postacc_code,
533 'amoop_code': amoop_code},
# NOTE(review): empty rmw_mem_flags would emit "| ;" — presumably
# guarded on a line missing from this extract; confirm upstream.
536 rmw_iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
537 '|'.join(['Request::%s' % flag for flag in rmw_mem_flags]) + ';'
539 decoder_output += AtomicMemOpRMWConstructor.subst(rmw_iop)
540 decode_block += BasicDecode.subst(rmw_iop)
541 exec_output += AtomicMemOpRMWExecute.subst(rmw_iop) \
542 + AtomicMemOpRMWInitiateAcc.subst(rmw_iop) \
543 + AtomicMemOpRMWCompleteAcc.subst(rmw_iop)