arch,cpu: Enforce using accessors to get at src/destRegIdx.
src/arch/arm/isa/templates/mem64.isa (gem5.git)
// -*- mode:c++ -*-

// Copyright (c) 2011-2014, 2017, 2019 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

let {{
    SPAlignmentCheckCode = '''
        if (baseIsSP && bits(XBase, 3, 0) &&
            SPAlignmentCheckEnabled(xc->tcBase())) {
            return std::make_shared<SPAlignmentFault>();
        }
    '''
}};
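
// SPAlignmentCheckCode is spliced into the effective-address code of the
// templates below wherever the base register may be the stack pointer:
// AArch64 requires a 16-byte-aligned SP for SP-relative accesses when SP
// alignment checking is enabled, hence the test of address bits 3:0.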

def template Load64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomicLE(xc, traceData, EA, Mem, memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
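
// Each memory instruction gets two implementations of its access: the
// execute() templates perform the whole access at once for atomic-mode
// CPU models, while timing-mode models call initiateAcc() to issue the
// request and completeAcc() (further below) when the response arrives.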

def template Load64FpExecute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomicLE(xc, traceData, EA, Mem, memAccessFlags);
        }

        if (fault == NoFault) {
            %(memacc_code)s;
            %(op_wb)s;
        }

        return fault;
    }
}};

def template Store64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomicLE(xc, traceData, Mem, EA,
                                     memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template Store64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTimingLE(xc, traceData, Mem, EA, memAccessFlags,
                                     NULL);
        }

        return fault;
    }
}};

def template StoreEx64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        uint64_t writeResult = 0;
        if (fault == NoFault) {
            fault = writeMemAtomicLE(xc, traceData, Mem, EA, memAccessFlags,
                                     &writeResult);
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template StoreEx64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTimingLE(xc, traceData, Mem, EA, memAccessFlags,
                                     NULL);
        }

        return fault;
    }
}};

def template Load64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};

def template Load64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        // ARM instructions will not have a pkt if the predicate is false
        getMemLE(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template Store64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};
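
// An ordinary store has nothing left to do once the write response
// arrives, so its completeAcc() is a no-op; exclusive stores, by
// contrast, must collect the monitor's result, as the next template
// shows.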

def template StoreEx64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        uint64_t writeResult = pkt->req->getExtraData();
        %(postacc_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
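
// The memory system reports whether an exclusive store succeeded through
// the request's extra data; the %(postacc_code)s expansion is expected to
// turn writeResult into the architectural status register value.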

def template DCStore64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _base,
                       MiscRegIndex _dest, uint64_t _imm);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

def template DCStore64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base,
                                   MiscRegIndex _dest, uint64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _base, _dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};

def template DCStore64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, NULL, EA,
                                   op_size, memAccessFlags, NULL,
                                   std::vector<bool>(op_size, true));
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template DCStore64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, NULL, EA, op_size,
                                   memAccessFlags, NULL,
                                   std::vector<bool>(op_size, true));
        }

        return fault;
    }
}};
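
// The DCStore templates pass a null data pointer and an all-true byte
// enable vector covering op_size bytes; gem5's write helpers treat a null
// source buffer as zero data, so these templates back block-zeroing cache
// maintenance operations such as DC ZVA.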


def template LoadStoreImm64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, int64_t _imm);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

def template LoadStoreImmU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

def template LoadStoreImmDU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
                int64_t _imm = 0, bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

def template StoreImmDEx64Declare {{
    /**
     * Static instruction class for "%(mnemonic)s".
     */
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
                IntRegIndex _base, int64_t _imm = 0);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;
    };
}};


def template LoadStoreReg64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
                ArmExtendType _type, uint32_t _shiftAmt);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

def template LoadStoreRegU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
                ArmExtendType _type, uint32_t _shiftAmt,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

def template LoadStoreRaw64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

def template LoadStoreEx64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base, IntRegIndex _result);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

def template LoadStoreLit64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

def template LoadStoreLitU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

def template LoadStoreImm64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};
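
// When %(use_uops)d is nonzero the instruction is microcoded: uops[0]
// performs the memory access and uops[1], expanded from %(wb_decl)s,
// performs the base-register writeback used by pre- and post-indexed
// addressing modes.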

def template LoadStoreImmU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template LoadStoreImmDU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
            int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template StoreImmDEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
            IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _result, _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};


def template LoadStoreReg64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
                                   _type, _shiftAmt);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

def template LoadStoreRegU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template LoadStoreRaw64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
    {
        %(constructor)s;
    }
}};

def template LoadStoreEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _result)
    {
        %(constructor)s;
    }
}};

def template LoadStoreLit64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

def template LoadStoreLitU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

// Atomic operations in memory

def template AmoOpExecute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        %(usrDecl)s;

        %(memacc_code)s;

        %(amo_code)s;
        assert(amo_op);

        const Fault fault = amoMemAtomicLE(xc, traceData, Mem, EA,
                                           memAccessFlags, amo_op);

        if (fault == NoFault) {
            %(postacc_code)s;
            %(op_wb)s;
        }

        return fault;
    }
}};
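
// The %(amo_code)s expansion builds amo_op, an AtomicOpFunctor that the
// memory system applies atomically at the target address; the value
// previously held in memory comes back in Mem for the destination
// register.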

def template AmoOpInitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        %(usrDecl)s;

        %(memacc_code)s;

        %(amo_code)s;

        assert(amo_op);
        return initiateMemAMO(xc, traceData, EA, Mem, memAccessFlags,
                              amo_op);
    }
}};

def template AmoOpCompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        %(op_decl)s;
        %(op_rd)s;

        // ARM instructions will not have a pkt if the predicate is false
        getMemLE(pkt, Mem, traceData);

        %(postacc_code)s;

        %(op_wb)s;

        return NoFault;
    }
}};

def template AmoOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base, IntRegIndex _result);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};


def template AmoOpConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _result)
    {
        %(constructor)s;
        flags[IsStore] = false;
        flags[IsLoad] = false;
    }
}};

def template AmoPairOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        uint32_t d2_src;
        uint32_t r2_src;
        uint32_t r2_dst;

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base, IntRegIndex _result);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};


def template AmoPairOpConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _result)
    {
        %(constructor)s;

        uint32_t d2 = RegId(IntRegClass, dest).index() + 1;
        uint32_t r2 = RegId(IntRegClass, result).index() + 1;

        d2_src = _numSrcRegs;
        setSrcRegIdx(_numSrcRegs++, RegId(IntRegClass, d2));
        r2_src = _numSrcRegs;
        setSrcRegIdx(_numSrcRegs++, RegId(IntRegClass, r2));
        r2_dst = _numDestRegs;
        setDestRegIdx(_numDestRegs++, RegId(IntRegClass, r2));
        flags[IsStore] = false;
        flags[IsLoad] = false;
    }
}};
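
// Pair atomics (e.g. CASP) operate on register pairs, but the decoder
// only passes the even register of each pair. The constructor therefore
// appends the odd registers (index + 1) as extra operands through the
// setSrcRegIdx/setDestRegIdx accessors, recording their operand slots in
// d2_src, r2_src and r2_dst for the execute code to use.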

def template AmoArithmeticOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        bool isXZR;

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base, IntRegIndex _result);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

def template AmoArithmeticOpConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _result)
    {
        %(constructor)s;
        isXZR = false;
        uint32_t r2 = RegId(IntRegClass, dest).index();
        flags[IsStore] = false;
        flags[IsLoad] = false;
        if (r2 == 31) {
            isXZR = true;
        }
    }
}};
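
// Integer register index 31 encodes the zero register (XZR) here, so a
// destination of 31 marks the atomic as its store-only form; isXZR
// presumably lets the execute code skip the result writeback.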