arm: Delete authors lists from the arm files.
[gem5.git] / src / arch / arm / isa / templates / mem64.isa
1 // -*- mode:c++ -*-
2
3 // Copyright (c) 2011-2014, 2017, 2019 ARM Limited
4 // All rights reserved
5 //
6 // The license below extends only to copyright in the software and shall
7 // not be construed as granting a license to any other intellectual
8 // property including but not limited to intellectual property relating
9 // to a hardware implementation of the functionality of the software
10 // licensed hereunder. You may use the software subject to the license
11 // terms below provided that you ensure that this notice is replicated
12 // unmodified and in its entirety in all distributions of the software,
13 // modified or unmodified, in source code or in binary form.
14 //
15 // Redistribution and use in source and binary forms, with or without
16 // modification, are permitted provided that the following conditions are
17 // met: redistributions of source code must retain the above copyright
18 // notice, this list of conditions and the following disclaimer;
19 // redistributions in binary form must reproduce the above copyright
20 // notice, this list of conditions and the following disclaimer in the
21 // documentation and/or other materials provided with the distribution;
22 // neither the name of the copyright holders nor the names of its
23 // contributors may be used to endorse or promote products derived from
24 // this software without specific prior written permission.
25 //
26 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
27 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
28 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
29 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
30 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
31 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
32 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
33 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
34 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
35 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
36 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
37
// Reusable C++ snippet spliced into load/store EA code: when the base
// register is SP, fault if SP is not 16-byte aligned (bits 3:0 nonzero)
// while SP alignment checking is enabled for the current context.
38 let {{
39     SPAlignmentCheckCode = '''
40         if (baseIsSP && bits(XBase, 3, 0) &&
41            SPAlignmentCheckEnabled(xc->tcBase())) {
42             return std::make_shared<SPAlignmentFault>();
43         }
44     '''
45 }};
46
// Atomic-mode execute() for 64-bit-mode integer loads: compute the
// effective address (ea_code), do a little-endian atomic read into Mem,
// run memacc_code, then write back destinations only if no fault occurred.
// Note memacc_code runs inside the same guard as the read, after it.
47 def template Load64Execute {{
48     Fault %(class_name)s::execute(ExecContext *xc,
49                                   Trace::InstRecord *traceData) const
50     {
51         Addr EA;
52         Fault fault = NoFault;
53 
54         %(op_decl)s;
55         %(op_rd)s;
56         %(ea_code)s;
57 
58         if (fault == NoFault) {
59             fault = readMemAtomicLE(xc, traceData, EA, Mem, memAccessFlags);
60             %(memacc_code)s;
61         }
62 
63         if (fault == NoFault) {
64             %(op_wb)s;
65         }
66 
67         return fault;
68     }
69 }};
70
// Atomic-mode execute() for FP/SIMD loads. Unlike Load64Execute, the
// memacc_code runs only after the read is known to be fault-free, so
// destination state is not touched by a faulting access.
71 def template Load64FpExecute {{
72     Fault %(class_name)s::execute(ExecContext *xc,
73                                   Trace::InstRecord *traceData) const
74     {
75         Addr EA;
76         Fault fault = NoFault;
77 
78         %(op_decl)s;
79         %(op_rd)s;
80         %(ea_code)s;
81 
82         if (fault == NoFault) {
83             fault = readMemAtomicLE(xc, traceData, EA, Mem, memAccessFlags);
84         }
85 
86         if (fault == NoFault) {
87             %(memacc_code)s;
88             %(op_wb)s;
89         }
90 
91         return fault;
92     }
93 }};
94
// Atomic-mode execute() for stores: compute EA, let memacc_code populate
// Mem from source registers, perform the little-endian atomic write
// (no write result requested: NULL), then write back (e.g. base
// register updates) if everything succeeded.
95 def template Store64Execute {{
96     Fault %(class_name)s::execute(ExecContext *xc,
97                                   Trace::InstRecord *traceData) const
98     {
99         Addr EA;
100         Fault fault = NoFault;
101 
102         %(op_decl)s;
103         %(op_rd)s;
104         %(ea_code)s;
105 
106         if (fault == NoFault) {
107             %(memacc_code)s;
108         }
109 
110         if (fault == NoFault) {
111             fault = writeMemAtomicLE(xc, traceData, Mem, EA,
112                                      memAccessFlags, NULL);
113         }
114 
115         if (fault == NoFault) {
116             %(op_wb)s;
117         }
118 
119         return fault;
120     }
121 }};
122
// Timing-mode initiateAcc() for stores: builds Mem and issues the timing
// write. No writeback here; completion is handled by Store64CompleteAcc.
123 def template Store64InitiateAcc {{
124     Fault %(class_name)s::initiateAcc(ExecContext *xc,
125                                       Trace::InstRecord *traceData) const
126     {
127         Addr EA;
128         Fault fault = NoFault;
129 
130         %(op_decl)s;
131         %(op_rd)s;
132         %(ea_code)s;
133 
134         if (fault == NoFault) {
135             %(memacc_code)s;
136         }
137 
138         if (fault == NoFault) {
139             fault = writeMemTimingLE(xc, traceData, Mem, EA, memAccessFlags,
140                                      NULL);
141         }
142 
143         return fault;
144     }
145 }};
146
// Atomic-mode execute() for store-exclusive: the write requests a result
// (writeResult) indicating whether the exclusive store succeeded, which
// postacc_code consumes (e.g. to set the status register) before writeback.
147 def template StoreEx64Execute {{
148     Fault %(class_name)s::execute(ExecContext *xc,
149                                   Trace::InstRecord *traceData) const
150     {
151         Addr EA;
152         Fault fault = NoFault;
153 
154         %(op_decl)s;
155         %(op_rd)s;
156         %(ea_code)s;
157 
158         if (fault == NoFault) {
159             %(memacc_code)s;
160         }
161 
162         uint64_t writeResult = 0;
163         if (fault == NoFault) {
164             fault = writeMemAtomicLE(xc, traceData, Mem, EA, memAccessFlags,
165                                      &writeResult);
166         }
167 
168         if (fault == NoFault) {
169             %(postacc_code)s;
170         }
171 
172         if (fault == NoFault) {
173             %(op_wb)s;
174         }
175 
176         return fault;
177     }
178 }};
179
// Timing-mode initiateAcc() for store-exclusive: issues the timing write;
// the exclusive result is recovered later in StoreEx64CompleteAcc from
// the packet's request extra data.
180 def template StoreEx64InitiateAcc {{
181     Fault %(class_name)s::initiateAcc(ExecContext *xc,
182                                       Trace::InstRecord *traceData) const
183     {
184         Addr EA;
185         Fault fault = NoFault;
186 
187         %(op_decl)s;
188         %(op_rd)s;
189         %(ea_code)s;
190 
191         if (fault == NoFault) {
192             %(memacc_code)s;
193         }
194 
195         if (fault == NoFault) {
196             fault = writeMemTimingLE(xc, traceData, Mem, EA, memAccessFlags,
197                                      NULL);
198         }
199 
200         return fault;
201     }
202 }};
203
// Timing-mode initiateAcc() for loads: only source operands are declared
// (op_src_decl), the EA is computed, and the timing read is initiated.
// Data arrives later via Load64CompleteAcc.
204 def template Load64InitiateAcc {{
205     Fault %(class_name)s::initiateAcc(ExecContext *xc,
206                                       Trace::InstRecord *traceData) const
207     {
208         Addr EA;
209         Fault fault = NoFault;
210 
211         %(op_src_decl)s;
212         %(op_rd)s;
213         %(ea_code)s;
214 
215         if (fault == NoFault) {
216             fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
217         }
218 
219         return fault;
220     }
221 }};
222
// Timing-mode completeAcc() for loads: unpack the little-endian response
// packet into Mem, run memacc_code, then write back destinations.
223 def template Load64CompleteAcc {{
224     Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
225                                       Trace::InstRecord *traceData) const
226     {
227         Fault fault = NoFault;
228 
229         %(op_decl)s;
230         %(op_rd)s;
231 
232         // ARM instructions will not have a pkt if the predicate is false
233         getMemLE(pkt, Mem, traceData);
234 
235         if (fault == NoFault) {
236             %(memacc_code)s;
237         }
238 
239         if (fault == NoFault) {
240             %(op_wb)s;
241         }
242 
243         return fault;
244     }
245 }};
246
// Timing-mode completeAcc() for ordinary stores: nothing left to do once
// the write has been sent, so this is a no-op that reports success.
247 def template Store64CompleteAcc {{
248     Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
249                                       Trace::InstRecord *traceData) const
250     {
251         return NoFault;
252     }
253 }};
254
// Timing-mode completeAcc() for store-exclusive: recover the exclusive
// write result from the request's extra data, run postacc_code on it,
// then write back (e.g. the status register).
255 def template StoreEx64CompleteAcc {{
256     Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
257                                       Trace::InstRecord *traceData) const
258     {
259         Fault fault = NoFault;
260 
261         %(op_decl)s;
262         %(op_rd)s;
263 
264         uint64_t writeResult = pkt->req->getExtraData();
265         %(postacc_code)s;
266 
267         if (fault == NoFault) {
268             %(op_wb)s;
269         }
270 
271         return fault;
272     }
273 }};
274
// Class declaration for cache-maintenance ops (e.g. DC) modeled as
// stores: base integer register, a MiscRegIndex destination, and an
// immediate. Declares the usual execute/initiateAcc/completeAcc trio.
275 def template DCStore64Declare {{
276     class %(class_name)s : public %(base_class)s
277     {
278       public:
279 
280         /// Constructor.
281         %(class_name)s(ExtMachInst machInst, IntRegIndex _base,
282                        MiscRegIndex _dest, uint64_t _imm);
283 
284         Fault execute(ExecContext *, Trace::InstRecord *) const override;
285         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
286         Fault completeAcc(PacketPtr, ExecContext *,
287                           Trace::InstRecord *) const override;
288 
289         void
290         annotateFault(ArmFault *fault) override
291         {
292             %(fa_code)s
293         }
294     };
295 }};
296
// Constructor for DC-store ops; these are never split into micro-ops,
// which the assert on use_uops enforces at construction time.
297 def template DCStore64Constructor {{
298     %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base,
299                                    MiscRegIndex _dest, uint64_t _imm)
300          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
301                  _base, _dest, _imm)
302     {
303         %(constructor)s;
304         assert(!%(use_uops)d);
305     }
306 }};
307
// Atomic-mode execute() for DC-store ops: writes op_size bytes with a
// NULL data pointer — only the address and access flags matter for a
// cache-maintenance operation, not the data payload.
308 def template DCStore64Execute {{
309     Fault %(class_name)s::execute(ExecContext *xc,
310                                   Trace::InstRecord *traceData) const
311     {
312         Addr EA;
313         Fault fault = NoFault;
314 
315         %(op_decl)s;
316         %(op_rd)s;
317         %(ea_code)s;
318 
319 
320         if (fault == NoFault) {
321             %(memacc_code)s;
322         }
323 
324         if (fault == NoFault) {
325             fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
326         }
327 
328         if (fault == NoFault) {
329             %(op_wb)s;
330         }
331 
332         return fault;
333     }
334 }};
335
// Timing-mode initiateAcc() for DC-store ops: same NULL-data write as
// the atomic path; no writeback stage here.
336 def template DCStore64InitiateAcc {{
337     Fault %(class_name)s::initiateAcc(ExecContext *xc,
338                                       Trace::InstRecord *traceData) const
339     {
340         Addr EA;
341         Fault fault = NoFault;
342 
343         %(op_decl)s;
344         %(op_rd)s;
345         %(ea_code)s;
346 
347         if (fault == NoFault) {
348             %(memacc_code)s;
349         }
350 
351         if (fault == NoFault) {
352             fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
353         }
354 
355         return fault;
356     }
357 }};
358
359
// Class declaration for load/store with an immediate offset
// (dest, base, signed immediate).
360 def template LoadStoreImm64Declare {{
361     class %(class_name)s : public %(base_class)s
362     {
363       public:
364 
365         /// Constructor.
366         %(class_name)s(ExtMachInst machInst,
367                 IntRegIndex _dest, IntRegIndex _base, int64_t _imm);
368 
369         Fault execute(ExecContext *, Trace::InstRecord *) const override;
370         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
371         Fault completeAcc(PacketPtr, ExecContext *,
372                           Trace::InstRecord *) const override;
373 
374         void
375         annotateFault(ArmFault *fault) override
376         {
377             %(fa_code)s
378         }
379     };
380 }};
381
// Immediate-offset variant carrying extra attributes: noAlloc
// (non-temporal hint), exclusive, and acrel (acquire/release ordering).
382 def template LoadStoreImmU64Declare {{
383     class %(class_name)s : public %(base_class)s
384     {
385       public:
386 
387         /// Constructor.
388         %(class_name)s(ExtMachInst machInst,
389                 IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
390                 bool noAlloc = false, bool exclusive = false,
391                 bool acrel = false);
392 
393         Fault execute(ExecContext *, Trace::InstRecord *) const override;
394         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
395         Fault completeAcc(PacketPtr, ExecContext *,
396                           Trace::InstRecord *) const override;
397 
398         void
399         annotateFault(ArmFault *fault) override
400         {
401             %(fa_code)s
402         }
403     };
404 }};
405
// Dual-destination immediate-offset variant (register pair: _dest and
// _dest2) with the same noAlloc/exclusive/acrel attributes.
406 def template LoadStoreImmDU64Declare {{
407     class %(class_name)s : public %(base_class)s
408     {
409       public:
410 
411         /// Constructor.
412         %(class_name)s(ExtMachInst machInst,
413                 IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
414                 int64_t _imm = 0, bool noAlloc = false, bool exclusive = false,
415                 bool acrel = false);
416 
417         Fault execute(ExecContext *, Trace::InstRecord *) const override;
418         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
419         Fault completeAcc(PacketPtr, ExecContext *,
420                           Trace::InstRecord *) const override;
421 
422         void
423         annotateFault(ArmFault *fault) override
424         {
425             %(fa_code)s
426         }
427     };
428 }};
429
// Store-exclusive-pair declaration: two data registers plus a _result
// register that receives the exclusive success/fail status.
430 def template StoreImmDEx64Declare {{
431     /**
432      * Static instruction class for "%(mnemonic)s".
433      */
434     class %(class_name)s : public %(base_class)s
435     {
436       public:
437 
438         /// Constructor.
439         %(class_name)s(ExtMachInst machInst,
440                 IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
441                 IntRegIndex _base, int64_t _imm = 0);
442 
443         Fault execute(ExecContext *, Trace::InstRecord *) const override;
444         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
445         Fault completeAcc(PacketPtr, ExecContext *,
446                           Trace::InstRecord *) const override;
447     };
448 }};
449
450
// Register-offset variant: offset register with an extend type and
// shift amount applied to it.
451 def template LoadStoreReg64Declare {{
452     class %(class_name)s : public %(base_class)s
453     {
454       public:
455 
456         /// Constructor.
457         %(class_name)s(ExtMachInst machInst,
458                 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
459                 ArmExtendType _type, uint32_t _shiftAmt);
460 
461         Fault execute(ExecContext *, Trace::InstRecord *) const override;
462         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
463         Fault completeAcc(PacketPtr, ExecContext *,
464                           Trace::InstRecord *) const override;
465 
466         void
467         annotateFault(ArmFault *fault) override
468         {
469             %(fa_code)s
470         }
471     };
472 }};
473
// Register-offset variant with noAlloc/exclusive/acrel attributes.
474 def template LoadStoreRegU64Declare {{
475     class %(class_name)s : public %(base_class)s
476     {
477       public:
478 
479         /// Constructor.
480         %(class_name)s(ExtMachInst machInst,
481                 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
482                 ArmExtendType _type, uint32_t _shiftAmt,
483                 bool noAlloc = false, bool exclusive = false,
484                 bool acrel = false);
485 
486         Fault execute(ExecContext *, Trace::InstRecord *) const override;
487         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
488         Fault completeAcc(PacketPtr, ExecContext *,
489                           Trace::InstRecord *) const override;
490 
491         void
492         annotateFault(ArmFault *fault) override
493         {
494             %(fa_code)s
495         }
496     };
497 }};
498
// Minimal variant: destination and base register only, no offset
// (used e.g. where the base register alone supplies the address).
499 def template LoadStoreRaw64Declare {{
500     class %(class_name)s : public %(base_class)s
501     {
502       public:
503 
504         /// Constructor.
505         %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
506                        IntRegIndex _base);
507 
508         Fault execute(ExecContext *, Trace::InstRecord *) const override;
509         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
510         Fault completeAcc(PacketPtr, ExecContext *,
511                           Trace::InstRecord *) const override;
512 
513         void
514         annotateFault(ArmFault *fault) override
515         {
516             %(fa_code)s
517         }
518     };
519 }};
520
// Exclusive-access variant: adds a _result register (exclusive
// success/fail status) to the raw dest/base form.
521 def template LoadStoreEx64Declare {{
522     class %(class_name)s : public %(base_class)s
523     {
524       public:
525 
526         /// Constructor.
527         %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
528                        IntRegIndex _base, IntRegIndex _result);
529 
530         Fault execute(ExecContext *, Trace::InstRecord *) const override;
531         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
532         Fault completeAcc(PacketPtr, ExecContext *,
533                           Trace::InstRecord *) const override;
534 
535         void
536         annotateFault(ArmFault *fault) override
537         {
538             %(fa_code)s
539         }
540     };
541 }};
542
// PC-literal variant: destination register and immediate only (no base
// register parameter).
543 def template LoadStoreLit64Declare {{
544     class %(class_name)s : public %(base_class)s
545     {
546       public:
547 
548         /// Constructor.
549         %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);
550 
551         Fault execute(ExecContext *, Trace::InstRecord *) const override;
552         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
553         Fault completeAcc(PacketPtr, ExecContext *,
554                           Trace::InstRecord *) const override;
555 
556         void
557         annotateFault(ArmFault *fault) override
558         {
559             %(fa_code)s
560         }
561     };
562 }};
563
// PC-literal variant with noAlloc/exclusive/acrel attributes.
564 def template LoadStoreLitU64Declare {{
565     class %(class_name)s : public %(base_class)s
566     {
567       public:
568 
569         /// Constructor.
570         %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
571                 bool noAlloc = false, bool exclusive = false,
572                 bool acrel = false);
573 
574         Fault execute(ExecContext *, Trace::InstRecord *) const override;
575         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
576         Fault completeAcc(PacketPtr, ExecContext *,
577                           Trace::InstRecord *) const override;
578 
579         void
580         annotateFault(ArmFault *fault) override
581         {
582             %(fa_code)s
583         }
584     };
585 }};
586
// Constructor for immediate-offset load/store. When the instruction is
// split into micro-ops (use_uops), uop[0] is the access (delayed commit,
// first micro-op) and uop[1] is the writeback micro-op (last micro-op).
587 def template LoadStoreImm64Constructor {{
588     %(class_name)s::%(class_name)s(ExtMachInst machInst,
589             IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
590          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
591                  (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
592     {
593         %(constructor)s;
594 #if %(use_uops)d
595         assert(numMicroops >= 2);
596         uops = new StaticInstPtr[numMicroops];
597         uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
598         uops[0]->setDelayedCommit();
599         uops[0]->setFirstMicroop();
600         uops[1] = new %(wb_decl)s;
601         uops[1]->setLastMicroop();
602 #endif
603     }
604 }};
605
// Constructor for the attributed immediate-offset variant: never
// micro-opped; records the exclusive/acquire-release attributes.
// Note: the noAlloc argument is accepted but not used here.
606 def template LoadStoreImmU64Constructor {{
607     %(class_name)s::%(class_name)s(ExtMachInst machInst,
608             IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
609             bool noAlloc, bool exclusive, bool acrel)
610          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
611                  _dest, _base, _imm)
612     {
613         %(constructor)s;
614         assert(!%(use_uops)d);
615         setExcAcRel(exclusive, acrel);
616     }
617 }};
618
// Constructor for the dual-destination attributed variant; same
// no-micro-op + setExcAcRel pattern as the single-dest form.
619 def template LoadStoreImmDU64Constructor {{
620     %(class_name)s::%(class_name)s(ExtMachInst machInst,
621             IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
622             int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
623          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
624                  _dest, _dest2, _base, _imm)
625     {
626         %(constructor)s;
627         assert(!%(use_uops)d);
628         setExcAcRel(exclusive, acrel);
629     }
630 }};
631
// Constructor for store-exclusive pair; never micro-opped.
632 def template StoreImmDEx64Constructor {{
633     %(class_name)s::%(class_name)s(ExtMachInst machInst,
634             IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
635             IntRegIndex _base, int64_t _imm)
636          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
637                  _result, _dest, _dest2, _base, _imm)
638     {
639         %(constructor)s;
640         assert(!%(use_uops)d);
641     }
642 }};
643
644
// Constructor for register-offset load/store; same micro-op split as
// LoadStoreImm64Constructor (access uop + writeback uop) when use_uops.
645 def template LoadStoreReg64Constructor {{
646     %(class_name)s::%(class_name)s(ExtMachInst machInst,
647             IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
648             ArmExtendType _type, uint32_t _shiftAmt)
649          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
650                  _dest, _base, _offset, _type, _shiftAmt)
651     {
652         %(constructor)s;
653 #if %(use_uops)d
654         assert(numMicroops >= 2);
655         uops = new StaticInstPtr[numMicroops];
656         uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
657                                    _type, _shiftAmt);
658         uops[0]->setDelayedCommit();
659         uops[0]->setFirstMicroop();
660         uops[1] = new %(wb_decl)s;
661         uops[1]->setLastMicroop();
662 #endif
663     }
664 }};
665
// Constructor for the attributed register-offset variant: never
// micro-opped; records exclusive/acquire-release. noAlloc is unused here.
666 def template LoadStoreRegU64Constructor {{
667     %(class_name)s::%(class_name)s(ExtMachInst machInst,
668             IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
669             ArmExtendType _type, uint32_t _shiftAmt,
670             bool noAlloc, bool exclusive, bool acrel)
671          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
672                  _dest, _base, _offset, _type, _shiftAmt)
673     {
674         %(constructor)s;
675         assert(!%(use_uops)d);
676         setExcAcRel(exclusive, acrel);
677     }
678 }};
679
// Constructor for the base-only (raw) variant.
680 def template LoadStoreRaw64Constructor {{
681     %(class_name)s::%(class_name)s(ExtMachInst machInst,
682             IntRegIndex _dest, IntRegIndex _base)
683          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
684     {
685         %(constructor)s;
686     }
687 }};
688
// Constructor for the exclusive variant (dest, base, result).
689 def template LoadStoreEx64Constructor {{
690     %(class_name)s::%(class_name)s(ExtMachInst machInst,
691             IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
692          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
693                  _dest, _base, _result)
694     {
695         %(constructor)s;
696     }
697 }};
698
// Constructor for PC-literal load/store; micro-op split (access +
// writeback) mirrors the immediate/register-offset constructors.
699 def template LoadStoreLit64Constructor {{
700     %(class_name)s::%(class_name)s(ExtMachInst machInst,
701             IntRegIndex _dest, int64_t _imm)
702          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
703                  (IntRegIndex)_dest, _imm)
704     {
705         %(constructor)s;
706 #if %(use_uops)d
707         assert(numMicroops >= 2);
708         uops = new StaticInstPtr[numMicroops];
709         uops[0] = new %(acc_name)s(machInst, _dest, _imm);
710         uops[0]->setDelayedCommit();
711         uops[0]->setFirstMicroop();
712         uops[1] = new %(wb_decl)s;
713         uops[1]->setLastMicroop();
714 #endif
715     }
716 }};
717
// Constructor for the attributed PC-literal variant: never micro-opped;
// records exclusive/acquire-release. noAlloc is unused here.
718 def template LoadStoreLitU64Constructor {{
719     %(class_name)s::%(class_name)s(ExtMachInst machInst,
720             IntRegIndex _dest, int64_t _imm,
721             bool noAlloc, bool exclusive, bool acrel)
722          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
723                  (IntRegIndex)_dest, _imm)
724     {
725         %(constructor)s;
726         assert(!%(use_uops)d);
727         setExcAcRel(exclusive, acrel);
728     }
729 }};
730
731 // Atomic operations in memory
732
// Atomic-mode execute() for atomic memory operations (AMOs): memacc_code
// prepares the operand, amo_code builds the atomic-op functor (amo_op,
// asserted non-null), then amoMemAtomicLE performs the read-modify-write
// and returns the old memory value in Mem for postacc_code/writeback.
733 def template AmoOpExecute {{
734     Fault %(class_name)s::execute(ExecContext *xc,
735                                   Trace::InstRecord *traceData) const
736     {
737         Addr EA;
738 
739         %(op_decl)s;
740         %(op_rd)s;
741         %(ea_code)s;
742 
743         %(usrDecl)s;
744 
745         %(memacc_code)s;
746 
747         %(amo_code)s;
748         assert(amo_op);
749 
750         const Fault fault = amoMemAtomicLE(xc, traceData, Mem, EA,
751                                            memAccessFlags, amo_op);
752 
753         if (fault == NoFault) {
754             %(postacc_code)s;
755             %(op_wb)s;
756         }
757 
758         return fault;
759     }
760 }};
761
// Timing-mode initiateAcc() for AMOs: builds the functor and initiates
// the atomic access; the old value comes back via AmoOpCompleteAcc.
762 def template AmoOpInitiateAcc {{
763     Fault %(class_name)s::initiateAcc(ExecContext *xc,
764                                       Trace::InstRecord *traceData) const
765     {
766         Addr EA;
767 
768         %(op_src_decl)s;
769         %(op_rd)s;
770         %(ea_code)s;
771         %(usrDecl)s;
772 
773         %(memacc_code)s;
774 
775         %(amo_code)s;
776 
777         assert(amo_op);
778         return initiateMemAMO(xc, traceData, EA, Mem, memAccessFlags,
779                               amo_op);
780     }
781 }};
782
// Timing-mode completeAcc() for AMOs: unpack the returned (old) memory
// value from the packet into Mem, run postacc_code, then write back.
783 def template AmoOpCompleteAcc {{
784     Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
785                                       Trace::InstRecord *traceData) const
786     {
787         %(op_decl)s;
788         %(op_rd)s;
789 
790         // ARM instructions will not have a pkt if the predicate is false
791         getMemLE(pkt, Mem, traceData);
792 
793         %(postacc_code)s;
794 
795         %(op_wb)s;
796 
797         return NoFault;
798     }
799 
800 }};
801
// Class declaration for single-register AMO instructions
// (dest, base, result).
802 def template AmoOpDeclare {{
803     class %(class_name)s : public %(base_class)s
804     {
805       public:
806 
807         /// Constructor.
808         %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
809                        IntRegIndex _base, IntRegIndex _result);
810 
811         Fault execute(ExecContext *, Trace::InstRecord *) const override;
812         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
813         Fault completeAcc(PacketPtr, ExecContext *,
814                           Trace::InstRecord *) const override;
815 
816         void
817         annotateFault(ArmFault *fault) override
818         {
819             %(fa_code)s
820         }
821     };
822 }};
823
824
// Constructor for AMO instructions. IsStore/IsLoad are explicitly
// cleared: an atomic RMW is categorized as neither a plain load nor a
// plain store by the rest of the pipeline.
825 def template AmoOpConstructor {{
826     %(class_name)s::%(class_name)s(ExtMachInst machInst,
827             IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
828          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
829                  _dest, _base, _result)
830     {
831         %(constructor)s;
832         flags[IsStore] = false;
833         flags[IsLoad] = false;
834     }
835 }};
836
// Declaration for pair-register AMOs (presumably CASP-style ops — the
// constructor below appends the implicit second register of each pair).
// d2_src/r2_src/r2_dst record the operand-list indices of those
// implicitly-added registers.
837 def template AmoPairOpDeclare {{
838     class %(class_name)s : public %(base_class)s
839     {
840       public:
841         uint32_t d2_src ;
842         uint32_t r2_src ;
843         uint32_t r2_dst ;
844         /// Constructor.
845         %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
846                        IntRegIndex _base, IntRegIndex _result);
847 
848         Fault execute(ExecContext *, Trace::InstRecord *) const override;
849         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
850         Fault completeAcc(PacketPtr, ExecContext *,
851                           Trace::InstRecord *) const override;
852 
853         void
854         annotateFault(ArmFault *fault) override
855         {
856             %(fa_code)s
857         }
858     };
859 }};
860
861
// Constructor for pair-register AMOs: manually appends the second
// register of each pair (dest+1 as an extra source, result+1 as an extra
// source and destination) to the operand lists built by %(constructor)s,
// remembering their indices for use by the access code. IsStore/IsLoad
// are cleared as in AmoOpConstructor.
862 def template AmoPairOpConstructor {{
863     %(class_name)s::%(class_name)s(ExtMachInst machInst,
864             IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
865          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
866                  _dest, _base, _result)
867     {
868         %(constructor)s;
869 
870         uint32_t d2 = RegId(IntRegClass, dest).index() + 1 ;
871         uint32_t r2 = RegId(IntRegClass, result).index() + 1 ;
872 
873         d2_src = _numSrcRegs ;
874         _srcRegIdx[_numSrcRegs++] = RegId(IntRegClass, d2);
875         r2_src = _numSrcRegs ;
876         _srcRegIdx[_numSrcRegs++] = RegId(IntRegClass, r2);
877         r2_dst = _numDestRegs ;
878         _destRegIdx[_numDestRegs++] = RegId(IntRegClass, r2);
879         flags[IsStore] = false;
880         flags[IsLoad] = false;
881     }
882 }};
883
// Declaration for arithmetic AMOs: isXZR records whether the destination
// is register 31 (XZR) — set by the constructor below; presumably used
// by the access code to suppress the result writeback. TODO confirm use.
884 def template AmoArithmeticOpDeclare {{
885     class %(class_name)s : public %(base_class)s
886     {
887       public:
888         bool isXZR ;
889         /// Constructor.
890         %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
891                        IntRegIndex _base, IntRegIndex _result);
892 
893         Fault execute(ExecContext *, Trace::InstRecord *) const override;
894         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
895         Fault completeAcc(PacketPtr, ExecContext *,
896                           Trace::InstRecord *) const override;
897 
898         void
899         annotateFault(ArmFault *fault) override
900         {
901             %(fa_code)s
902         }
903     };
904 }};
905
// Constructor for arithmetic AMOs: flags the XZR-destination case
// (register index 31) via isXZR and clears IsStore/IsLoad like the other
// AMO constructors. Note r2 here is the *dest* index, unlike the pair
// constructor where r2 derives from result.
906 def template AmoArithmeticOpConstructor {{
907     %(class_name)s::%(class_name)s(ExtMachInst machInst,
908             IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
909          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
910                  _dest, _base, _result)
911     {
912         %(constructor)s;
913         isXZR = false;
914         uint32_t r2 = RegId(IntRegClass, dest).index() ;
915         flags[IsStore] = false;
916         flags[IsLoad] = false;
917         if (r2 == 31){
918             isXZR = true;
919         }
920     }
921 }};