3 // Copyright (c) 2011-2014, 2017, 2019 ARM Limited
6 // The license below extends only to copyright in the software and shall
7 // not be construed as granting a license to any other intellectual
8 // property including but not limited to intellectual property relating
9 // to a hardware implementation of the functionality of the software
10 // licensed hereunder. You may use the software subject to the license
11 // terms below provided that you ensure that this notice is replicated
12 // unmodified and in its entirety in all distributions of the software,
13 // modified or unmodified, in source code or in binary form.
15 // Redistribution and use in source and binary forms, with or without
16 // modification, are permitted provided that the following conditions are
17 // met: redistributions of source code must retain the above copyright
18 // notice, this list of conditions and the following disclaimer;
19 // redistributions in binary form must reproduce the above copyright
20 // notice, this list of conditions and the following disclaimer in the
21 // documentation and/or other materials provided with the distribution;
22 // neither the name of the copyright holders nor the names of its
23 // contributors may be used to endorse or promote products derived from
24 // this software without specific prior written permission.
26 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
27 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
28 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
29 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
30 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
31 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
32 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
33 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
34 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
35 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
36 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Shared code snippet spliced into load/store templates: when the base
// register is SP and SP alignment checking is enabled, a base address
// whose low 4 bits are nonzero (i.e. not 16-byte aligned) raises an
// SPAlignmentFault.
// NOTE(review): the closing quotes of this string literal are not
// visible in this view — interior lines are missing from the source.
39 SPAlignmentCheckCode = '''
40 if (baseIsSP && bits(XBase, 3, 0) &&
41 SPAlignmentCheckEnabled(xc->tcBase())) {
42 return std::make_shared<SPAlignmentFault>();
// Atomic-mode execute() for 64-bit integer loads: after fault-free EA
// computation, performs a little-endian atomic memory read into Mem,
// then (on success) runs the instruction's op/writeback code.
// NOTE(review): interior template lines are missing from this view;
// comments describe only the visible fragment.
47 def template Load64Execute {{
48 Fault %(class_name)s::execute(ExecContext *xc,
49 Trace::InstRecord *traceData) const
52 Fault fault = NoFault;
// Guarded stages: each step only proceeds if the previous one faulted.
58 if (fault == NoFault) {
59 fault = readMemAtomicLE(xc, traceData, EA, Mem, memAccessFlags);
63 if (fault == NoFault) {
// Atomic-mode execute() for 64-bit floating-point loads. Visible
// structure mirrors Load64Execute: atomic little-endian read, then
// op code on success. NOTE(review): interior lines missing from view.
71 def template Load64FpExecute {{
72 Fault %(class_name)s::execute(ExecContext *xc,
73 Trace::InstRecord *traceData) const
76 Fault fault = NoFault;
82 if (fault == NoFault) {
83 fault = readMemAtomicLE(xc, traceData, EA, Mem, memAccessFlags);
86 if (fault == NoFault)  {
// Atomic-mode execute() for 64-bit stores: little-endian atomic write
// of Mem to the effective address; the NULL result pointer means no
// write result (e.g. no exclusive status) is collected.
// NOTE(review): interior lines missing from this view.
95 def template Store64Execute {{
96 Fault %(class_name)s::execute(ExecContext *xc,
97 Trace::InstRecord *traceData) const
100 Fault fault = NoFault;
106 if (fault == NoFault) {
110 if (fault == NoFault) {
111 fault = writeMemAtomicLE(xc, traceData, Mem, EA,
112 memAccessFlags, NULL);
115 if (fault == NoFault) {
// Timing-mode initiateAcc() for 64-bit stores: starts a little-endian
// timing write; completion is handled by the matching completeAcc
// template. NOTE(review): interior lines missing from this view.
123 def template Store64InitiateAcc {{
124 Fault %(class_name)s::initiateAcc(ExecContext *xc,
125 Trace::InstRecord *traceData) const
128 Fault fault = NoFault;
134 if (fault == NoFault) {
138 if (fault == NoFault) {
139 fault = writeMemTimingLE(xc, traceData, Mem, EA, memAccessFlags,
// Atomic-mode execute() for exclusive (store-exclusive) 64-bit stores:
// like Store64Execute, but captures a writeResult (the exclusive
// pass/fail status) from the atomic write for the op code to consume.
// NOTE(review): interior lines missing from this view.
147 def template StoreEx64Execute {{
148 Fault %(class_name)s::execute(ExecContext *xc,
149 Trace::InstRecord *traceData) const
152 Fault fault = NoFault;
158 if (fault == NoFault) {
// Exclusive-store status returned by the memory system.
162 uint64_t writeResult = 0;
163 if (fault == NoFault) {
164 fault = writeMemAtomicLE(xc, traceData, Mem, EA, memAccessFlags,
168 if (fault == NoFault) {
172 if (fault == NoFault) {
// Timing-mode initiateAcc() for exclusive 64-bit stores: issues the
// timing write; the exclusive result is picked up in completeAcc.
// NOTE(review): interior lines missing from this view.
180 def template StoreEx64InitiateAcc {{
181 Fault %(class_name)s::initiateAcc(ExecContext *xc,
182 Trace::InstRecord *traceData) const
185 Fault fault = NoFault;
191 if (fault == NoFault) {
195 if (fault == NoFault) {
196 fault = writeMemTimingLE(xc, traceData, Mem, EA, memAccessFlags,
// Timing-mode initiateAcc() for 64-bit loads: kicks off the memory
// read; data arrives later via Load64CompleteAcc.
// NOTE(review): interior lines missing from this view.
204 def template Load64InitiateAcc {{
205 Fault %(class_name)s::initiateAcc(ExecContext *xc,
206 Trace::InstRecord *traceData) const
209 Fault fault = NoFault;
215 if (fault == NoFault) {
216 fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
// Timing-mode completeAcc() for 64-bit loads: unpacks the returned
// packet (little-endian) into Mem, then runs the op/writeback code.
// NOTE(review): interior lines missing from this view.
223 def template Load64CompleteAcc {{
224 Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
225 Trace::InstRecord *traceData) const
227 Fault fault = NoFault;
232 // ARM instructions will not have a pkt if the predicate is false
233 getMemLE(pkt, Mem, traceData);
235 if (fault == NoFault) {
239 if (fault == NoFault) {
// Timing-mode completeAcc() for plain 64-bit stores — body not visible
// in this view; plain stores have no result to consume on completion.
// NOTE(review): interior lines missing; confirm against full source.
247 def template Store64CompleteAcc {{
248 Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
249 Trace::InstRecord *traceData) const
// Timing-mode completeAcc() for exclusive 64-bit stores: retrieves the
// exclusive pass/fail status stashed in the request's extra data.
// NOTE(review): interior lines missing from this view.
255 def template StoreEx64CompleteAcc {{
256 Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
257 Trace::InstRecord *traceData) const
259 Fault fault = NoFault;
264 uint64_t writeResult = pkt->req->getExtraData();
267 if (fault == NoFault) {
// Class declaration for DC (data-cache maintenance) store-style ops:
// constructor takes base int register, a misc-reg "dest" and an
// immediate; declares the standard execute/initiateAcc/completeAcc
// trio plus fault annotation.
// NOTE(review): interior lines missing from this view.
275 def template DCStore64Declare {{
276 class %(class_name)s : public %(base_class)s
281 %(class_name)s(ExtMachInst machInst, IntRegIndex _base,
282 MiscRegIndex _dest, uint64_t _imm);
284 Fault execute(ExecContext *, Trace::InstRecord *) const override;
285 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
286 Fault completeAcc(PacketPtr, ExecContext *,
287 Trace::InstRecord *) const override;
290 annotateFault(ArmFault *fault) override
// Constructor for DC store-style ops; asserts the instruction is not
// decomposed into micro-ops. NOTE(review): interior lines missing.
297 def template DCStore64Constructor {{
298 %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base,
299 MiscRegIndex _dest, uint64_t _imm)
300 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
304 assert(!%(use_uops)d);
// Atomic-mode execute() for DC ops: issues a data-less write (NULL
// data pointer) of op_size at EA — the flags, not the data, carry the
// cache-maintenance semantics. NOTE(review): interior lines missing.
308 def template DCStore64Execute {{
309 Fault %(class_name)s::execute(ExecContext *xc,
310 Trace::InstRecord *traceData) const
313 Fault fault = NoFault;
320 if (fault == NoFault) {
324 if (fault == NoFault) {
325 fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
328 if (fault == NoFault) {
// Timing-mode initiateAcc() for DC ops: same data-less write as the
// atomic path. NOTE(review): interior lines missing from this view.
336 def template DCStore64InitiateAcc {{
337 Fault %(class_name)s::initiateAcc(ExecContext *xc,
338 Trace::InstRecord *traceData) const
341 Fault fault = NoFault;
347 if (fault == NoFault) {
351 if (fault == NoFault) {
352 fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
// Class declaration for immediate-offset 64-bit load/store: dest,
// base, signed immediate. Standard access-method trio + fault
// annotation. NOTE(review): interior lines missing from this view.
360 def template LoadStoreImm64Declare {{
361 class %(class_name)s : public %(base_class)s
366 %(class_name)s(ExtMachInst machInst,
367 IntRegIndex _dest, IntRegIndex _base, int64_t _imm);
369 Fault execute(ExecContext *, Trace::InstRecord *) const override;
370 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
371 Fault completeAcc(PacketPtr, ExecContext *,
372 Trace::InstRecord *) const override;
375 annotateFault(ArmFault *fault) override
// As LoadStoreImm64Declare, with extra defaulted flags (noAlloc,
// exclusive, and — presumably, given the constructor template below —
// acquire/release; the third parameter line is not visible here).
382 def template LoadStoreImmU64Declare {{
383 class %(class_name)s : public %(base_class)s
388 %(class_name)s(ExtMachInst machInst,
389 IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
390 bool noAlloc = false, bool exclusive = false,
393 Fault execute(ExecContext *, Trace::InstRecord *) const override;
394 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
395 Fault completeAcc(PacketPtr, ExecContext *,
396 Trace::InstRecord *) const override;
399 annotateFault(ArmFault *fault) override
// Declaration for paired (two destination registers) immediate-offset
// load/store with noAlloc/exclusive flags.
// NOTE(review): interior lines missing from this view.
406 def template LoadStoreImmDU64Declare {{
407 class %(class_name)s : public %(base_class)s
412 %(class_name)s(ExtMachInst machInst,
413 IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
414 int64_t _imm = 0, bool noAlloc = false, bool exclusive = false,
417 Fault execute(ExecContext *, Trace::InstRecord *) const override;
418 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
419 Fault completeAcc(PacketPtr, ExecContext *,
420 Trace::InstRecord *) const override;
423 annotateFault(ArmFault *fault) override
// Declaration for paired store-exclusive: takes a result register (the
// exclusive status destination) in addition to the two data registers.
// NOTE(review): interior lines missing from this view.
430 def template StoreImmDEx64Declare {{
432 * Static instruction class for "%(mnemonic)s".
434 class %(class_name)s : public %(base_class)s
439 %(class_name)s(ExtMachInst machInst,
440 IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
441 IntRegIndex _base, int64_t _imm = 0);
443 Fault execute(ExecContext *, Trace::InstRecord *) const override;
444 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
445 Fault completeAcc(PacketPtr, ExecContext *,
446 Trace::InstRecord *) const override;
// Declaration for register-offset load/store: offset register with an
// extend type and shift amount. NOTE(review): interior lines missing.
451 def template LoadStoreReg64Declare {{
452 class %(class_name)s : public %(base_class)s
457 %(class_name)s(ExtMachInst machInst,
458 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
459 ArmExtendType _type, uint32_t _shiftAmt);
461 Fault execute(ExecContext *, Trace::InstRecord *) const override;
462 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
463 Fault completeAcc(PacketPtr, ExecContext *,
464 Trace::InstRecord *) const override;
467 annotateFault(ArmFault *fault) override
// Register-offset variant with defaulted noAlloc/exclusive flags.
// NOTE(review): interior lines missing from this view.
474 def template LoadStoreRegU64Declare {{
475 class %(class_name)s : public %(base_class)s
480 %(class_name)s(ExtMachInst machInst,
481 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
482 ArmExtendType _type, uint32_t _shiftAmt,
483 bool noAlloc = false, bool exclusive = false,
486 Fault execute(ExecContext *, Trace::InstRecord *) const override;
487 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
488 Fault completeAcc(PacketPtr, ExecContext *,
489 Trace::InstRecord *) const override;
492 annotateFault(ArmFault *fault) override
// Declaration for "raw" (dest + base only, no offset/immediate)
// load/store. NOTE(review): interior lines missing from this view.
499 def template LoadStoreRaw64Declare {{
500 class %(class_name)s : public %(base_class)s
505 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
508 Fault execute(ExecContext *, Trace::InstRecord *) const override;
509 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
510 Fault completeAcc(PacketPtr, ExecContext *,
511 Trace::InstRecord *) const override;
514 annotateFault(ArmFault *fault) override
// Declaration for exclusive load/store: adds a result register for the
// exclusive status. NOTE(review): interior lines missing.
521 def template LoadStoreEx64Declare {{
522 class %(class_name)s : public %(base_class)s
527 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
528 IntRegIndex _base, IntRegIndex _result);
530 Fault execute(ExecContext *, Trace::InstRecord *) const override;
531 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
532 Fault completeAcc(PacketPtr, ExecContext *,
533 Trace::InstRecord *) const override;
536 annotateFault(ArmFault *fault) override
// Declaration for PC-relative (literal) load: dest + immediate only.
// NOTE(review): interior lines missing from this view.
543 def template LoadStoreLit64Declare {{
544 class %(class_name)s : public %(base_class)s
549 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);
551 Fault execute(ExecContext *, Trace::InstRecord *) const override;
552 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
553 Fault completeAcc(PacketPtr, ExecContext *,
554 Trace::InstRecord *) const override;
557 annotateFault(ArmFault *fault) override
// Literal-load variant with defaulted noAlloc/exclusive flags.
// NOTE(review): interior lines missing from this view.
564 def template LoadStoreLitU64Declare {{
565 class %(class_name)s : public %(base_class)s
570 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
571 bool noAlloc = false, bool exclusive = false,
574 Fault execute(ExecContext *, Trace::InstRecord *) const override;
575 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
576 Fault completeAcc(PacketPtr, ExecContext *,
577 Trace::InstRecord *) const override;
580 annotateFault(ArmFault *fault) override
// Constructor for immediate-offset load/store: when micro-coded,
// builds a two-uop sequence — uop[0] does the access (delayed commit,
// first micro-op), uop[1] is the writeback (last micro-op).
// NOTE(review): interior lines missing from this view; the branch
// structure around the uop setup is not visible.
587 def template LoadStoreImm64Constructor {{
588 %(class_name)s::%(class_name)s(ExtMachInst machInst,
589 IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
590 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
591 (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
595 assert(numMicroops >= 2);
596 uops = new StaticInstPtr[numMicroops];
597 uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
598 uops[0]->setDelayedCommit();
599 uops[0]->setFirstMicroop();
600 uops[1] = new %(wb_decl)s;
601 uops[1]->setLastMicroop();
// Constructor for flagged immediate-offset variant: never micro-coded
// (asserted); records exclusive / acquire-release attributes.
// NOTE(review): interior lines missing from this view.
606 def template LoadStoreImmU64Constructor {{
607 %(class_name)s::%(class_name)s(ExtMachInst machInst,
608 IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
609 bool noAlloc, bool exclusive, bool acrel)
610 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
614 assert(!%(use_uops)d);
615 setExcAcRel(exclusive, acrel);
// Constructor for paired immediate-offset variant: not micro-coded;
// records exclusive / acquire-release attributes.
// NOTE(review): interior lines missing from this view.
619 def template LoadStoreImmDU64Constructor {{
620 %(class_name)s::%(class_name)s(ExtMachInst machInst,
621 IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
622 int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
623 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
624 _dest, _dest2, _base, _imm)
627 assert(!%(use_uops)d);
628 setExcAcRel(exclusive, acrel);
// Constructor for paired store-exclusive: not micro-coded.
// NOTE(review): interior lines missing from this view.
632 def template StoreImmDEx64Constructor {{
633 %(class_name)s::%(class_name)s(ExtMachInst machInst,
634 IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
635 IntRegIndex _base, int64_t _imm)
636 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
637 _result, _dest, _dest2, _base, _imm)
640 assert(!%(use_uops)d);
// Constructor for register-offset load/store: same two-uop
// access+writeback scheme as LoadStoreImm64Constructor.
// NOTE(review): interior lines missing from this view.
645 def template LoadStoreReg64Constructor {{
646 %(class_name)s::%(class_name)s(ExtMachInst machInst,
647 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
648 ArmExtendType _type, uint32_t _shiftAmt)
649 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
650 _dest, _base, _offset, _type, _shiftAmt)
654 assert(numMicroops >= 2);
655 uops = new StaticInstPtr[numMicroops];
656 uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
658 uops[0]->setDelayedCommit();
659 uops[0]->setFirstMicroop();
660 uops[1] = new %(wb_decl)s;
661 uops[1]->setLastMicroop();
// Constructor for flagged register-offset variant: not micro-coded;
// records exclusive / acquire-release attributes.
// NOTE(review): interior lines missing from this view.
666 def template LoadStoreRegU64Constructor {{
667 %(class_name)s::%(class_name)s(ExtMachInst machInst,
668 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
669 ArmExtendType _type, uint32_t _shiftAmt,
670 bool noAlloc, bool exclusive, bool acrel)
671 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
672 _dest, _base, _offset, _type, _shiftAmt)
675 assert(!%(use_uops)d);
676 setExcAcRel(exclusive, acrel);
// Constructor for raw (dest + base) load/store — forwards straight to
// the base class. NOTE(review): body after the init list not visible.
680 def template LoadStoreRaw64Constructor {{
681 %(class_name)s::%(class_name)s(ExtMachInst machInst,
682 IntRegIndex _dest, IntRegIndex _base)
683 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
// Constructor for exclusive load/store with status result register.
// NOTE(review): body after the init list not visible in this view.
689 def template LoadStoreEx64Constructor {{
690 %(class_name)s::%(class_name)s(ExtMachInst machInst,
691 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
692 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
693 _dest, _base, _result)
// Constructor for literal (PC-relative) load: same two-uop
// access+writeback scheme as the other micro-coded constructors.
// NOTE(review): interior lines missing from this view.
699 def template LoadStoreLit64Constructor {{
700 %(class_name)s::%(class_name)s(ExtMachInst machInst,
701 IntRegIndex _dest, int64_t _imm)
702 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
703 (IntRegIndex)_dest, _imm)
707 assert(numMicroops >= 2);
708 uops = new StaticInstPtr[numMicroops];
709 uops[0] = new %(acc_name)s(machInst, _dest, _imm);
710 uops[0]->setDelayedCommit();
711 uops[0]->setFirstMicroop();
712 uops[1] = new %(wb_decl)s;
713 uops[1]->setLastMicroop();
// Constructor for flagged literal-load variant: not micro-coded;
// records exclusive / acquire-release attributes.
// NOTE(review): interior lines missing from this view.
718 def template LoadStoreLitU64Constructor {{
719 %(class_name)s::%(class_name)s(ExtMachInst machInst,
720 IntRegIndex _dest, int64_t _imm,
721 bool noAlloc, bool exclusive, bool acrel)
722 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
723 (IntRegIndex)_dest, _imm)
726 assert(!%(use_uops)d);
727 setExcAcRel(exclusive, acrel);
731 // Atomic operations in memory
// Atomic-mode execute() for atomic memory operations (AMOs): performs
// the little-endian atomic read-modify-write via amoMemAtomicLE with
// the instruction-specific amo_op functor.
// NOTE(review): interior lines missing from this view.
733 def template AmoOpExecute {{
734 Fault %(class_name)s::execute(ExecContext *xc,
735 Trace::InstRecord *traceData) const
750 const Fault fault = amoMemAtomicLE(xc, traceData, Mem, EA,
751 memAccessFlags, amo_op);
753 if (fault == NoFault) {
// Timing-mode initiateAcc() for AMOs: starts the atomic memory op;
// the old value comes back via AmoOpCompleteAcc.
// NOTE(review): interior lines missing from this view.
762 def template AmoOpInitiateAcc {{
763 Fault %(class_name)s::initiateAcc(ExecContext *xc,
764 Trace::InstRecord *traceData) const
778 return initiateMemAMO(xc, traceData, EA, Mem, memAccessFlags,
// Timing-mode completeAcc() for AMOs: unpacks the returned (old) value
// from the packet, little-endian. NOTE(review): interior lines missing.
783 def template AmoOpCompleteAcc {{
784 Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
785 Trace::InstRecord *traceData) const
790 // ARM instructions will not have a pkt if the predicate is false
791 getMemLE(pkt, Mem, traceData);
// Class declaration for single-register AMO instructions (dest, base,
// result). NOTE(review): interior lines missing from this view.
802 def template AmoOpDeclare {{
803 class %(class_name)s : public %(base_class)s
808 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
809 IntRegIndex _base, IntRegIndex _result);
811 Fault execute(ExecContext *, Trace::InstRecord *) const override;
812 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
813 Fault completeAcc(PacketPtr, ExecContext *,
814 Trace::InstRecord *) const override;
817 annotateFault(ArmFault *fault) override
// Constructor for single-register AMOs: clears the IsStore/IsLoad
// flags — an AMO is classified as neither a plain load nor a plain
// store. NOTE(review): interior lines missing from this view.
825 def template AmoOpConstructor {{
826 %(class_name)s::%(class_name)s(ExtMachInst machInst,
827 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
828 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
829 _dest, _base, _result)
832 flags[IsStore] = false;
833 flags[IsLoad] = false;
// Class declaration for paired AMOs (e.g. CASP-style ops operating on
// register pairs). NOTE(review): interior lines missing from this
// view, including the member fields the constructor below assigns.
837 def template AmoPairOpDeclare {{
838 class %(class_name)s : public %(base_class)s
845 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
846 IntRegIndex _base, IntRegIndex _result);
848 Fault execute(ExecContext *, Trace::InstRecord *) const override;
849 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
850 Fault completeAcc(PacketPtr, ExecContext *,
851 Trace::InstRecord *) const override;
854 annotateFault(ArmFault *fault) override
// Constructor for paired AMOs: manually registers the implicit second
// registers of each pair (dest+1 as an extra source, result+1 as both
// an extra source and destination), recording their operand indices in
// d2_src/r2_src/r2_dst for the exec templates. Clears IsStore/IsLoad
// as for single AMOs. NOTE(review): interior lines missing from this
// view; d2/r2 index bookkeeping shown is the visible fragment only.
862 def template AmoPairOpConstructor {{
863 %(class_name)s::%(class_name)s(ExtMachInst machInst,
864 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
865 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
866 _dest, _base, _result)
870 uint32_t d2 = RegId(IntRegClass, dest).index() + 1 ;
871 uint32_t r2 = RegId(IntRegClass, result).index() + 1 ;
873 d2_src = _numSrcRegs ;
874 _srcRegIdx[_numSrcRegs++] = RegId(IntRegClass, d2);
875 r2_src = _numSrcRegs ;
876 _srcRegIdx[_numSrcRegs++] = RegId(IntRegClass, r2);
877 r2_dst = _numDestRegs ;
878 _destRegIdx[_numDestRegs++] = RegId(IntRegClass, r2);
879 flags[IsStore] = false;
880 flags[IsLoad] = false;
// Class declaration for arithmetic AMOs (LD<op>/ST<op> family).
// NOTE(review): interior lines missing from this view.
884 def template AmoArithmeticOpDeclare {{
885 class %(class_name)s : public %(base_class)s
890 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
891 IntRegIndex _base, IntRegIndex _result);
893 Fault execute(ExecContext *, Trace::InstRecord *) const override;
894 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
895 Fault completeAcc(PacketPtr, ExecContext *,
896 Trace::InstRecord *) const override;
899 annotateFault(ArmFault *fault) override
// Constructor for arithmetic AMOs: clears IsStore/IsLoad like the
// other AMO constructors. NOTE(review): interior lines and the
// template's closing delimiter run past this view; the computed r2
// index's use is not visible here — confirm against the full source.
906 def template AmoArithmeticOpConstructor {{
907 %(class_name)s::%(class_name)s(ExtMachInst machInst,
908 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
909 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
910 _dest, _base, _result)
914 uint32_t r2 = RegId(IntRegClass, dest).index() ;
915 flags[IsStore] = false;
916 flags[IsLoad] = false;