3 // Copyright (c) 2011-2014, 2017, 2019 ARM Limited
6 // The license below extends only to copyright in the software and shall
7 // not be construed as granting a license to any other intellectual
8 // property including but not limited to intellectual property relating
9 // to a hardware implementation of the functionality of the software
10 // licensed hereunder. You may use the software subject to the license
11 // terms below provided that you ensure that this notice is replicated
12 // unmodified and in its entirety in all distributions of the software,
13 // modified or unmodified, in source code or in binary form.
15 // Redistribution and use in source and binary forms, with or without
16 // modification, are permitted provided that the following conditions are
17 // met: redistributions of source code must retain the above copyright
18 // notice, this list of conditions and the following disclaimer;
19 // redistributions in binary form must reproduce the above copyright
20 // notice, this list of conditions and the following disclaimer in the
21 // documentation and/or other materials provided with the distribution;
22 // neither the name of the copyright holders nor the names of its
23 // contributors may be used to endorse or promote products derived from
24 // this software without specific prior written permission.
26 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
27 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
28 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
29 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
30 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
31 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
32 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
33 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
34 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
35 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
36 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
38 // Authors: Gabe Black
# SPAlignmentCheckCode: C++ fragment spliced into the load/store execute
# templates below. When the base register is SP, it returns an
# SPAlignmentFault if SP's low four bits are nonzero (i.e. SP is not
# 16-byte aligned) and SP alignment checking is enabled for the current
# thread context.
# NOTE(review): this excerpt is elided (source line numbers jump); the
# closing ''' of the string is not visible here.
41 SPAlignmentCheckCode = '''
42 if (baseIsSP && bits(XBase, 3, 0) &&
43 SPAlignmentCheckEnabled(xc->tcBase())) {
44 return std::make_shared<SPAlignmentFault>();
// Load64Execute: atomic-mode execute() for 64-bit-mode integer loads.
// Visible flow: start with NoFault, perform the access with
// readMemAtomic (filling Mem), then continue only while fault stays
// NoFault. EA computation and destination writeback lines are elided
// in this excerpt.
49 def template Load64Execute {{
50 Fault %(class_name)s::execute(ExecContext *xc,
51 Trace::InstRecord *traceData) const
54 Fault fault = NoFault;
60 if (fault == NoFault) {
61 fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
65 if (fault == NoFault) {
// Load64FpExecute: atomic-mode execute() — presumably the FP/SIMD-
// destination variant of Load64Execute (inferred from the name; the
// differing writeback lines are elided — confirm against full file).
// The visible skeleton matches Load64Execute: readMemAtomic guarded by
// fault == NoFault checks.
73 def template Load64FpExecute {{
74 Fault %(class_name)s::execute(ExecContext *xc,
75 Trace::InstRecord *traceData) const
78 Fault fault = NoFault;
84 if (fault == NoFault) {
85 fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
88 if (fault == NoFault) {
// Store64Execute: atomic-mode execute() for 64-bit-mode stores.
// Writes Mem to EA with writeMemAtomic; the NULL final argument means
// no write result (extra data) is requested — this is the plain,
// non-exclusive store path. Interior lines (EA computation, operand
// read, postamble) are elided in this excerpt.
97 def template Store64Execute {{
98 Fault %(class_name)s::execute(ExecContext *xc,
99 Trace::InstRecord *traceData) const
102 Fault fault = NoFault;
108 if (fault == NoFault) {
112 if (fault == NoFault) {
113 fault = writeMemAtomic(xc, traceData, Mem, EA,
114 memAccessFlags, NULL);
117 if (fault == NoFault) {
// Store64InitiateAcc: timing-mode initiation for stores — sends the
// write with writeMemTiming instead of completing it atomically.
// The call's trailing argument(s) are elided in this excerpt.
125 def template Store64InitiateAcc {{
126 Fault %(class_name)s::initiateAcc(ExecContext *xc,
127 Trace::InstRecord *traceData) const
130 Fault fault = NoFault;
136 if (fault == NoFault) {
140 if (fault == NoFault) {
141 fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
// StoreEx64Execute: atomic-mode execute() for store-exclusive.
// Declares a local writeResult (zero-initialized) to receive the
// exclusive-store success/failure result; the writeMemAtomic call's
// trailing arguments are elided here — presumably &writeResult is
// passed so the result can be written to the result register
// (confirm against full file).
149 def template StoreEx64Execute {{
150 Fault %(class_name)s::execute(ExecContext *xc,
151 Trace::InstRecord *traceData) const
154 Fault fault = NoFault;
160 if (fault == NoFault) {
164 uint64_t writeResult = 0;
165 if (fault == NoFault) {
166 fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
170 if (fault == NoFault) {
174 if (fault == NoFault) {
// StoreEx64InitiateAcc: timing-mode initiation for store-exclusive via
// writeMemTiming. The result is delivered later in completeAcc (see
// StoreEx64CompleteAcc, which reads the packet's extra data).
// Trailing call arguments are elided in this excerpt.
182 def template StoreEx64InitiateAcc {{
183 Fault %(class_name)s::initiateAcc(ExecContext *xc,
184 Trace::InstRecord *traceData) const
187 Fault fault = NoFault;
193 if (fault == NoFault) {
197 if (fault == NoFault) {
198 fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
// Load64InitiateAcc: timing-mode initiation for loads — issues the
// read with initiateMemRead; the data arrives later via completeAcc.
206 def template Load64InitiateAcc {{
207 Fault %(class_name)s::initiateAcc(ExecContext *xc,
208 Trace::InstRecord *traceData) const
211 Fault fault = NoFault;
217 if (fault == NoFault) {
218 fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
// Load64CompleteAcc: timing-mode completion for loads — unpacks the
// returned packet into Mem with getMem, then (elided) writes back the
// destination while fault stays NoFault. The original comment notes
// pkt may be null when the instruction was predicated false.
225 def template Load64CompleteAcc {{
226 Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
227 Trace::InstRecord *traceData) const
229 Fault fault = NoFault;
234 // ARM instructions will not have a pkt if the predicate is false
235 getMem(pkt, Mem, traceData);
237 if (fault == NoFault) {
241 if (fault == NoFault) {
// Store64CompleteAcc: timing-mode completion for plain stores. The
// body is entirely elided in this excerpt — presumably trivial (a
// plain store needs no completion work) — confirm against full file.
249 def template Store64CompleteAcc {{
250 Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
251 Trace::InstRecord *traceData) const
// StoreEx64CompleteAcc: timing-mode completion for store-exclusive —
// retrieves the exclusive-store result from the request's extra data
// (set by the memory system) so it can be written to the result
// register (writeback lines elided).
257 def template StoreEx64CompleteAcc {{
258 Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
259 Trace::InstRecord *traceData) const
261 Fault fault = NoFault;
266 uint64_t writeResult = pkt->req->getExtraData();
269 if (fault == NoFault) {
// DCStore64Declare: class declaration for the DC-store-style
// instruction (name suggests an AArch64 DC cache-maintenance op —
// confirm against full file). Constructor takes a base int register,
// a *misc*-register destination, and an immediate; declares the
// standard memory-access triple (execute/initiateAcc/completeAcc)
// plus annotateFault.
277 def template DCStore64Declare {{
278 class %(class_name)s : public %(base_class)s
283 %(class_name)s(ExtMachInst machInst, IntRegIndex _base,
284 MiscRegIndex _dest, uint64_t _imm);
286 Fault execute(ExecContext *, Trace::InstRecord *) const override;
287 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
288 Fault completeAcc(PacketPtr, ExecContext *,
289 Trace::InstRecord *) const override;
292 annotateFault(ArmFault *fault) override
// DCStore64Constructor: forwards to the base class; asserts this
// format is never instantiated with microops (use_uops must be 0).
299 def template DCStore64Constructor {{
300 %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base,
301 MiscRegIndex _dest, uint64_t _imm)
302 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
306 assert(!%(use_uops)d);
// DCStore64Execute: atomic-mode execute() issuing a data-less write —
// xc->writeMem with a NULL data pointer, op_size bytes at EA. A NULL
// data pointer marks the access as having no payload, consistent with
// a cache-maintenance operation rather than a real store.
310 def template DCStore64Execute {{
311 Fault %(class_name)s::execute(ExecContext *xc,
312 Trace::InstRecord *traceData) const
315 Fault fault = NoFault;
322 if (fault == NoFault) {
326 if (fault == NoFault) {
327 fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
330 if (fault == NoFault) {
// DCStore64InitiateAcc: timing-mode counterpart of DCStore64Execute —
// same data-less xc->writeMem (NULL payload, NULL result pointer).
338 def template DCStore64InitiateAcc {{
339 Fault %(class_name)s::initiateAcc(ExecContext *xc,
340 Trace::InstRecord *traceData) const
343 Fault fault = NoFault;
349 if (fault == NoFault) {
353 if (fault == NoFault) {
354 fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
// LoadStoreImm64Declare: class declaration for base + immediate
// addressed loads/stores — ctor(dest, base, signed imm); declares the
// memory-access virtuals and annotateFault.
362 def template LoadStoreImm64Declare {{
363 class %(class_name)s : public %(base_class)s
368 %(class_name)s(ExtMachInst machInst,
369 IntRegIndex _dest, IntRegIndex _base, int64_t _imm);
371 Fault execute(ExecContext *, Trace::InstRecord *) const override;
372 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
373 Fault completeAcc(PacketPtr, ExecContext *,
374 Trace::InstRecord *) const override;
377 annotateFault(ArmFault *fault) override
// LoadStoreImmU64Declare: like LoadStoreImm64Declare but the ctor
// also takes defaulted attribute flags (noAlloc, exclusive, and — per
// the elided continuation line — presumably an acquire/release flag;
// confirm against full file).
384 def template LoadStoreImmU64Declare {{
385 class %(class_name)s : public %(base_class)s
390 %(class_name)s(ExtMachInst machInst,
391 IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
392 bool noAlloc = false, bool exclusive = false,
395 Fault execute(ExecContext *, Trace::InstRecord *) const override;
396 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
397 Fault completeAcc(PacketPtr, ExecContext *,
398 Trace::InstRecord *) const override;
401 annotateFault(ArmFault *fault) override
// LoadStoreImmDU64Declare: pair (dual-register) variant — ctor takes
// two destination registers (dest, dest2) plus base, immediate, and
// the defaulted attribute flags.
408 def template LoadStoreImmDU64Declare {{
409 class %(class_name)s : public %(base_class)s
414 %(class_name)s(ExtMachInst machInst,
415 IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
416 int64_t _imm = 0, bool noAlloc = false, bool exclusive = false,
419 Fault execute(ExecContext *, Trace::InstRecord *) const override;
420 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
421 Fault completeAcc(PacketPtr, ExecContext *,
422 Trace::InstRecord *) const override;
425 annotateFault(ArmFault *fault) override
// StoreImmDEx64Declare: declaration for the store-exclusive pair
// instruction — ctor takes a result register (receives the exclusive
// success/fail status), two source registers, base, and immediate.
432 def template StoreImmDEx64Declare {{
434 * Static instruction class for "%(mnemonic)s".
436 class %(class_name)s : public %(base_class)s
441 %(class_name)s(ExtMachInst machInst,
442 IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
443 IntRegIndex _base, int64_t _imm = 0);
445 Fault execute(ExecContext *, Trace::InstRecord *) const override;
446 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
447 Fault completeAcc(PacketPtr, ExecContext *,
448 Trace::InstRecord *) const override;
// LoadStoreReg64Declare: register-offset addressing — ctor takes an
// offset register plus an extend type and shift amount applied to it.
453 def template LoadStoreReg64Declare {{
454 class %(class_name)s : public %(base_class)s
459 %(class_name)s(ExtMachInst machInst,
460 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
461 ArmExtendType _type, uint32_t _shiftAmt);
463 Fault execute(ExecContext *, Trace::InstRecord *) const override;
464 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
465 Fault completeAcc(PacketPtr, ExecContext *,
466 Trace::InstRecord *) const override;
469 annotateFault(ArmFault *fault) override
// LoadStoreRegU64Declare: register-offset variant that also takes the
// defaulted attribute flags (noAlloc, exclusive, plus an elided
// continuation — confirm against full file).
476 def template LoadStoreRegU64Declare {{
477 class %(class_name)s : public %(base_class)s
482 %(class_name)s(ExtMachInst machInst,
483 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
484 ArmExtendType _type, uint32_t _shiftAmt,
485 bool noAlloc = false, bool exclusive = false,
488 Fault execute(ExecContext *, Trace::InstRecord *) const override;
489 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
490 Fault completeAcc(PacketPtr, ExecContext *,
491 Trace::InstRecord *) const override;
494 annotateFault(ArmFault *fault) override
// LoadStoreRaw64Declare: "raw" (no offset/immediate) addressing — the
// visible ctor takes only a destination (the base parameter line is
// elided; the matching constructor below shows _dest, _base).
501 def template LoadStoreRaw64Declare {{
502 class %(class_name)s : public %(base_class)s
507 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
510 Fault execute(ExecContext *, Trace::InstRecord *) const override;
511 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
512 Fault completeAcc(PacketPtr, ExecContext *,
513 Trace::InstRecord *) const override;
516 annotateFault(ArmFault *fault) override
// LoadStoreEx64Declare: exclusive access — ctor adds a result register
// to receive the exclusive-store status.
523 def template LoadStoreEx64Declare {{
524 class %(class_name)s : public %(base_class)s
529 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
530 IntRegIndex _base, IntRegIndex _result);
532 Fault execute(ExecContext *, Trace::InstRecord *) const override;
533 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
534 Fault completeAcc(PacketPtr, ExecContext *,
535 Trace::InstRecord *) const override;
538 annotateFault(ArmFault *fault) override
// LoadStoreLit64Declare: literal (no base register) addressing — ctor
// takes only destination and signed immediate; presumably PC-relative
// (inferred from "Lit" naming — confirm against full file).
545 def template LoadStoreLit64Declare {{
546 class %(class_name)s : public %(base_class)s
551 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);
553 Fault execute(ExecContext *, Trace::InstRecord *) const override;
554 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
555 Fault completeAcc(PacketPtr, ExecContext *,
556 Trace::InstRecord *) const override;
559 annotateFault(ArmFault *fault) override
// LoadStoreLitU64Declare: literal-addressed variant with the defaulted
// attribute flags (noAlloc, exclusive, plus elided continuation).
566 def template LoadStoreLitU64Declare {{
567 class %(class_name)s : public %(base_class)s
572 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
573 bool noAlloc = false, bool exclusive = false,
576 Fault execute(ExecContext *, Trace::InstRecord *) const override;
577 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
578 Fault completeAcc(PacketPtr, ExecContext *,
579 Trace::InstRecord *) const override;
582 annotateFault(ArmFault *fault) override
// LoadStoreImm64Constructor: forwards to the base, then (in a branch
// whose guard is elided — presumably when use_uops is set) splits the
// instruction into two microops: uops[0] = the memory access (delayed
// commit, first microop) and uops[1] = the writeback op from the
// wb_decl substitution (last microop).
589 def template LoadStoreImm64Constructor {{
590 %(class_name)s::%(class_name)s(ExtMachInst machInst,
591 IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
592 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
593 (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
597 assert(numMicroops >= 2);
598 uops = new StaticInstPtr[numMicroops];
599 uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
600 uops[0]->setDelayedCommit();
601 uops[0]->setFirstMicroop();
602 uops[1] = new %(wb_decl)s;
603 uops[1]->setLastMicroop();
// LoadStoreImmU64Constructor: forwards to the base; asserts this
// format never uses microops, then records the exclusive and
// acquire/release attributes via setExcAcRel. (The noAlloc parameter
// is not used on any visible line — its consumption is elided.)
608 def template LoadStoreImmU64Constructor {{
609 %(class_name)s::%(class_name)s(ExtMachInst machInst,
610 IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
611 bool noAlloc, bool exclusive, bool acrel)
612 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
616 assert(!%(use_uops)d);
617 setExcAcRel(exclusive, acrel);
// LoadStoreImmDU64Constructor: dual-register variant of the above —
// same no-microops assertion and setExcAcRel attribute recording.
621 def template LoadStoreImmDU64Constructor {{
622 %(class_name)s::%(class_name)s(ExtMachInst machInst,
623 IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
624 int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
625 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
626 _dest, _dest2, _base, _imm)
629 assert(!%(use_uops)d);
630 setExcAcRel(exclusive, acrel);
// StoreImmDEx64Constructor: store-exclusive pair — forwards all
// registers (result, dest, dest2, base) plus the immediate to the
// base; asserts no microop decomposition.
634 def template StoreImmDEx64Constructor {{
635 %(class_name)s::%(class_name)s(ExtMachInst machInst,
636 IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
637 IntRegIndex _base, int64_t _imm)
638 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
639 _result, _dest, _dest2, _base, _imm)
642 assert(!%(use_uops)d);
// LoadStoreReg64Constructor: register-offset counterpart of
// LoadStoreImm64Constructor — builds the same two-microop (access +
// writeback) decomposition; the access microop's trailing ctor
// arguments are elided in this excerpt.
647 def template LoadStoreReg64Constructor {{
648 %(class_name)s::%(class_name)s(ExtMachInst machInst,
649 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
650 ArmExtendType _type, uint32_t _shiftAmt)
651 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
652 _dest, _base, _offset, _type, _shiftAmt)
656 assert(numMicroops >= 2);
657 uops = new StaticInstPtr[numMicroops];
658 uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
660 uops[0]->setDelayedCommit();
661 uops[0]->setFirstMicroop();
662 uops[1] = new %(wb_decl)s;
663 uops[1]->setLastMicroop();
// LoadStoreRegU64Constructor: register-offset variant with attribute
// flags — asserts no microops and records exclusive/acquire-release
// via setExcAcRel (noAlloc's use is not visible in this excerpt).
668 def template LoadStoreRegU64Constructor {{
669 %(class_name)s::%(class_name)s(ExtMachInst machInst,
670 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
671 ArmExtendType _type, uint32_t _shiftAmt,
672 bool noAlloc, bool exclusive, bool acrel)
673 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
674 _dest, _base, _offset, _type, _shiftAmt)
677 assert(!%(use_uops)d);
678 setExcAcRel(exclusive, acrel);
// LoadStoreRaw64Constructor: minimal forwarding constructor — only
// dest and base, no immediate/offset, no visible extra setup.
682 def template LoadStoreRaw64Constructor {{
683 %(class_name)s::%(class_name)s(ExtMachInst machInst,
684 IntRegIndex _dest, IntRegIndex _base)
685 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
// LoadStoreEx64Constructor: forwarding constructor for exclusive
// accesses — passes dest, base, and the result register to the base.
691 def template LoadStoreEx64Constructor {{
692 %(class_name)s::%(class_name)s(ExtMachInst machInst,
693 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
694 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
695 _dest, _base, _result)
// LoadStoreLit64Constructor: literal-addressed counterpart of
// LoadStoreImm64Constructor — same two-microop (access + writeback)
// decomposition; guard around the microop setup is elided.
701 def template LoadStoreLit64Constructor {{
702 %(class_name)s::%(class_name)s(ExtMachInst machInst,
703 IntRegIndex _dest, int64_t _imm)
704 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
705 (IntRegIndex)_dest, _imm)
709 assert(numMicroops >= 2);
710 uops = new StaticInstPtr[numMicroops];
711 uops[0] = new %(acc_name)s(machInst, _dest, _imm);
712 uops[0]->setDelayedCommit();
713 uops[0]->setFirstMicroop();
714 uops[1] = new %(wb_decl)s;
715 uops[1]->setLastMicroop();
// LoadStoreLitU64Constructor: literal-addressed variant with attribute
// flags — asserts no microops, records exclusive/acquire-release.
720 def template LoadStoreLitU64Constructor {{
721 %(class_name)s::%(class_name)s(ExtMachInst machInst,
722 IntRegIndex _dest, int64_t _imm,
723 bool noAlloc, bool exclusive, bool acrel)
724 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
725 (IntRegIndex)_dest, _imm)
728 assert(!%(use_uops)d);
729 setExcAcRel(exclusive, acrel);
733 // Atomic operations in memory
// AmoOpExecute: atomic-mode execute() for atomic memory operations —
// performs the read-modify-write in one call via amoMemAtomic, passing
// the operation functor amo_op (its construction is elided here).
735 def template AmoOpExecute {{
736 Fault %(class_name)s::execute(ExecContext *xc,
737 Trace::InstRecord *traceData) const
752 const Fault fault = amoMemAtomic(xc, traceData, Mem, EA,
753 memAccessFlags, amo_op);
755 if (fault == NoFault) {
// AmoOpInitiateAcc: timing-mode initiation for atomic memory ops —
// returns the fault from initiateMemAMO directly (trailing call
// arguments are elided in this excerpt).
764 def template AmoOpInitiateAcc {{
765 Fault %(class_name)s::initiateAcc(ExecContext *xc,
766 Trace::InstRecord *traceData) const
780 return initiateMemAMO(xc, traceData, EA, Mem, memAccessFlags,
// AmoOpCompleteAcc: timing-mode completion for atomic memory ops —
// unpacks the returned packet into Mem (the old memory value) with
// getMem; destination writeback lines are elided.
785 def template AmoOpCompleteAcc {{
786 Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
787 Trace::InstRecord *traceData) const
792 // ARM instructions will not have a pkt if the predicate is false
793 getMem(pkt, Mem, traceData);
// AmoOpDeclare: class declaration for single-register atomic memory
// operations — ctor(dest, base, result) plus the standard
// execute/initiateAcc/completeAcc virtuals and annotateFault.
804 def template AmoOpDeclare {{
805 class %(class_name)s : public %(base_class)s
810 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
811 IntRegIndex _base, IntRegIndex _result);
813 Fault execute(ExecContext *, Trace::InstRecord *) const override;
814 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
815 Fault completeAcc(PacketPtr, ExecContext *,
816 Trace::InstRecord *) const override;
819 annotateFault(ArmFault *fault) override
// AmoOpConstructor: forwards registers to the base, then explicitly
// clears the IsStore/IsLoad flags — the AMO is classified as neither
// a plain load nor a plain store (the base class presumably sets one
// of them; confirm against the base class).
827 def template AmoOpConstructor {{
828 %(class_name)s::%(class_name)s(ExtMachInst machInst,
829 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
830 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
831 _dest, _base, _result)
834 flags[IsStore] = false;
835 flags[IsLoad] = false;
// AmoPairOpDeclare: declaration for paired-register atomic memory ops
// (e.g. CASP-style — inferred from the Pair naming and the paired-
// register constructor below; confirm against full file). Same
// ctor(dest, base, result) shape and virtual set as AmoOpDeclare.
839 def template AmoPairOpDeclare {{
840 class %(class_name)s : public %(base_class)s
847 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
848 IntRegIndex _base, IntRegIndex _result);
850 Fault execute(ExecContext *, Trace::InstRecord *) const override;
851 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
852 Fault completeAcc(PacketPtr, ExecContext *,
853 Trace::InstRecord *) const override;
856 annotateFault(ArmFault *fault) override
// AmoPairOpConstructor: forwards to the base, then manually registers
// the sibling registers of a register pair as extra operands:
// d2 = dest + 1 and r2 = result + 1 are appended to the source-
// register list (their positions remembered in d2_src / r2_src), and
// r2 is additionally appended as a destination (position in r2_dst).
// IsStore/IsLoad are cleared, as in AmoOpConstructor.
864 def template AmoPairOpConstructor {{
865 %(class_name)s::%(class_name)s(ExtMachInst machInst,
866 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
867 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
868 _dest, _base, _result)
872 uint32_t d2 = RegId(IntRegClass, dest).index() + 1 ;
873 uint32_t r2 = RegId(IntRegClass, result).index() + 1 ;
875 d2_src = _numSrcRegs ;
876 _srcRegIdx[_numSrcRegs++] = RegId(IntRegClass, d2);
877 r2_src = _numSrcRegs ;
878 _srcRegIdx[_numSrcRegs++] = RegId(IntRegClass, r2);
879 r2_dst = _numDestRegs ;
880 _destRegIdx[_numDestRegs++] = RegId(IntRegClass, r2);
881 flags[IsStore] = false;
882 flags[IsLoad] = false;
// AmoArithmeticOpDeclare: declaration for arithmetic atomic memory
// operations — same ctor(dest, base, result) shape and virtual set as
// AmoOpDeclare.
886 def template AmoArithmeticOpDeclare {{
887 class %(class_name)s : public %(base_class)s
892 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
893 IntRegIndex _base, IntRegIndex _result);
895 Fault execute(ExecContext *, Trace::InstRecord *) const override;
896 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
897 Fault completeAcc(PacketPtr, ExecContext *,
898 Trace::InstRecord *) const override;
901 annotateFault(ArmFault *fault) override
908 def template AmoArithmeticOpConstructor {{
909 %(class_name)s::%(class_name)s(ExtMachInst machInst,
910 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
911 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
912 _dest, _base, _result)
916 uint32_t r2 = RegId(IntRegClass, dest).index() ;
917 flags[IsStore] = false;
918 flags[IsLoad] = false;