3 // Copyright (c) 2011-2014, 2017, 2019 ARM Limited
6 // The license below extends only to copyright in the software and shall
7 // not be construed as granting a license to any other intellectual
8 // property including but not limited to intellectual property relating
9 // to a hardware implementation of the functionality of the software
10 // licensed hereunder. You may use the software subject to the license
11 // terms below provided that you ensure that this notice is replicated
12 // unmodified and in its entirety in all distributions of the software,
13 // modified or unmodified, in source code or in binary form.
15 // Redistribution and use in source and binary forms, with or without
16 // modification, are permitted provided that the following conditions are
17 // met: redistributions of source code must retain the above copyright
18 // notice, this list of conditions and the following disclaimer;
19 // redistributions in binary form must reproduce the above copyright
20 // notice, this list of conditions and the following disclaimer in the
21 // documentation and/or other materials provided with the distribution;
22 // neither the name of the copyright holders nor the names of its
23 // contributors may be used to endorse or promote products derived from
24 // this software without specific prior written permission.
26 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
27 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
28 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
29 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
30 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
31 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
32 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
33 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
34 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
35 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
36 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Shared C++ snippet spliced into load/store templates: when the base
// register is SP, the address has any of its low four bits set (i.e. is
// not 16-byte aligned), and SP alignment checking is enabled for this
// thread context, return an SPAlignmentFault.
39 SPAlignmentCheckCode = '''
40     if (baseIsSP && bits(XBase, 3, 0) &&
41         SPAlignmentCheckEnabled(xc->tcBase())) {
42         return std::make_shared<SPAlignmentFault>();
// Atomic/functional-mode execute() for 64-bit integer loads: reads memory
// little-endian at the computed EA; subsequent fault-gated step is the
// register writeback — TODO confirm, interior lines not visible here.
47 def template Load64Execute {{
48     Fault %(class_name)s::execute(ExecContext *xc,
49                                   Trace::InstRecord *traceData) const
52         Fault fault = NoFault;
58         if (fault == NoFault) {
59             fault = readMemAtomicLE(xc, traceData, EA, Mem, memAccessFlags);
63         if (fault == NoFault) {
// Atomic/functional-mode execute() for 64-bit floating-point loads; same
// little-endian read path as Load64Execute, with the FP-specific parts of
// the body elided from this view.
71 def template Load64FpExecute {{
72     Fault %(class_name)s::execute(ExecContext *xc,
73                                   Trace::InstRecord *traceData) const
76         Fault fault = NoFault;
82         if (fault == NoFault) {
83             fault = readMemAtomicLE(xc, traceData, EA, Mem, memAccessFlags);
86         if (fault == NoFault) {
// Atomic/functional-mode execute() for 64-bit stores: little-endian write
// at EA; the NULL final argument means no write-result (i.e. not a
// store-exclusive — compare StoreEx64Execute below).
95 def template Store64Execute {{
96     Fault %(class_name)s::execute(ExecContext *xc,
97                                   Trace::InstRecord *traceData) const
100         Fault fault = NoFault;
106         if (fault == NoFault) {
110         if (fault == NoFault) {
111             fault = writeMemAtomicLE(xc, traceData, Mem, EA,
112                                      memAccessFlags, NULL);
115         if (fault == NoFault) {
// Timing-mode initiateAcc() for 64-bit stores: issues the little-endian
// write request; completion is handled later in Store64CompleteAcc.
123 def template Store64InitiateAcc {{
124     Fault %(class_name)s::initiateAcc(ExecContext *xc,
125                                       Trace::InstRecord *traceData) const
128         Fault fault = NoFault;
134         if (fault == NoFault) {
138         if (fault == NoFault) {
139             fault = writeMemTimingLE(xc, traceData, Mem, EA, memAccessFlags,
// Atomic/functional-mode execute() for store-exclusive: performs the
// little-endian write and captures the exclusive-monitor outcome in
// writeResult (0/1 success indicator — TODO confirm, consumer elided).
147 def template StoreEx64Execute {{
148     Fault %(class_name)s::execute(ExecContext *xc,
149                                   Trace::InstRecord *traceData) const
152         Fault fault = NoFault;
158         if (fault == NoFault) {
162         uint64_t writeResult = 0;
163         if (fault == NoFault) {
164             fault = writeMemAtomicLE(xc, traceData, Mem, EA, memAccessFlags,
168         if (fault == NoFault) {
172         if (fault == NoFault) {
// Timing-mode initiateAcc() for store-exclusive: issues the write; the
// exclusive result is retrieved in StoreEx64CompleteAcc via the packet.
180 def template StoreEx64InitiateAcc {{
181     Fault %(class_name)s::initiateAcc(ExecContext *xc,
182                                       Trace::InstRecord *traceData) const
185         Fault fault = NoFault;
191         if (fault == NoFault) {
195         if (fault == NoFault) {
196             fault = writeMemTimingLE(xc, traceData, Mem, EA, memAccessFlags,
// Timing-mode initiateAcc() for loads: kicks off the memory read; the
// destination register is written later in Load64CompleteAcc.
204 def template Load64InitiateAcc {{
205     Fault %(class_name)s::initiateAcc(ExecContext *xc,
206                                       Trace::InstRecord *traceData) const
209         Fault fault = NoFault;
215         if (fault == NoFault) {
216             fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
// Timing-mode completeAcc() for loads: unpacks the little-endian data
// from the response packet into Mem, then (fault-gated) performs the
// register writeback — writeback lines elided from this view.
223 def template Load64CompleteAcc {{
224     Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
225                                       Trace::InstRecord *traceData) const
227         Fault fault = NoFault;
232         // ARM instructions will not have a pkt if the predicate is false
233         getMemLE(pkt, Mem, traceData);
235         if (fault == NoFault) {
239         if (fault == NoFault) {
// Timing-mode completeAcc() for plain stores; body not visible here —
// presumably a trivial NoFault return, as plain stores have no result.
247 def template Store64CompleteAcc {{
248     Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
249                                       Trace::InstRecord *traceData) const
// Timing-mode completeAcc() for store-exclusive: the exclusive-monitor
// outcome travels back as the request's extra data and is consumed in a
// fault-gated step (result-register write — elided from this view).
255 def template StoreEx64CompleteAcc {{
256     Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
257                                       Trace::InstRecord *traceData) const
259         Fault fault = NoFault;
264         uint64_t writeResult = pkt->req->getExtraData();
267         if (fault == NoFault) {
// Class declaration for DC (data-cache maintenance) store-style ops:
// note the destination is a MiscRegIndex, unlike the IntRegIndex used by
// ordinary load/store declares below. Declares the standard
// execute/initiateAcc/completeAcc overrides plus the fault-annotation hook.
275 def template DCStore64Declare {{
276     class %(class_name)s : public %(base_class)s
281         %(class_name)s(ExtMachInst machInst, IntRegIndex _base,
282                        MiscRegIndex _dest, uint64_t _imm);
284         Fault execute(ExecContext *, Trace::InstRecord *) const override;
285         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
286         Fault completeAcc(PacketPtr, ExecContext *,
287                           Trace::InstRecord *) const override;
290         annotateFault(ArmISA::ArmFault *fault) override
// Constructor for DC store ops; asserts the instruction is not the
// microcoded (use_uops) flavour.
297 def template DCStore64Constructor {{
298     %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base,
299                                    MiscRegIndex _dest, uint64_t _imm)
300         : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
304         assert(!%(use_uops)d);
// Atomic-mode execute() for DC ops: writes op_size bytes at EA with no
// data buffer (NULL) and an all-true byte-enable vector — i.e. touches
// the whole region without sourcing register data (e.g. DC ZVA-style).
308 def template DCStore64Execute {{
309     Fault %(class_name)s::execute(ExecContext *xc,
310                                   Trace::InstRecord *traceData) const
313         Fault fault = NoFault;
320         if (fault == NoFault) {
324         if (fault == NoFault) {
325             fault = writeMemAtomic(xc, NULL, EA,
326                                    op_size, memAccessFlags, NULL,
327                                    std::vector<bool>(op_size, true));
330         if (fault == NoFault) {
// Timing-mode initiateAcc() for DC ops: same NULL-data, all-bytes-enabled
// write as DCStore64Execute, issued as a timing request.
338 def template DCStore64InitiateAcc {{
339     Fault %(class_name)s::initiateAcc(ExecContext *xc,
340                                       Trace::InstRecord *traceData) const
343         Fault fault = NoFault;
349         if (fault == NoFault) {
353         if (fault == NoFault) {
354             fault = writeMemTiming(xc, NULL, EA, op_size,
355                                    memAccessFlags, NULL,
356                                    std::vector<bool>(op_size, true));
// Class declaration for immediate-offset 64-bit load/store (dest, base,
// signed immediate); standard memory-instruction override set.
364 def template LoadStoreImm64Declare {{
365     class %(class_name)s : public %(base_class)s
370         %(class_name)s(ExtMachInst machInst,
371                        IntRegIndex _dest, IntRegIndex _base, int64_t _imm);
373         Fault execute(ExecContext *, Trace::InstRecord *) const override;
374         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
375         Fault completeAcc(PacketPtr, ExecContext *,
376                           Trace::InstRecord *) const override;
379         annotateFault(ArmISA::ArmFault *fault) override
// As LoadStoreImm64Declare, but the constructor additionally takes
// noAlloc/exclusive (and, per the constructor below, acrel) flags with
// defaults — used for cache-hint / exclusive / acquire-release variants.
386 def template LoadStoreImmU64Declare {{
387     class %(class_name)s : public %(base_class)s
392         %(class_name)s(ExtMachInst machInst,
393                        IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
394                        bool noAlloc = false, bool exclusive = false,
397         Fault execute(ExecContext *, Trace::InstRecord *) const override;
398         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
399         Fault completeAcc(PacketPtr, ExecContext *,
400                           Trace::InstRecord *) const override;
403         annotateFault(ArmISA::ArmFault *fault) override
// Declaration for the dual-register (pair) immediate-offset form: two
// destination registers plus the same noAlloc/exclusive/acrel flags.
410 def template LoadStoreImmDU64Declare {{
411     class %(class_name)s : public %(base_class)s
416         %(class_name)s(ExtMachInst machInst,
417                        IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
418                        int64_t _imm = 0, bool noAlloc = false, bool exclusive = false,
421         Fault execute(ExecContext *, Trace::InstRecord *) const override;
422         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
423         Fault completeAcc(PacketPtr, ExecContext *,
424                           Trace::InstRecord *) const override;
427         annotateFault(ArmISA::ArmFault *fault) override
// Declaration for store-exclusive pair (e.g. STXP-style): takes a result
// register for the exclusive status in addition to the two data registers.
434 def template StoreImmDEx64Declare {{
436      * Static instruction class for "%(mnemonic)s".
438     class %(class_name)s : public %(base_class)s
443         %(class_name)s(ExtMachInst machInst,
444                        IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
445                        IntRegIndex _base, int64_t _imm = 0);
447         Fault execute(ExecContext *, Trace::InstRecord *) const override;
448         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
449         Fault completeAcc(PacketPtr, ExecContext *,
450                           Trace::InstRecord *) const override;
// Declaration for register-offset load/store: offset register plus an
// extend type and shift amount (AArch64 extended-register addressing).
455 def template LoadStoreReg64Declare {{
456     class %(class_name)s : public %(base_class)s
461         %(class_name)s(ExtMachInst machInst,
462                        IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
463                        ArmExtendType _type, uint32_t _shiftAmt);
465         Fault execute(ExecContext *, Trace::InstRecord *) const override;
466         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
467         Fault completeAcc(PacketPtr, ExecContext *,
468                           Trace::InstRecord *) const override;
471         annotateFault(ArmISA::ArmFault *fault) override
// Register-offset declaration with the noAlloc/exclusive(/acrel) flag
// set, mirroring LoadStoreImmU64Declare.
478 def template LoadStoreRegU64Declare {{
479     class %(class_name)s : public %(base_class)s
484         %(class_name)s(ExtMachInst machInst,
485                        IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
486                        ArmExtendType _type, uint32_t _shiftAmt,
487                        bool noAlloc = false, bool exclusive = false,
490         Fault execute(ExecContext *, Trace::InstRecord *) const override;
491         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
492         Fault completeAcc(PacketPtr, ExecContext *,
493                           Trace::InstRecord *) const override;
496         annotateFault(ArmISA::ArmFault *fault) override
// Minimal "raw" load/store declaration: just dest (+ base, per the
// matching constructor below); no immediate, offset, or flags.
503 def template LoadStoreRaw64Declare {{
504     class %(class_name)s : public %(base_class)s
509         %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
512         Fault execute(ExecContext *, Trace::InstRecord *) const override;
513         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
514         Fault completeAcc(PacketPtr, ExecContext *,
515                           Trace::InstRecord *) const override;
518         annotateFault(ArmISA::ArmFault *fault) override
// Declaration for exclusive load/store: carries a result register for the
// exclusive-monitor status alongside dest and base.
525 def template LoadStoreEx64Declare {{
526     class %(class_name)s : public %(base_class)s
531         %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
532                        IntRegIndex _base, IntRegIndex _result);
534         Fault execute(ExecContext *, Trace::InstRecord *) const override;
535         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
536         Fault completeAcc(PacketPtr, ExecContext *,
537                           Trace::InstRecord *) const override;
540         annotateFault(ArmISA::ArmFault *fault) override
// Declaration for PC-literal load/store: no base register, only dest and
// a signed immediate (PC-relative addressing).
547 def template LoadStoreLit64Declare {{
548     class %(class_name)s : public %(base_class)s
553         %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);
555         Fault execute(ExecContext *, Trace::InstRecord *) const override;
556         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
557         Fault completeAcc(PacketPtr, ExecContext *,
558                           Trace::InstRecord *) const override;
561         annotateFault(ArmISA::ArmFault *fault) override
// PC-literal declaration with the noAlloc/exclusive(/acrel) flag set,
// mirroring the other *U64Declare templates.
568 def template LoadStoreLitU64Declare {{
569     class %(class_name)s : public %(base_class)s
574         %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
575                        bool noAlloc = false, bool exclusive = false,
578         Fault execute(ExecContext *, Trace::InstRecord *) const override;
579         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
580         Fault completeAcc(PacketPtr, ExecContext *,
581                           Trace::InstRecord *) const override;
584         annotateFault(ArmISA::ArmFault *fault) override
// Constructor for immediate-offset load/store. The visible branch is the
// microcoded path: builds (at least) two uops — the memory access
// (%(acc_name)s, delayed-commit, first) and a writeback (%(wb_decl)s, last).
591 def template LoadStoreImm64Constructor {{
592     %(class_name)s::%(class_name)s(ExtMachInst machInst,
593             IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
594          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
595                  (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
599             assert(numMicroops >= 2);
600             uops = new StaticInstPtr[numMicroops];
601             uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
602             uops[0]->setDelayedCommit();
603             uops[0]->setFirstMicroop();
604             uops[1] = new %(wb_decl)s;
605             uops[1]->setLastMicroop();
// Constructor for the flagged immediate-offset variant: never microcoded
// (asserted), and records the exclusive/acquire-release attributes.
610 def template LoadStoreImmU64Constructor {{
611     %(class_name)s::%(class_name)s(ExtMachInst machInst,
612             IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
613             bool noAlloc, bool exclusive, bool acrel)
614          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
618         assert(!%(use_uops)d);
619         setExcAcRel(exclusive, acrel);
// Constructor for the dual-register (pair) flagged variant; same
// non-microcoded assertion and exclusive/acrel handling as above.
623 def template LoadStoreImmDU64Constructor {{
624     %(class_name)s::%(class_name)s(ExtMachInst machInst,
625             IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
626             int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
627          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
628                  _dest, _dest2, _base, _imm)
631         assert(!%(use_uops)d);
632         setExcAcRel(exclusive, acrel);
// Constructor for store-exclusive pair: result register first, then the
// two data registers; never microcoded.
636 def template StoreImmDEx64Constructor {{
637     %(class_name)s::%(class_name)s(ExtMachInst machInst,
638             IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
639             IntRegIndex _base, int64_t _imm)
640          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
641                  _result, _dest, _dest2, _base, _imm)
644         assert(!%(use_uops)d);
// Constructor for register-offset load/store; visible branch is the
// microcoded path (access uop + writeback uop), parallel to
// LoadStoreImm64Constructor.
649 def template LoadStoreReg64Constructor {{
650     %(class_name)s::%(class_name)s(ExtMachInst machInst,
651             IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
652             ArmExtendType _type, uint32_t _shiftAmt)
653          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
654                  _dest, _base, _offset, _type, _shiftAmt)
658             assert(numMicroops >= 2);
659             uops = new StaticInstPtr[numMicroops];
660             uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
662             uops[0]->setDelayedCommit();
663             uops[0]->setFirstMicroop();
664             uops[1] = new %(wb_decl)s;
665             uops[1]->setLastMicroop();
// Constructor for the flagged register-offset variant; non-microcoded,
// with exclusive/acquire-release attributes recorded.
670 def template LoadStoreRegU64Constructor {{
671     %(class_name)s::%(class_name)s(ExtMachInst machInst,
672             IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
673             ArmExtendType _type, uint32_t _shiftAmt,
674             bool noAlloc, bool exclusive, bool acrel)
675          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
676                  _dest, _base, _offset, _type, _shiftAmt)
679         assert(!%(use_uops)d);
680         setExcAcRel(exclusive, acrel);
// Constructor for the raw (dest + base only) form; body beyond the
// base-class delegation is elided from this view.
684 def template LoadStoreRaw64Constructor {{
685     %(class_name)s::%(class_name)s(ExtMachInst machInst,
686             IntRegIndex _dest, IntRegIndex _base)
687          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
// Constructor for exclusive load/store (dest, base, result); body beyond
// the base-class delegation is elided from this view.
693 def template LoadStoreEx64Constructor {{
694     %(class_name)s::%(class_name)s(ExtMachInst machInst,
695             IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
696          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
697                  _dest, _base, _result)
// Constructor for PC-literal load/store; visible branch is the microcoded
// path (access uop + writeback uop), parallel to the Imm/Reg constructors.
703 def template LoadStoreLit64Constructor {{
704     %(class_name)s::%(class_name)s(ExtMachInst machInst,
705             IntRegIndex _dest, int64_t _imm)
706          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
707                  (IntRegIndex)_dest, _imm)
711             assert(numMicroops >= 2);
712             uops = new StaticInstPtr[numMicroops];
713             uops[0] = new %(acc_name)s(machInst, _dest, _imm);
714             uops[0]->setDelayedCommit();
715             uops[0]->setFirstMicroop();
716             uops[1] = new %(wb_decl)s;
717             uops[1]->setLastMicroop();
// Constructor for the flagged PC-literal variant; non-microcoded, with
// exclusive/acquire-release attributes recorded.
722 def template LoadStoreLitU64Constructor {{
723     %(class_name)s::%(class_name)s(ExtMachInst machInst,
724             IntRegIndex _dest, int64_t _imm,
725             bool noAlloc, bool exclusive, bool acrel)
726          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
727                  (IntRegIndex)_dest, _imm)
730         assert(!%(use_uops)d);
731         setExcAcRel(exclusive, acrel);
735 // Atomic operations in memory
// Atomic/functional-mode execute() for atomic memory operations (AMOs):
// performs the read-modify-write in one call via amoMemAtomicLE with the
// instruction-specific amo_op functor; fault-gated writeback follows.
737 def template AmoOpExecute {{
738     Fault %(class_name)s::execute(ExecContext *xc,
739                                   Trace::InstRecord *traceData) const
754         const Fault fault = amoMemAtomicLE(xc, traceData, Mem, EA,
755                                            memAccessFlags, amo_op);
757         if (fault == NoFault) {
// Timing-mode initiateAcc() for AMOs: issues the atomic request via
// initiateMemAMO; the loaded-back value is handled in AmoOpCompleteAcc.
766 def template AmoOpInitiateAcc {{
767     Fault %(class_name)s::initiateAcc(ExecContext *xc,
768                                       Trace::InstRecord *traceData) const
782         return initiateMemAMO(xc, traceData, EA, Mem, memAccessFlags,
// Timing-mode completeAcc() for AMOs: unpacks the little-endian old value
// returned in the packet into Mem (writeback lines elided from this view).
787 def template AmoOpCompleteAcc {{
788     Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
789                                       Trace::InstRecord *traceData) const
794         // ARM instructions will not have a pkt if the predicate is false
795         getMemLE(pkt, Mem, traceData);
// Class declaration for single-register AMO instructions (dest, base,
// result); standard memory-instruction override set.
806 def template AmoOpDeclare {{
807     class %(class_name)s : public %(base_class)s
812         %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
813                        IntRegIndex _base, IntRegIndex _result);
815         Fault execute(ExecContext *, Trace::InstRecord *) const override;
816         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
817         Fault completeAcc(PacketPtr, ExecContext *,
818                           Trace::InstRecord *) const override;
821         annotateFault(ArmISA::ArmFault *fault) override
// Constructor for single-register AMOs. Clears IsStore/IsLoad so the AMO
// is not classified as a plain load or store by the pipeline — presumably
// it carries a dedicated atomic flag instead; TODO confirm in base class.
829 def template AmoOpConstructor {{
830     %(class_name)s::%(class_name)s(ExtMachInst machInst,
831             IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
832          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
833                  _dest, _base, _result)
836         flags[IsStore] = false;
837         flags[IsLoad] = false;
// Class declaration for paired AMO instructions (e.g. CASP-style); the
// extra pair-register bookkeeping members are elided from this view.
841 def template AmoPairOpDeclare {{
842     class %(class_name)s : public %(base_class)s
849         %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
850                        IntRegIndex _base, IntRegIndex _result);
852         Fault execute(ExecContext *, Trace::InstRecord *) const override;
853         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
854         Fault completeAcc(PacketPtr, ExecContext *,
855                           Trace::InstRecord *) const override;
858         annotateFault(ArmISA::ArmFault *fault) override
// Constructor for paired AMOs: manually registers the implicit second
// (odd, index+1) register of each dest/result pair as extra source and
// destination operands, recording their operand slots in d2_src/r2_src/
// r2_dst; like AmoOpConstructor it clears the IsStore/IsLoad flags.
866 def template AmoPairOpConstructor {{
867     %(class_name)s::%(class_name)s(ExtMachInst machInst,
868             IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
869          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
870                  _dest, _base, _result)
874         uint32_t d2 = RegId(IntRegClass, dest).index() + 1 ;
875         uint32_t r2 = RegId(IntRegClass, result).index() + 1 ;
877         d2_src = _numSrcRegs ;
878         setSrcRegIdx(_numSrcRegs++, RegId(IntRegClass, d2));
879         r2_src = _numSrcRegs ;
880         setSrcRegIdx(_numSrcRegs++, RegId(IntRegClass, r2));
881         r2_dst = _numDestRegs ;
882         setDestRegIdx(_numDestRegs++, RegId(IntRegClass, r2));
883         flags[IsStore] = false;
884         flags[IsLoad] = false;
// Class declaration for arithmetic AMO instructions (LDADD/LDCLR-style);
// same interface shape as AmoOpDeclare.
888 def template AmoArithmeticOpDeclare {{
889     class %(class_name)s : public %(base_class)s
894         %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
895                        IntRegIndex _base, IntRegIndex _result);
897         Fault execute(ExecContext *, Trace::InstRecord *) const override;
898         Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
899         Fault completeAcc(PacketPtr, ExecContext *,
900                           Trace::InstRecord *) const override;
903         annotateFault(ArmISA::ArmFault *fault) override
// Constructor for arithmetic AMOs (template continues past this chunk).
// Computes the dest register index (r2) — its consumer is not visible
// here — and clears IsStore/IsLoad, matching the other AMO constructors.
910 def template AmoArithmeticOpConstructor {{
911     %(class_name)s::%(class_name)s(ExtMachInst machInst,
912             IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
913          : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
914                  _dest, _base, _result)
918         uint32_t r2 = RegId(IntRegClass, dest).index() ;
919         flags[IsStore] = false;
920         flags[IsLoad] = false;