3 // Copyright (c) 2011-2013 ARM Limited
6 // The license below extends only to copyright in the software and shall
7 // not be construed as granting a license to any other intellectual
8 // property including but not limited to intellectual property relating
9 // to a hardware implementation of the functionality of the software
10 // licensed hereunder. You may use the software subject to the license
11 // terms below provided that you ensure that this notice is replicated
12 // unmodified and in its entirety in all distributions of the software,
13 // modified or unmodified, in source code or in binary form.
15 // Redistribution and use in source and binary forms, with or without
16 // modification, are permitted provided that the following conditions are
17 // met: redistributions of source code must retain the above copyright
18 // notice, this list of conditions and the following disclaimer;
19 // redistributions in binary form must reproduce the above copyright
20 // notice, this list of conditions and the following disclaimer in the
21 // documentation and/or other materials provided with the distribution;
22 // neither the name of the copyright holders nor the names of its
23 // contributors may be used to endorse or promote products derived from
24 // this software without specific prior written permission.
26 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
27 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
28 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
29 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
30 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
31 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
32 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
33 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
34 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
35 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
36 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
38 // Authors: Gabe Black
// Shared C++ snippet spliced into load/store templates: when the base
// register is SP, the address has any of its low 4 bits set (i.e. is not
// 16-byte aligned) and SP alignment checking is enabled for this context,
// raise an SP alignment fault.
// NOTE(review): this excerpt is a partial dump; the closing brace and
// quote of the snippet are elided here.
41 SPAlignmentCheckCode = '''
42 if (baseIsSP && bits(XBase, 3, 0) &&
43 SPAlignmentCheckEnabled(xc->tcBase())) {
44 return new SPAlignmentFault();
// Template: atomic-mode execute() for 64-bit loads. Visible flow: start
// with NoFault, then (EA computation elided) read memory into Mem via
// readMemAtomic; the trailing fault check presumably guards the
// destination-register writeback (elided) -- TODO confirm against full source.
49 def template Load64Execute {{
50 Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
51 Trace::InstRecord *traceData) const
54 Fault fault = NoFault;
60 if (fault == NoFault) {
61 fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
65 if (fault == NoFault) {
// Template: atomic-mode execute() for 64-bit stores. Visible flow: compute
// store data / EA (elided), then write Mem to memory via writeMemAtomic
// (NULL result pointer: no write result requested); the final fault check
// presumably guards post-store register updates (elided) -- TODO confirm.
73 def template Store64Execute {{
74 Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
75 Trace::InstRecord *traceData) const
78 Fault fault = NoFault;
84 if (fault == NoFault) {
88 if (fault == NoFault) {
89 fault = writeMemAtomic(xc, traceData, Mem, EA,
90 memAccessFlags, NULL);
93 if (fault == NoFault) {
// Template: timing-mode initiateAcc() for 64-bit stores -- launches the
// write via writeMemTiming; completion is handled separately in
// completeAcc(). Trailing arguments of the call are elided in this excerpt.
101 def template Store64InitiateAcc {{
102 Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT *xc,
103 Trace::InstRecord *traceData) const
106 Fault fault = NoFault;
112 if (fault == NoFault) {
116 if (fault == NoFault) {
117 fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
// Template: atomic-mode execute() for 64-bit store-exclusive. A local
// writeResult (pass/fail indication of the exclusive write) is declared and
// presumably passed by address to writeMemAtomic (trailing args elided) and
// then consumed by the elided result-register update -- TODO confirm.
125 def template StoreEx64Execute {{
126 Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
127 Trace::InstRecord *traceData) const
130 Fault fault = NoFault;
136 if (fault == NoFault) {
140 uint64_t writeResult = 0;
141 if (fault == NoFault) {
142 fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
146 if (fault == NoFault) {
150 if (fault == NoFault) {
// Template: timing-mode initiateAcc() for store-exclusive -- initiates the
// write with writeMemTiming; the exclusive result is picked up later in
// StoreEx64CompleteAcc via the packet's extra data.
158 def template StoreEx64InitiateAcc {{
159 Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT *xc,
160 Trace::InstRecord *traceData) const
163 Fault fault = NoFault;
169 if (fault == NoFault) {
173 if (fault == NoFault) {
174 fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
// Template: timing-mode initiateAcc() for 64-bit loads -- computes EA
// (elided) and issues the read with readMemTiming; register writeback
// happens in Load64CompleteAcc when the response packet arrives.
182 def template Load64InitiateAcc {{
183 Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT *xc,
184 Trace::InstRecord *traceData) const
187 Fault fault = NoFault;
193 if (fault == NoFault) {
194 fault = readMemTiming(xc, traceData, EA, Mem, memAccessFlags);
// Template: timing-mode completeAcc() for 64-bit loads -- unpacks the
// response packet's data into Mem with getMem(), then (writeback elided)
// presumably commits the destination register if no fault occurred.
201 def template Load64CompleteAcc {{
202 Fault %(class_name)s::completeAcc(PacketPtr pkt,
203 CPU_EXEC_CONTEXT *xc,
204 Trace::InstRecord *traceData) const
206 Fault fault = NoFault;
211 // ARM instructions will not have a pkt if the predicate is false
212 getMem(pkt, Mem, traceData);
214 if (fault == NoFault) {
218 if (fault == NoFault) {
// Template: timing-mode completeAcc() for ordinary 64-bit stores. Body is
// elided in this excerpt; a plain store presumably has nothing to do at
// completion and returns NoFault -- TODO confirm against full source.
226 def template Store64CompleteAcc {{
227 Fault %(class_name)s::completeAcc(PacketPtr pkt,
228 CPU_EXEC_CONTEXT *xc,
229 Trace::InstRecord *traceData) const
// Template: timing-mode completeAcc() for store-exclusive -- retrieves the
// exclusive-write result from the request's extra data and (elided)
// presumably writes it to the result register if no fault occurred.
235 def template StoreEx64CompleteAcc {{
236 Fault %(class_name)s::completeAcc(PacketPtr pkt,
237 CPU_EXEC_CONTEXT *xc,
238 Trace::InstRecord *traceData) const
240 Fault fault = NoFault;
245 uint64_t writeResult = pkt->req->getExtraData();
248 if (fault == NoFault) {
// Template: class declaration for DC (data-cache maintenance) store-style
// instructions. Constructor takes base register, dest register and a
// 64-bit immediate; initiate/complete access declarations and a fault
// annotation hook are pulled in via sub-templates.
256 def template DCStore64Declare {{
257 class %(class_name)s : public %(base_class)s
262 %(class_name)s(ExtMachInst machInst, IntRegIndex _base, IntRegIndex _dest, uint64_t _imm);
265 %(InitiateAccDeclare)s
266 %(CompleteAccDeclare)s
269 annotateFault(ArmFault *fault) {
// Template: constructor for DC store-style instructions -- forwards the
// operands to the base class; the assert documents that these instructions
// are never built from microops (use_uops must be 0 for this template).
275 def template DCStore64Constructor {{
276 %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base, IntRegIndex _dest, uint64_t _imm)
277 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
278 (IntRegIndex)_base, _dest, _imm)
281 assert(!%(use_uops)d);
// Template: atomic-mode execute() for DC store-style instructions. Writes
// with a NULL data pointer (no payload) of op_size at EA -- the access
// exists for its side effect on the memory system rather than its data.
285 def template DCStore64Execute {{
286 Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
287 Trace::InstRecord *traceData) const
290 Fault fault = NoFault;
297 if (fault == NoFault) {
301 if (fault == NoFault) {
302 fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
305 if (fault == NoFault) {
// Template: timing-mode initiateAcc() for DC store-style instructions --
// same NULL-payload writeMem as DCStore64Execute, but issued as the
// initiation half of a split timing access.
313 def template DCStore64InitiateAcc {{
314 Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT *xc,
315 Trace::InstRecord *traceData) const
318 Fault fault = NoFault;
324 if (fault == NoFault) {
328 if (fault == NoFault) {
329 fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
// Template: class declaration for 64-bit load/store with immediate offset
// (dest, base, signed immediate).
337 def template LoadStoreImm64Declare {{
338 class %(class_name)s : public %(base_class)s
343 %(class_name)s(ExtMachInst machInst,
344 IntRegIndex _dest, IntRegIndex _base, int64_t _imm);
347 %(InitiateAccDeclare)s
348 %(CompleteAccDeclare)s
351 annotateFault(ArmFault *fault) {
// Template: as LoadStoreImm64Declare, but with extra defaulted flags
// (noAlloc, exclusive, and -- per the elided parameter -- presumably an
// acquire/release flag, cf. setExcAcRel in the matching constructor).
357 def template LoadStoreImmU64Declare {{
358 class %(class_name)s : public %(base_class)s
363 %(class_name)s(ExtMachInst machInst,
364 IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
365 bool noAlloc = false, bool exclusive = false,
369 %(InitiateAccDeclare)s
370 %(CompleteAccDeclare)s
373 annotateFault(ArmFault *fault) {
// Template: declaration for the paired (double-register) immediate
// load/store variant -- two destination registers plus the same
// noAlloc/exclusive/acquire-release flag set.
379 def template LoadStoreImmDU64Declare {{
380 class %(class_name)s : public %(base_class)s
385 %(class_name)s(ExtMachInst machInst,
386 IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
387 int64_t _imm = 0, bool noAlloc = false, bool exclusive = false,
391 %(InitiateAccDeclare)s
392 %(CompleteAccDeclare)s
395 annotateFault(ArmFault *fault) {
// Template: declaration for store-exclusive-pair -- takes a result
// register (exclusive pass/fail) in addition to the two data registers,
// base and immediate.
401 def template StoreImmDEx64Declare {{
403 * Static instruction class for "%(mnemonic)s".
405 class %(class_name)s : public %(base_class)s
410 %(class_name)s(ExtMachInst machInst,
411 IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
412 IntRegIndex _base, int64_t _imm = 0);
416 %(InitiateAccDeclare)s
418 %(CompleteAccDeclare)s
// Template: declaration for register-offset load/store -- offset register
// with an extend type and shift amount instead of an immediate.
423 def template LoadStoreReg64Declare {{
424 class %(class_name)s : public %(base_class)s
429 %(class_name)s(ExtMachInst machInst,
430 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
431 ArmExtendType _type, uint32_t _shiftAmt);
434 %(InitiateAccDeclare)s
435 %(CompleteAccDeclare)s
438 annotateFault(ArmFault *fault) {
// Template: register-offset variant with the additional defaulted
// noAlloc/exclusive (and elided acquire-release) flags.
444 def template LoadStoreRegU64Declare {{
445 class %(class_name)s : public %(base_class)s
450 %(class_name)s(ExtMachInst machInst,
451 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
452 ArmExtendType _type, uint32_t _shiftAmt,
453 bool noAlloc = false, bool exclusive = false,
457 %(InitiateAccDeclare)s
458 %(CompleteAccDeclare)s
461 annotateFault(ArmFault *fault) {
// Template: declaration for the "raw" load/store form -- dest plus (per
// the matching constructor below) a base register; no offset operand.
467 def template LoadStoreRaw64Declare {{
468 class %(class_name)s : public %(base_class)s
473 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
477 %(InitiateAccDeclare)s
478 %(CompleteAccDeclare)s
481 annotateFault(ArmFault *fault) {
// Template: declaration for exclusive load/store -- dest, base, and a
// result register for the exclusive status.
487 def template LoadStoreEx64Declare {{
488 class %(class_name)s : public %(base_class)s
493 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
494 IntRegIndex _base, IntRegIndex _result);
497 %(InitiateAccDeclare)s
498 %(CompleteAccDeclare)s
501 annotateFault(ArmFault *fault) {
// Template: declaration for PC-relative (literal) load -- dest register
// and a signed immediate; no base register operand.
507 def template LoadStoreLit64Declare {{
508 class %(class_name)s : public %(base_class)s
513 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);
516 %(InitiateAccDeclare)s
517 %(CompleteAccDeclare)s
520 annotateFault(ArmFault *fault) {
// Template: literal-load variant carrying the defaulted noAlloc/exclusive
// (and elided acquire-release) flags.
526 def template LoadStoreLitU64Declare {{
527 class %(class_name)s : public %(base_class)s
532 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
533 bool noAlloc = false, bool exclusive = false,
537 %(InitiateAccDeclare)s
538 %(CompleteAccDeclare)s
541 annotateFault(ArmFault *fault) {
// Template: constructor for immediate-offset load/store. When built as a
// macro-op it allocates two microops: uops[0] performs the memory access
// (delayed commit so it retires with the writeback), uops[1] is the
// base-register writeback generated by wb_decl and marked last.
547 def template LoadStoreImm64Constructor {{
548 %(class_name)s::%(class_name)s(ExtMachInst machInst,
549 IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
550 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
551 (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
555 assert(numMicroops >= 2);
556 uops = new StaticInstPtr[numMicroops];
557 uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
558 uops[0]->setDelayedCommit();
559 uops[1] = new %(wb_decl)s;
560 uops[1]->setLastMicroop();
// Template: constructor for the flagged immediate variant -- never
// microopped (assert), records exclusive/acquire-release via setExcAcRel.
// NOTE(review): the noAlloc flag's consumer is elided in this excerpt.
565 def template LoadStoreImmU64Constructor {{
566 %(class_name)s::%(class_name)s(ExtMachInst machInst,
567 IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
568 bool noAlloc, bool exclusive, bool acrel)
569 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
573 assert(!%(use_uops)d);
574 setExcAcRel(exclusive, acrel);
// Template: constructor for the paired-register immediate variant --
// forwards both destination registers; never microopped; records
// exclusive/acquire-release state.
578 def template LoadStoreImmDU64Constructor {{
579 %(class_name)s::%(class_name)s(ExtMachInst machInst,
580 IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
581 int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
582 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
583 _dest, _dest2, _base, _imm)
586 assert(!%(use_uops)d);
587 setExcAcRel(exclusive, acrel);
// Template: constructor for store-exclusive-pair -- passes the status
// result register through to the base class; never microopped.
591 def template StoreImmDEx64Constructor {{
592 %(class_name)s::%(class_name)s(ExtMachInst machInst,
593 IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
594 IntRegIndex _base, int64_t _imm)
595 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
596 _result, _dest, _dest2, _base, _imm)
599 assert(!%(use_uops)d);
// Template: constructor for register-offset load/store. Mirrors
// LoadStoreImm64Constructor's microop split: uops[0] = access microop
// (delayed commit; trailing ctor args elided here), uops[1] = writeback,
// marked as the last microop.
604 def template LoadStoreReg64Constructor {{
605 %(class_name)s::%(class_name)s(ExtMachInst machInst,
606 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
607 ArmExtendType _type, uint32_t _shiftAmt)
608 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
609 _dest, _base, _offset, _type, _shiftAmt)
613 assert(numMicroops >= 2);
614 uops = new StaticInstPtr[numMicroops];
615 uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
617 uops[0]->setDelayedCommit();
618 uops[1] = new %(wb_decl)s;
619 uops[1]->setLastMicroop();
// Template: constructor for the flagged register-offset variant -- never
// microopped; records exclusive/acquire-release via setExcAcRel.
624 def template LoadStoreRegU64Constructor {{
625 %(class_name)s::%(class_name)s(ExtMachInst machInst,
626 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
627 ArmExtendType _type, uint32_t _shiftAmt,
628 bool noAlloc, bool exclusive, bool acrel)
629 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
630 _dest, _base, _offset, _type, _shiftAmt)
633 assert(!%(use_uops)d);
634 setExcAcRel(exclusive, acrel);
// Template: constructor for the raw load/store form -- simple
// pass-through of dest and base to the base class; body elided.
638 def template LoadStoreRaw64Constructor {{
639 %(class_name)s::%(class_name)s(ExtMachInst machInst,
640 IntRegIndex _dest, IntRegIndex _base)
641 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
// Template: constructor for exclusive load/store -- forwards dest, base
// and the exclusive-status result register; body elided.
647 def template LoadStoreEx64Constructor {{
648 %(class_name)s::%(class_name)s(ExtMachInst machInst,
649 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
650 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
651 _dest, _base, _result)
// Template: constructor for literal (PC-relative) loads. Same two-microop
// macro-op structure as the immediate form: access microop with delayed
// commit, then a final writeback microop.
657 def template LoadStoreLit64Constructor {{
658 %(class_name)s::%(class_name)s(ExtMachInst machInst,
659 IntRegIndex _dest, int64_t _imm)
660 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
661 (IntRegIndex)_dest, _imm)
665 assert(numMicroops >= 2);
666 uops = new StaticInstPtr[numMicroops];
667 uops[0] = new %(acc_name)s(machInst, _dest, _imm);
668 uops[0]->setDelayedCommit();
669 uops[1] = new %(wb_decl)s;
670 uops[1]->setLastMicroop();
// Template: constructor for the flagged literal-load variant -- never
// microopped; records exclusive/acquire-release via setExcAcRel.
675 def template LoadStoreLitU64Constructor {{
676 %(class_name)s::%(class_name)s(ExtMachInst machInst,
677 IntRegIndex _dest, int64_t _imm,
678 bool noAlloc, bool exclusive, bool acrel)
679 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
680 (IntRegIndex)_dest, _imm)
683 assert(!%(use_uops)d);
684 setExcAcRel(exclusive, acrel);