arch: Use shared_ptr for all Faults
[gem5.git] / src/arch/arm/isa/templates/mem64.isa
// -*- mode:c++ -*-

// Copyright (c) 2011-2013 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Gabe Black

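// SPAlignmentCheckCode is spliced into the effective-address code of
// SP-relative accesses: if the stack pointer is not 16-byte aligned and
// the check is enabled, execution bails out with an SPAlignmentFault.
// The fault is allocated with std::make_shared, in line with this
// commit's switch to shared_ptr-managed Fault objects.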
let {{
    SPAlignmentCheckCode = '''
        if (baseIsSP && bits(XBase, 3, 0) &&
            SPAlignmentCheckEnabled(xc->tcBase())) {
            return std::make_shared<SPAlignmentFault>();
        }
    '''
}};

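// Atomic-mode execute() templates: the whole memory access completes
// inside a single call via readMemAtomic()/writeMemAtomic(), so no
// initiate/complete split is needed.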
def template Load64Execute {{
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template Store64Execute {{
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA,
                                   memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

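// Timing-mode split for stores: initiateAcc() computes the address,
// runs %(memacc_code)s to build the store data, and issues the request
// with writeMemTiming(); completion is handled separately below.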
def template Store64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};

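// Exclusive-store templates: in atomic mode writeMemAtomic() deposits
// the exclusive monitor's status in writeResult, and %(postacc_code)s
// moves that status into the instruction's result register.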
def template StoreEx64Execute {{
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        uint64_t writeResult = 0;
        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                                   &writeResult);
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template StoreEx64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};

def template Load64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemTiming(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};

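// Load completion: the data returns in a packet, which getMem() unpacks
// into Mem before %(memacc_code)s and the register writeback run.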
def template Load64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
                                      CPU_EXEC_CONTEXT *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        // ARM instructions will not have a pkt if the predicate is false
        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template Store64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
                                      CPU_EXEC_CONTEXT *xc,
                                      Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

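// For exclusive stores the status comes back attached to the request,
// so completeAcc() reads it with pkt->req->getExtraData() before
// running %(postacc_code)s.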
def template StoreEx64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
                                      CPU_EXEC_CONTEXT *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        uint64_t writeResult = pkt->req->getExtraData();
        %(postacc_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

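// Templates for data-cache maintenance operations that are modelled as
// stores (DC ZVA, for example); the write carries no register data, so
// writeMem() is passed a NULL source buffer.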
def template DCStore64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _base,
                       IntRegIndex _dest, uint64_t _imm);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template DCStore64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base,
                                   IntRegIndex _dest, uint64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_base, _dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};

def template DCStore64Execute {{
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template DCStore64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        return fault;
    }
}};

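// Class declaration templates. The ISA parser fills in the %(...)s
// fields per instruction. As a rough, hypothetical illustration of the
// expansion (the class and base names below are invented for this
// sketch), LoadStoreImm64Declare could produce something like:
//
//     class LDRX64_IMM : public MemoryImm64
//     {
//       public:
//         LDRX64_IMM(ExtMachInst machInst,
//                    IntRegIndex _dest, IntRegIndex _base, int64_t _imm);
//
//         Fault execute(CPU_EXEC_CONTEXT *, Trace::InstRecord *) const;
//         Fault initiateAcc(CPU_EXEC_CONTEXT *, Trace::InstRecord *) const;
//         Fault completeAcc(PacketPtr, CPU_EXEC_CONTEXT *,
//                           Trace::InstRecord *) const;
//
//         virtual void annotateFault(ArmFault *fault) { /* %(fa_code)s */ }
//     };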
def template LoadStoreImm64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _dest, IntRegIndex _base, int64_t _imm);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreImmU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
                       bool noAlloc = false, bool exclusive = false,
                       bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreImmDU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _dest, IntRegIndex _dest2,
                       IntRegIndex _base, int64_t _imm = 0,
                       bool noAlloc = false, bool exclusive = false,
                       bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template StoreImmDEx64Declare {{
    /**
     * Static instruction class for "%(mnemonic)s".
     */
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _result, IntRegIndex _dest,
                       IntRegIndex _dest2, IntRegIndex _base,
                       int64_t _imm = 0);

        %(BasicExecDeclare)s

        %(InitiateAccDeclare)s

        %(CompleteAccDeclare)s
    };
}};

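// Register-offset declare templates: the offset register is extended
// and shifted according to _type and _shiftAmt before being added to
// the base register.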
def template LoadStoreReg64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _dest, IntRegIndex _base,
                       IntRegIndex _offset, ArmExtendType _type,
                       uint32_t _shiftAmt);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreRegU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _dest, IntRegIndex _base,
                       IntRegIndex _offset, ArmExtendType _type,
                       uint32_t _shiftAmt,
                       bool noAlloc = false, bool exclusive = false,
                       bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreRaw64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreEx64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base, IntRegIndex _result);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreLit64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       int64_t _imm);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreLitU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       int64_t _imm,
                       bool noAlloc = false, bool exclusive = false,
                       bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

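// Constructor templates. When %(use_uops)d is nonzero the instruction
// is split into microops: uops[0] performs the access and is marked
// for delayed commit, while uops[1] (the writeback microop from
// %(wb_decl)s) is flagged as the last microop.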
def template LoadStoreImm64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
        uops[0]->setDelayedCommit();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

def template LoadStoreImmU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template LoadStoreImmDU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
            int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template StoreImmDEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
            IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _result, _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};

def template LoadStoreReg64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
                                   _type, _shiftAmt);
        uops[0]->setDelayedCommit();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

def template LoadStoreRegU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template LoadStoreRaw64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
    {
        %(constructor)s;
    }
}};

def template LoadStoreEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _result)
    {
        %(constructor)s;
    }
}};

def template LoadStoreLit64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _imm);
        uops[0]->setDelayedCommit();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

def template LoadStoreLitU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};