arch-arm: Add first-/non-faulting load instructions
[gem5.git] / src / arch / arm / faults.hh
1 /*
2 * Copyright (c) 2010, 2012-2013, 2016-2018 ARM Limited
3 * All rights reserved
4 *
5 * The license below extends only to copyright in the software and shall
6 * not be construed as granting a license to any other intellectual
7 * property including but not limited to intellectual property relating
8 * to a hardware implementation of the functionality of the software
9 * licensed hereunder. You may use the software subject to the license
10 * terms below provided that you ensure that this notice is replicated
11 * unmodified and in its entirety in all distributions of the software,
12 * modified or unmodified, in source code or in binary form.
13 *
14 * Copyright (c) 2003-2005 The Regents of The University of Michigan
15 * Copyright (c) 2007-2008 The Florida State University
16 * All rights reserved.
17 *
18 * Redistribution and use in source and binary forms, with or without
19 * modification, are permitted provided that the following conditions are
20 * met: redistributions of source code must retain the above copyright
21 * notice, this list of conditions and the following disclaimer;
22 * redistributions in binary form must reproduce the above copyright
23 * notice, this list of conditions and the following disclaimer in the
24 * documentation and/or other materials provided with the distribution;
25 * neither the name of the copyright holders nor the names of its
26 * contributors may be used to endorse or promote products derived from
27 * this software without specific prior written permission.
28 *
29 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
30 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
31 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
32 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
33 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
34 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
35 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
36 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
37 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
38 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
39 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
40 *
41 * Authors: Ali Saidi
42 * Gabe Black
43 * Giacomo Gabrielli
44 * Thomas Grocutt
45 */
46
47 #ifndef __ARM_FAULTS_HH__
48 #define __ARM_FAULTS_HH__
49
50 #include "arch/arm/miscregs.hh"
51 #include "arch/arm/pagetable.hh"
52 #include "arch/arm/types.hh"
53 #include "base/logging.hh"
54 #include "sim/faults.hh"
55 #include "sim/full_system.hh"
56
57 // The design of the "name" and "vect" functions is in sim/faults.hh
58
59 namespace ArmISA
60 {
61 typedef Addr FaultOffset;
62
63 class ArmStaticInst;
64
65 class ArmFault : public FaultBase
66 {
67 protected:
68 ExtMachInst machInst;
69 uint32_t issRaw;
70
71 // Helper variables for ARMv8 exception handling
72 bool from64; // True if the exception is generated from the AArch64 state
73 bool to64; // True if the exception is taken in AArch64 state
74 ExceptionLevel fromEL; // Source exception level
75 ExceptionLevel toEL; // Target exception level
76 OperatingMode fromMode; // Source operating mode (aarch32)
77 OperatingMode toMode; // Next operating mode (aarch32)
78
79 // This variable is true if the above fault specific informations
80 // have been updated. This is to prevent that a client is using their
81 // un-updated default constructed value.
82 bool faultUpdated;
83
84 bool hypRouted; // True if the fault has been routed to Hypervisor
85
86 virtual Addr getVector(ThreadContext *tc);
87 Addr getVector64(ThreadContext *tc);
88
89 public:
90 /// Generic fault source enums used to index into
91 /// {short/long/aarch64}DescFaultSources[] to get the actual encodings based
92 /// on the current register width state and the translation table format in
93 /// use
94 enum FaultSource
95 {
96 AlignmentFault = 0,
97 InstructionCacheMaintenance, // Short-desc. format only
98 SynchExtAbtOnTranslTableWalkLL,
99 SynchPtyErrOnTranslTableWalkLL = SynchExtAbtOnTranslTableWalkLL + 4,
100 TranslationLL = SynchPtyErrOnTranslTableWalkLL + 4,
101 AccessFlagLL = TranslationLL + 4,
102 DomainLL = AccessFlagLL + 4,
103 PermissionLL = DomainLL + 4,
104 DebugEvent = PermissionLL + 4,
105 SynchronousExternalAbort,
106 TLBConflictAbort, // Requires LPAE
107 SynchPtyErrOnMemoryAccess,
108 AsynchronousExternalAbort,
109 AsynchPtyErrOnMemoryAccess,
110 AddressSizeLL, // AArch64 only
111
112 // Not real faults. These are faults to allow the translation function
113 // to inform the memory access function not to proceed for a prefetch
114 // that misses in the TLB or that targets an uncacheable address
115 PrefetchTLBMiss = AddressSizeLL + 4,
116 PrefetchUncacheable,
117
118 NumFaultSources,
119 FaultSourceInvalid = 0xff
120 };
121
122 /// Encodings of the fault sources when the short-desc. translation table
123 /// format is in use (ARM ARM Issue C B3.13.3)
124 static uint8_t shortDescFaultSources[NumFaultSources];
125 /// Encodings of the fault sources when the long-desc. translation table
126 /// format is in use (ARM ARM Issue C B3.13.3)
127 static uint8_t longDescFaultSources[NumFaultSources];
128 /// Encodings of the fault sources in AArch64 state
129 static uint8_t aarch64FaultSources[NumFaultSources];
130
131 enum AnnotationIDs
132 {
133 S1PTW, // DataAbort, PrefetchAbort: Stage 1 Page Table Walk,
134 OVA, // DataAbort, PrefetchAbort: stage 1 Virtual Address for stage 2 faults
135 SAS, // DataAbort: Syndrome Access Size
136 SSE, // DataAbort: Syndrome Sign Extend
137 SRT, // DataAbort: Syndrome Register Transfer
138
139 // AArch64 only
140 SF, // DataAbort: width of the accessed register is SixtyFour
141 AR // DataAbort: Acquire/Release semantics
142 };
143
144 enum TranMethod
145 {
146 LpaeTran,
147 VmsaTran,
148 UnknownTran
149 };
150
151 struct FaultVals
152 {
153 const FaultName name;
154
155 const FaultOffset offset;
156
157 // Offsets used for exceptions taken in AArch64 state
158 const uint16_t currELTOffset;
159 const uint16_t currELHOffset;
160 const uint16_t lowerEL64Offset;
161 const uint16_t lowerEL32Offset;
162
163 const OperatingMode nextMode;
164
165 const uint8_t armPcOffset;
166 const uint8_t thumbPcOffset;
167 // The following two values are used in place of armPcOffset and
168 // thumbPcOffset when the exception return address is saved into ELR
169 // registers (exceptions taken in HYP mode or in AArch64 state)
170 const uint8_t armPcElrOffset;
171 const uint8_t thumbPcElrOffset;
172
173 const bool hypTrappable;
174 const bool abortDisable;
175 const bool fiqDisable;
176
177 // Exception class used to appropriately set the syndrome register
178 // (exceptions taken in HYP mode or in AArch64 state)
179 const ExceptionClass ec;
180
181 FaultStat count;
182 FaultVals(const FaultName& name_, const FaultOffset& offset_,
183 const uint16_t& currELTOffset_, const uint16_t& currELHOffset_,
184 const uint16_t& lowerEL64Offset_,
185 const uint16_t& lowerEL32Offset_,
186 const OperatingMode& nextMode_, const uint8_t& armPcOffset_,
187 const uint8_t& thumbPcOffset_, const uint8_t& armPcElrOffset_,
188 const uint8_t& thumbPcElrOffset_, const bool& hypTrappable_,
189 const bool& abortDisable_, const bool& fiqDisable_,
190 const ExceptionClass& ec_)
191 : name(name_), offset(offset_), currELTOffset(currELTOffset_),
192 currELHOffset(currELHOffset_), lowerEL64Offset(lowerEL64Offset_),
193 lowerEL32Offset(lowerEL32Offset_), nextMode(nextMode_),
194 armPcOffset(armPcOffset_), thumbPcOffset(thumbPcOffset_),
195 armPcElrOffset(armPcElrOffset_), thumbPcElrOffset(thumbPcElrOffset_),
196 hypTrappable(hypTrappable_), abortDisable(abortDisable_),
197 fiqDisable(fiqDisable_), ec(ec_) {}
198 };
199
200 ArmFault(ExtMachInst _machInst = 0, uint32_t _iss = 0) :
201 machInst(_machInst), issRaw(_iss), from64(false), to64(false),
202 fromEL(EL0), toEL(EL0), fromMode(MODE_UNDEFINED),
203 faultUpdated(false), hypRouted(false) {}
204
205 // Returns the actual syndrome register to use based on the target
206 // exception level
207 MiscRegIndex getSyndromeReg64() const;
208 // Returns the actual fault address register to use based on the target
209 // exception level
210 MiscRegIndex getFaultAddrReg64() const;
211
212 void invoke(ThreadContext *tc, const StaticInstPtr &inst =
213 StaticInst::nullStaticInstPtr) override;
214 void invoke64(ThreadContext *tc, const StaticInstPtr &inst =
215 StaticInst::nullStaticInstPtr);
216 void update(ThreadContext *tc);
217
218 ArmStaticInst *instrAnnotate(const StaticInstPtr &inst);
219 virtual void annotate(AnnotationIDs id, uint64_t val) {}
220 virtual FaultStat& countStat() = 0;
221 virtual FaultOffset offset(ThreadContext *tc) = 0;
222 virtual FaultOffset offset64(ThreadContext *tc) = 0;
223 virtual OperatingMode nextMode() = 0;
224 virtual bool routeToMonitor(ThreadContext *tc) const = 0;
225 virtual bool routeToHyp(ThreadContext *tc) const { return false; }
226 virtual uint8_t armPcOffset(bool isHyp) = 0;
227 virtual uint8_t thumbPcOffset(bool isHyp) = 0;
228 virtual uint8_t armPcElrOffset() = 0;
229 virtual uint8_t thumbPcElrOffset() = 0;
230 virtual bool abortDisable(ThreadContext *tc) = 0;
231 virtual bool fiqDisable(ThreadContext *tc) = 0;
232 virtual ExceptionClass ec(ThreadContext *tc) const = 0;
233 virtual uint32_t iss() const = 0;
234 virtual bool isStage2() const { return false; }
235 virtual FSR getFsr(ThreadContext *tc) const { return 0; }
236 virtual void setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg);
237 virtual bool getFaultVAddr(Addr &va) const { return false; }
238
239 };
240
241 template<typename T>
242 class ArmFaultVals : public ArmFault
243 {
244 protected:
245 static FaultVals vals;
246
247 public:
248 ArmFaultVals<T>(ExtMachInst _machInst = 0, uint32_t _iss = 0) :
249 ArmFault(_machInst, _iss) {}
250 FaultName name() const override { return vals.name; }
251 FaultStat & countStat() override { return vals.count; }
252 FaultOffset offset(ThreadContext *tc) override;
253
254 FaultOffset offset64(ThreadContext *tc) override;
255
256 OperatingMode nextMode() override { return vals.nextMode; }
257 virtual bool routeToMonitor(ThreadContext *tc) const override {
258 return false;
259 }
260 uint8_t armPcOffset(bool isHyp) override {
261 return isHyp ? vals.armPcElrOffset
262 : vals.armPcOffset;
263 }
264 uint8_t thumbPcOffset(bool isHyp) override {
265 return isHyp ? vals.thumbPcElrOffset
266 : vals.thumbPcOffset;
267 }
268 uint8_t armPcElrOffset() override { return vals.armPcElrOffset; }
269 uint8_t thumbPcElrOffset() override { return vals.thumbPcElrOffset; }
270 bool abortDisable(ThreadContext* tc) override { return vals.abortDisable; }
271 bool fiqDisable(ThreadContext* tc) override { return vals.fiqDisable; }
272 ExceptionClass ec(ThreadContext *tc) const override { return vals.ec; }
273 uint32_t iss() const override { return issRaw; }
274 };
275
/// Reset fault; uses its own vector computation (getVector override) and a
/// custom invoke().
class Reset : public ArmFaultVals<Reset>
{
  protected:
    Addr getVector(ThreadContext *tc) override;

  public:
    void invoke(ThreadContext *tc, const StaticInstPtr &inst =
                StaticInst::nullStaticInstPtr) override;
};
285
/// Undefined-instruction fault.
class UndefinedInstruction : public ArmFaultVals<UndefinedInstruction>
{
  protected:
    bool unknown;               // True if the instruction was undecodable
    bool disabled;              // True if raised because a feature is disabled
    ExceptionClass overrideEc;  // EC to report; EC_INVALID means use default
    const char *mnemonic;       // Mnemonic for diagnostics (may be NULL)

  public:
    /// Construct for an unknown/undecodable instruction (no explicit ISS).
    UndefinedInstruction(ExtMachInst _machInst,
                         bool _unknown,
                         const char *_mnemonic = NULL,
                         bool _disabled = false) :
        ArmFaultVals<UndefinedInstruction>(_machInst),
        unknown(_unknown), disabled(_disabled),
        overrideEc(EC_INVALID), mnemonic(_mnemonic)
    {}
    /// Construct with an explicit ISS and exception-class override.
    UndefinedInstruction(ExtMachInst _machInst, uint32_t _iss,
            ExceptionClass _overrideEc, const char *_mnemonic = NULL) :
        ArmFaultVals<UndefinedInstruction>(_machInst, _iss),
        unknown(false), disabled(true), overrideEc(_overrideEc),
        mnemonic(_mnemonic)
    {}

    void invoke(ThreadContext *tc, const StaticInstPtr &inst =
                StaticInst::nullStaticInstPtr) override;
    bool routeToHyp(ThreadContext *tc) const override;
    ExceptionClass ec(ThreadContext *tc) const override;
    uint32_t iss() const override;
};
316
/// Supervisor call (SVC) exception.
class SupervisorCall : public ArmFaultVals<SupervisorCall>
{
  protected:
    ExceptionClass overrideEc;  // EC to report; EC_INVALID means use default
  public:
    SupervisorCall(ExtMachInst _machInst, uint32_t _iss,
                   ExceptionClass _overrideEc = EC_INVALID) :
        ArmFaultVals<SupervisorCall>(_machInst, _iss),
        overrideEc(_overrideEc)
    {}

    void invoke(ThreadContext *tc, const StaticInstPtr &inst =
                StaticInst::nullStaticInstPtr) override;
    bool routeToHyp(ThreadContext *tc) const override;
    ExceptionClass ec(ThreadContext *tc) const override;
    uint32_t iss() const override;
};
334
/// Secure monitor call (SMC) exception.
class SecureMonitorCall : public ArmFaultVals<SecureMonitorCall>
{
  public:
    SecureMonitorCall(ExtMachInst _machInst) :
        ArmFaultVals<SecureMonitorCall>(_machInst)
    {}

    void invoke(ThreadContext *tc, const StaticInstPtr &inst =
                StaticInst::nullStaticInstPtr) override;
    ExceptionClass ec(ThreadContext *tc) const override;
    uint32_t iss() const override;
};
347
348 class SupervisorTrap : public ArmFaultVals<SupervisorTrap>
349 {
350 protected:
351 ExtMachInst machInst;
352 ExceptionClass overrideEc;
353
354 public:
355 SupervisorTrap(ExtMachInst _machInst, uint32_t _iss,
356 ExceptionClass _overrideEc = EC_INVALID) :
357 ArmFaultVals<SupervisorTrap>(_machInst, _iss),
358 overrideEc(_overrideEc)
359 {}
360
361 bool routeToHyp(ThreadContext *tc) const override;
362 uint32_t iss() const override;
363 ExceptionClass ec(ThreadContext *tc) const override;
364 };
365
366 class SecureMonitorTrap : public ArmFaultVals<SecureMonitorTrap>
367 {
368 protected:
369 ExtMachInst machInst;
370 ExceptionClass overrideEc;
371
372 public:
373 SecureMonitorTrap(ExtMachInst _machInst, uint32_t _iss,
374 ExceptionClass _overrideEc = EC_INVALID) :
375 ArmFaultVals<SecureMonitorTrap>(_machInst, _iss),
376 overrideEc(_overrideEc)
377 {}
378
379 ExceptionClass ec(ThreadContext *tc) const override;
380 };
381
/// Hypervisor call (HVC) exception; constructor defined out of line and
/// takes the call immediate.
class HypervisorCall : public ArmFaultVals<HypervisorCall>
{
  public:
    HypervisorCall(ExtMachInst _machInst, uint32_t _imm);

    ExceptionClass ec(ThreadContext *tc) const override;
};
389
390 class HypervisorTrap : public ArmFaultVals<HypervisorTrap>
391 {
392 protected:
393 ExtMachInst machInst;
394 ExceptionClass overrideEc;
395
396 public:
397 HypervisorTrap(ExtMachInst _machInst, uint32_t _iss,
398 ExceptionClass _overrideEc = EC_INVALID) :
399 ArmFaultVals<HypervisorTrap>(_machInst, _iss),
400 overrideEc(_overrideEc)
401 {}
402
403 ExceptionClass ec(ThreadContext *tc) const override;
404 };
405
/// Common base for prefetch/data abort faults; carries the faulting
/// address, fault source and translation bookkeeping shared by both.
template <class T>
class AbortFault : public ArmFaultVals<T>
{
  protected:
    /**
     * The virtual address the fault occurred at. If 2 stages of
     * translation are being used then this is the intermediate
     * physical address that is the starting point for the second
     * stage of translation.
     */
    Addr faultAddr;
    /**
     * Original virtual address. If the fault was generated on the
     * second stage of translation then this variable stores the
     * virtual address used in the original stage 1 translation.
     */
    Addr OVAddr;
    bool write;                   // True if the access was a write
    TlbEntry::DomainType domain;  // Memory domain of the access
    uint8_t source;               // FaultSource value describing the cause
    uint8_t srcEncoded;           // Architectural encoding of `source`
    bool stage2;                  // True if raised by stage 2 translation
    bool s1ptw;                   // True if raised on a stage 1 PT walk
    ArmFault::TranMethod tranMethod;  // Translation regime in use

  public:
    AbortFault(Addr _faultAddr, bool _write, TlbEntry::DomainType _domain,
               uint8_t _source, bool _stage2,
               ArmFault::TranMethod _tranMethod = ArmFault::UnknownTran) :
        faultAddr(_faultAddr), OVAddr(0), write(_write),
        domain(_domain), source(_source), srcEncoded(0),
        stage2(_stage2), s1ptw(false), tranMethod(_tranMethod)
    {}

    bool getFaultVAddr(Addr &va) const override;

    void invoke(ThreadContext *tc, const StaticInstPtr &inst =
                StaticInst::nullStaticInstPtr) override;

    FSR getFsr(ThreadContext *tc) const override;
    uint8_t getFaultStatusCode(ThreadContext *tc) const;
    bool abortDisable(ThreadContext *tc) override;
    uint32_t iss() const override;
    bool isStage2() const override { return stage2; }
    void annotate(ArmFault::AnnotationIDs id, uint64_t val) override;
    void setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg) override;
    bool isMMUFault() const;
};
454
/// Instruction-fetch (prefetch) abort.
class PrefetchAbort : public AbortFault<PrefetchAbort>
{
  public:
    // AArch32 fault status / address registers used to report this abort.
    static const MiscRegIndex FsrIndex = MISCREG_IFSR;
    static const MiscRegIndex FarIndex = MISCREG_IFAR;
    static const MiscRegIndex HFarIndex = MISCREG_HIFAR;

    PrefetchAbort(Addr _addr, uint8_t _source, bool _stage2 = false,
                  ArmFault::TranMethod _tranMethod = ArmFault::UnknownTran) :
        AbortFault<PrefetchAbort>(_addr, false, TlbEntry::DomainType::NoAccess,
                                  _source, _stage2, _tranMethod)
    {}

    ExceptionClass ec(ThreadContext *tc) const override;
    // @todo: external aborts should be routed if SCR.EA == 1
    bool routeToMonitor(ThreadContext *tc) const override;
    bool routeToHyp(ThreadContext *tc) const override;
};
473
/// Data abort.
class DataAbort : public AbortFault<DataAbort>
{
  public:
    // AArch32 fault status / address registers used to report this abort.
    static const MiscRegIndex FsrIndex = MISCREG_DFSR;
    static const MiscRegIndex FarIndex = MISCREG_DFAR;
    static const MiscRegIndex HFarIndex = MISCREG_HDFAR;
    // Syndrome fields, filled in through annotate() (see
    // ArmFault::AnnotationIDs).
    bool isv;     // instruction-syndrome-valid flag reported in iss()
    uint8_t sas;  // Syndrome Access Size
    uint8_t sse;  // Syndrome Sign Extend
    uint8_t srt;  // Syndrome Register Transfer

    // AArch64 only
    bool sf;      // width of the accessed register is SixtyFour
    bool ar;      // Acquire/Release semantics

    DataAbort(Addr _addr, TlbEntry::DomainType _domain, bool _write, uint8_t _source,
              bool _stage2 = false, ArmFault::TranMethod _tranMethod = ArmFault::UnknownTran) :
        AbortFault<DataAbort>(_addr, _write, _domain, _source, _stage2,
                              _tranMethod),
        isv(false), sas (0), sse(0), srt(0), sf(false), ar(false)
    {}

    ExceptionClass ec(ThreadContext *tc) const override;
    // @todo: external aborts should be routed if SCR.EA == 1
    bool routeToMonitor(ThreadContext *tc) const override;
    bool routeToHyp(ThreadContext *tc) const override;
    uint32_t iss() const override;
    void annotate(AnnotationIDs id, uint64_t val) override;
};
503
/// Virtual data abort (injected by the hypervisor).
class VirtualDataAbort : public AbortFault<VirtualDataAbort>
{
  public:
    // AArch32 fault status / address registers used to report this abort.
    static const MiscRegIndex FsrIndex = MISCREG_DFSR;
    static const MiscRegIndex FarIndex = MISCREG_DFAR;
    static const MiscRegIndex HFarIndex = MISCREG_HDFAR;

    VirtualDataAbort(Addr _addr, TlbEntry::DomainType _domain, bool _write,
                     uint8_t _source) :
        AbortFault<VirtualDataAbort>(_addr, _write, _domain, _source, false)
    {}

    void invoke(ThreadContext *tc, const StaticInstPtr &inst) override;
};
518
/// IRQ exception.
class Interrupt : public ArmFaultVals<Interrupt>
{
  public:
    bool routeToMonitor(ThreadContext *tc) const override;
    bool routeToHyp(ThreadContext *tc) const override;
    bool abortDisable(ThreadContext *tc) override;
};
526
/// Virtual IRQ exception (injected by the hypervisor).
class VirtualInterrupt : public ArmFaultVals<VirtualInterrupt>
{
  public:
    VirtualInterrupt();
};
532
/// FIQ exception.
class FastInterrupt : public ArmFaultVals<FastInterrupt>
{
  public:
    bool routeToMonitor(ThreadContext *tc) const override;
    bool routeToHyp(ThreadContext *tc) const override;
    bool abortDisable(ThreadContext *tc) override;
    bool fiqDisable(ThreadContext *tc) override;
};
541
/// Virtual FIQ exception (injected by the hypervisor).
class VirtualFastInterrupt : public ArmFaultVals<VirtualFastInterrupt>
{
  public:
    VirtualFastInterrupt();
};
547
/// PC alignment fault (AArch64 only)
class PCAlignmentFault : public ArmFaultVals<PCAlignmentFault>
{
  protected:
    /// The unaligned value of the PC
    Addr faultPC;
  public:
    /// @param _faultPC the misaligned program counter value.
    PCAlignmentFault(Addr _faultPC) : faultPC(_faultPC)
    {}
    void invoke(ThreadContext *tc, const StaticInstPtr &inst =
                StaticInst::nullStaticInstPtr) override;
    bool routeToHyp(ThreadContext *tc) const override;
};
561
/// Stack pointer alignment fault (AArch64 only)
class SPAlignmentFault : public ArmFaultVals<SPAlignmentFault>
{
  public:
    SPAlignmentFault();
};
568
/// System error (AArch64 only)
class SystemError : public ArmFaultVals<SystemError>
{
  public:
    SystemError();
    void invoke(ThreadContext *tc, const StaticInstPtr &inst =
                StaticInst::nullStaticInstPtr) override;
    bool routeToMonitor(ThreadContext *tc) const override;
    bool routeToHyp(ThreadContext *tc) const override;
};
579
/// Software breakpoint (AArch64 only).
/// (Previous comment said "System error" — a copy-paste error from the
/// class above.)
class SoftwareBreakpoint : public ArmFaultVals<SoftwareBreakpoint>
{
  public:
    SoftwareBreakpoint(ExtMachInst _mach_inst, uint32_t _iss);

    bool routeToHyp(ThreadContext *tc) const override;
    ExceptionClass ec(ThreadContext *tc) const override;
};
589
// A fault that flushes the pipe, excluding the faulting instructions
class ArmSev : public ArmFaultVals<ArmSev>
{
  public:
    ArmSev () {}
    void invoke(ThreadContext *tc, const StaticInstPtr &inst =
                StaticInst::nullStaticInstPtr) override;
};
598
/// Illegal Instruction Set State fault (AArch64 only)
class IllegalInstSetStateFault : public ArmFaultVals<IllegalInstSetStateFault>
{
  public:
    IllegalInstSetStateFault();
};
605
/*
 * Explicitly declare template static member variables to avoid warnings
 * in some clang versions. The corresponding definitions (the per-fault
 * FaultVals tables) are provided out of line, one per fault type.
 */
template<> ArmFault::FaultVals ArmFaultVals<Reset>::vals;
template<> ArmFault::FaultVals ArmFaultVals<UndefinedInstruction>::vals;
template<> ArmFault::FaultVals ArmFaultVals<SupervisorCall>::vals;
template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorCall>::vals;
template<> ArmFault::FaultVals ArmFaultVals<HypervisorCall>::vals;
template<> ArmFault::FaultVals ArmFaultVals<PrefetchAbort>::vals;
template<> ArmFault::FaultVals ArmFaultVals<DataAbort>::vals;
template<> ArmFault::FaultVals ArmFaultVals<VirtualDataAbort>::vals;
template<> ArmFault::FaultVals ArmFaultVals<HypervisorTrap>::vals;
template<> ArmFault::FaultVals ArmFaultVals<Interrupt>::vals;
template<> ArmFault::FaultVals ArmFaultVals<VirtualInterrupt>::vals;
template<> ArmFault::FaultVals ArmFaultVals<FastInterrupt>::vals;
template<> ArmFault::FaultVals ArmFaultVals<VirtualFastInterrupt>::vals;
template<> ArmFault::FaultVals ArmFaultVals<IllegalInstSetStateFault>::vals;
template<> ArmFault::FaultVals ArmFaultVals<SupervisorTrap>::vals;
template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorTrap>::vals;
template<> ArmFault::FaultVals ArmFaultVals<PCAlignmentFault>::vals;
template<> ArmFault::FaultVals ArmFaultVals<SPAlignmentFault>::vals;
template<> ArmFault::FaultVals ArmFaultVals<SystemError>::vals;
template<> ArmFault::FaultVals ArmFaultVals<SoftwareBreakpoint>::vals;
template<> ArmFault::FaultVals ArmFaultVals<ArmSev>::vals;
631
/**
 * Returns true if the fault passed as the first argument was triggered
 * by a memory access, false otherwise. When it returns true, the
 * faulting virtual address is stored in the va argument.
 *
 * @param fault generated fault
 * @param va output parameter; set to the faulting virtual address when
 *        the function returns true
 * @return true if va contains a valid value, false otherwise
 */
bool getFaultVAddr(Fault fault, Addr &va);
643
644
645 } // namespace ArmISA
646
647 #endif // __ARM_FAULTS_HH__