/*
 * Copyright (c) 2010, 2012-2013, 2016-2019 ARM Limited
 * All rights reserved
 *
 * The license below extends only to copyright in the software and shall
 * not be construed as granting a license to any other intellectual
 * property including but not limited to intellectual property relating
 * to a hardware implementation of the functionality of the software
 * licensed hereunder. You may use the software subject to the license
 * terms below provided that you ensure that this notice is replicated
 * unmodified and in its entirety in all distributions of the software,
 * modified or unmodified, in source code or in binary form.
 *
 * Copyright (c) 2003-2005 The Regents of The University of Michigan
 * Copyright (c) 2007-2008 The Florida State University
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Ali Saidi
 *          Gabe Black
 *          Giacomo Gabrielli
 *          Thomas Grocutt
 */

#ifndef __ARM_FAULTS_HH__
#define __ARM_FAULTS_HH__

#include "arch/arm/miscregs.hh"
#include "arch/arm/pagetable.hh"
#include "arch/arm/types.hh"
#include "base/logging.hh"
#include "sim/faults.hh"
#include "sim/full_system.hh"

// The design of the "name" and "vect" functions is in sim/faults.hh

namespace ArmISA
{
typedef Addr FaultOffset;

class ArmStaticInst;

class ArmFault : public FaultBase
{
  protected:
    ExtMachInst machInst;
    uint32_t issRaw;

    // Helper variables for ARMv8 exception handling
    bool from64;  // True if the exception is generated from the AArch64 state
    bool to64;    // True if the exception is taken in AArch64 state
    ExceptionLevel fromEL;  // Source exception level
    ExceptionLevel toEL;    // Target exception level
    OperatingMode fromMode; // Source operating mode (aarch32)
    OperatingMode toMode;   // Next operating mode (aarch32)

    // This variable is true if the fault-specific information above has
    // been updated. It prevents a client from reading stale,
    // default-constructed values.
    bool faultUpdated;

    bool hypRouted; // True if the fault has been routed to Hypervisor
    bool span;      // True if the fault is setting the PSTATE.PAN bit

    virtual Addr getVector(ThreadContext *tc);
    Addr getVector64(ThreadContext *tc);

  public:
    /// Generic fault source enums used to index into
    /// {short/long/aarch64}DescFaultSources[] to get the actual encodings
    /// based on the current register width state and the translation table
    /// format in use
    enum FaultSource
    {
        AlignmentFault = 0,
        InstructionCacheMaintenance,  // Short-desc. format only
        SynchExtAbtOnTranslTableWalkLL,
        SynchPtyErrOnTranslTableWalkLL = SynchExtAbtOnTranslTableWalkLL + 4,
        TranslationLL = SynchPtyErrOnTranslTableWalkLL + 4,
        AccessFlagLL = TranslationLL + 4,
        DomainLL = AccessFlagLL + 4,
        PermissionLL = DomainLL + 4,
        DebugEvent = PermissionLL + 4,
        SynchronousExternalAbort,
        TLBConflictAbort,  // Requires LPAE
        SynchPtyErrOnMemoryAccess,
        AsynchronousExternalAbort,
        AsynchPtyErrOnMemoryAccess,
        AddressSizeLL,  // AArch64 only

        // Not real faults. These are faults to allow the translation function
        // to inform the memory access function not to proceed for a prefetch
        // that misses in the TLB or that targets an uncacheable address
        PrefetchTLBMiss = AddressSizeLL + 4,
        PrefetchUncacheable,

        NumFaultSources,
        FaultSourceInvalid = 0xff
    };

    /// Encodings of the fault sources when the short-desc. translation table
    /// format is in use (ARM ARM Issue C B3.13.3)
    static uint8_t shortDescFaultSources[NumFaultSources];
    /// Encodings of the fault sources when the long-desc. translation table
    /// format is in use (ARM ARM Issue C B3.13.3)
    static uint8_t longDescFaultSources[NumFaultSources];
    /// Encodings of the fault sources in AArch64 state
    static uint8_t aarch64FaultSources[NumFaultSources];
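
    // Usage sketch (illustrative only; the tables and enumerators are the
    // ones declared above, but this exact call site is hypothetical). The
    // *LL sources reserve four consecutive enumerators, one per lookup
    // level, so a level-specific source is formed by adding the level to
    // the base value before indexing an encoding table, e.g. for a
    // level-2 translation fault with the short-descriptor format:
    //
    //     uint8_t src = ArmFault::TranslationLL + 2;  // level-2 translation
    //     uint8_t fsc = ArmFault::shortDescFaultSources[src];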

    enum AnnotationIDs
    {
        S1PTW, // DataAbort, PrefetchAbort: Stage 1 Page Table Walk,
        OVA,   // DataAbort, PrefetchAbort: stage 1 Virtual Address for
               // stage 2 faults
        SAS,   // DataAbort: Syndrome Access Size
        SSE,   // DataAbort: Syndrome Sign Extend
        SRT,   // DataAbort: Syndrome Register Transfer
        CM,    // DataAbort: Cache Maintenance/Address Translation Op
        OFA,   // DataAbort: Override fault Address. This is needed when
               // the abort is triggered by a CMO. The faulting address is
               // then the address specified in the register argument of the
               // instruction and not the cacheline address (See FAR doc)

        // AArch64 only
        SF,    // DataAbort: width of the accessed register is SixtyFour
        AR     // DataAbort: Acquire/Release semantics
    };
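
    // Usage sketch (hypothetical call site, shown only to illustrate the
    // annotation mechanism declared further below): after constructing a
    // data abort, a caller can attach syndrome details via annotate(),
    // e.g. for a 32-bit load into register 3:
    //
    //     fault->annotate(ArmFault::SAS, 2);  // access size 2^2 = 4 bytes
    //     fault->annotate(ArmFault::SSE, 0);  // no sign extension
    //     fault->annotate(ArmFault::SRT, 3);  // transfer register number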

    enum TranMethod
    {
        LpaeTran,
        VmsaTran,
        UnknownTran
    };

    struct FaultVals
    {
        const FaultName name;

        const FaultOffset offset;

        // Offsets used for exceptions taken in AArch64 state
        const uint16_t currELTOffset;
        const uint16_t currELHOffset;
        const uint16_t lowerEL64Offset;
        const uint16_t lowerEL32Offset;

        const OperatingMode nextMode;

        const uint8_t armPcOffset;
        const uint8_t thumbPcOffset;
        // The following two values are used in place of armPcOffset and
        // thumbPcOffset when the exception return address is saved into ELR
        // registers (exceptions taken in HYP mode or in AArch64 state)
        const uint8_t armPcElrOffset;
        const uint8_t thumbPcElrOffset;

        const bool hypTrappable;
        const bool abortDisable;
        const bool fiqDisable;

        // Exception class used to appropriately set the syndrome register
        // (exceptions taken in HYP mode or in AArch64 state)
        const ExceptionClass ec;

        FaultStat count;

        FaultVals(const FaultName& name_, const FaultOffset& offset_,
                  const uint16_t& currELTOffset_,
                  const uint16_t& currELHOffset_,
                  const uint16_t& lowerEL64Offset_,
                  const uint16_t& lowerEL32Offset_,
                  const OperatingMode& nextMode_, const uint8_t& armPcOffset_,
                  const uint8_t& thumbPcOffset_,
                  const uint8_t& armPcElrOffset_,
                  const uint8_t& thumbPcElrOffset_, const bool& hypTrappable_,
                  const bool& abortDisable_, const bool& fiqDisable_,
                  const ExceptionClass& ec_)
            : name(name_), offset(offset_), currELTOffset(currELTOffset_),
              currELHOffset(currELHOffset_),
              lowerEL64Offset(lowerEL64Offset_),
              lowerEL32Offset(lowerEL32Offset_), nextMode(nextMode_),
              armPcOffset(armPcOffset_), thumbPcOffset(thumbPcOffset_),
              armPcElrOffset(armPcElrOffset_),
              thumbPcElrOffset(thumbPcElrOffset_),
              hypTrappable(hypTrappable_), abortDisable(abortDisable_),
              fiqDisable(fiqDisable_), ec(ec_) {}
    };

    ArmFault(ExtMachInst _machInst = 0, uint32_t _iss = 0) :
        machInst(_machInst), issRaw(_iss), from64(false), to64(false),
        fromEL(EL0), toEL(EL0), fromMode(MODE_UNDEFINED),
        faultUpdated(false), hypRouted(false), span(false) {}

    // Returns the actual syndrome register to use based on the target
    // exception level
    MiscRegIndex getSyndromeReg64() const;
    // Returns the actual fault address register to use based on the target
    // exception level
    MiscRegIndex getFaultAddrReg64() const;

    void invoke(ThreadContext *tc, const StaticInstPtr &inst =
                StaticInst::nullStaticInstPtr) override;
    void invoke64(ThreadContext *tc, const StaticInstPtr &inst =
                  StaticInst::nullStaticInstPtr);
    void update(ThreadContext *tc);

    ArmStaticInst *instrAnnotate(const StaticInstPtr &inst);
    virtual void annotate(AnnotationIDs id, uint64_t val) {}
    virtual FaultStat& countStat() = 0;
    virtual FaultOffset offset(ThreadContext *tc) = 0;
    virtual FaultOffset offset64(ThreadContext *tc) = 0;
    virtual OperatingMode nextMode() = 0;
    virtual bool routeToMonitor(ThreadContext *tc) const = 0;
    virtual bool routeToHyp(ThreadContext *tc) const { return false; }
    virtual uint8_t armPcOffset(bool isHyp) = 0;
    virtual uint8_t thumbPcOffset(bool isHyp) = 0;
    virtual uint8_t armPcElrOffset() = 0;
    virtual uint8_t thumbPcElrOffset() = 0;
    virtual bool abortDisable(ThreadContext *tc) = 0;
    virtual bool fiqDisable(ThreadContext *tc) = 0;
    virtual ExceptionClass ec(ThreadContext *tc) const = 0;
    virtual uint32_t iss() const = 0;
    virtual bool isStage2() const { return false; }
    virtual FSR getFsr(ThreadContext *tc) const { return 0; }
    virtual void setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg);
    virtual bool getFaultVAddr(Addr &va) const { return false; }
};

template<typename T>
class ArmFaultVals : public ArmFault
{
  protected:
    static FaultVals vals;

  public:
    ArmFaultVals<T>(ExtMachInst _machInst = 0, uint32_t _iss = 0) :
        ArmFault(_machInst, _iss) {}
    FaultName name() const override { return vals.name; }
    FaultStat & countStat() override { return vals.count; }
    FaultOffset offset(ThreadContext *tc) override;

    FaultOffset offset64(ThreadContext *tc) override;

    OperatingMode nextMode() override { return vals.nextMode; }
    virtual bool routeToMonitor(ThreadContext *tc) const override {
        return false;
    }
    uint8_t armPcOffset(bool isHyp) override {
        return isHyp ? vals.armPcElrOffset
                     : vals.armPcOffset;
    }
    uint8_t thumbPcOffset(bool isHyp) override {
        return isHyp ? vals.thumbPcElrOffset
                     : vals.thumbPcOffset;
    }
    uint8_t armPcElrOffset() override { return vals.armPcElrOffset; }
    uint8_t thumbPcElrOffset() override { return vals.thumbPcElrOffset; }
    bool abortDisable(ThreadContext* tc) override { return vals.abortDisable; }
    bool fiqDisable(ThreadContext* tc) override { return vals.fiqDisable; }
    ExceptionClass ec(ThreadContext *tc) const override { return vals.ec; }
    uint32_t iss() const override { return issRaw; }
};
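
// Illustrative sketch (not a declaration from this header): each concrete
// fault type supplies its own FaultVals table by specialising the static
// `vals` member, typically in faults.cc. The values below are placeholders
// used only to show the shape of such a specialisation, not the real table
// entries:
//
//     template<> ArmFault::FaultVals ArmFaultVals<Reset>::vals(
//         // name   offset  currELT currELH lowerEL64 lowerEL32
//         "reset",  0x00,   0x000,  0x000,  0x000,    0x000,
//         // nextMode  armPc thumbPc armPcElr thumbPcElr hypTrappable
//         MODE_SVC,    0,    0,      0,       0,         false,
//         // abortDisable fiqDisable ec
//         true,           true,      EC_UNKNOWN);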

class Reset : public ArmFaultVals<Reset>
{
  protected:
    Addr getVector(ThreadContext *tc) override;

  public:
    void invoke(ThreadContext *tc, const StaticInstPtr &inst =
                StaticInst::nullStaticInstPtr) override;
};

class UndefinedInstruction : public ArmFaultVals<UndefinedInstruction>
{
  protected:
    bool unknown;
    bool disabled;
    ExceptionClass overrideEc;
    const char *mnemonic;

  public:
    UndefinedInstruction(ExtMachInst _machInst,
                         bool _unknown,
                         const char *_mnemonic = NULL,
                         bool _disabled = false) :
        ArmFaultVals<UndefinedInstruction>(_machInst),
        unknown(_unknown), disabled(_disabled),
        overrideEc(EC_INVALID), mnemonic(_mnemonic)
    {}
    UndefinedInstruction(ExtMachInst _machInst, uint32_t _iss,
                         ExceptionClass _overrideEc,
                         const char *_mnemonic = NULL) :
        ArmFaultVals<UndefinedInstruction>(_machInst, _iss),
        unknown(false), disabled(true), overrideEc(_overrideEc),
        mnemonic(_mnemonic)
    {}

    void invoke(ThreadContext *tc, const StaticInstPtr &inst =
                StaticInst::nullStaticInstPtr) override;
    bool routeToHyp(ThreadContext *tc) const override;
    ExceptionClass ec(ThreadContext *tc) const override;
    uint32_t iss() const override;
};

class SupervisorCall : public ArmFaultVals<SupervisorCall>
{
  protected:
    ExceptionClass overrideEc;
  public:
    SupervisorCall(ExtMachInst _machInst, uint32_t _iss,
                   ExceptionClass _overrideEc = EC_INVALID) :
        ArmFaultVals<SupervisorCall>(_machInst, _iss),
        overrideEc(_overrideEc)
    {}

    void invoke(ThreadContext *tc, const StaticInstPtr &inst =
                StaticInst::nullStaticInstPtr) override;
    bool routeToHyp(ThreadContext *tc) const override;
    ExceptionClass ec(ThreadContext *tc) const override;
    uint32_t iss() const override;
};

class SecureMonitorCall : public ArmFaultVals<SecureMonitorCall>
{
  public:
    SecureMonitorCall(ExtMachInst _machInst) :
        ArmFaultVals<SecureMonitorCall>(_machInst)
    {}

    void invoke(ThreadContext *tc, const StaticInstPtr &inst =
                StaticInst::nullStaticInstPtr) override;
    ExceptionClass ec(ThreadContext *tc) const override;
    uint32_t iss() const override;
};

class SupervisorTrap : public ArmFaultVals<SupervisorTrap>
{
  protected:
    ExtMachInst machInst;
    ExceptionClass overrideEc;

  public:
    SupervisorTrap(ExtMachInst _machInst, uint32_t _iss,
                   ExceptionClass _overrideEc = EC_INVALID) :
        ArmFaultVals<SupervisorTrap>(_machInst, _iss),
        overrideEc(_overrideEc)
    {}

    bool routeToHyp(ThreadContext *tc) const override;
    uint32_t iss() const override;
    ExceptionClass ec(ThreadContext *tc) const override;
};

class SecureMonitorTrap : public ArmFaultVals<SecureMonitorTrap>
{
  protected:
    ExtMachInst machInst;
    ExceptionClass overrideEc;

  public:
    SecureMonitorTrap(ExtMachInst _machInst, uint32_t _iss,
                      ExceptionClass _overrideEc = EC_INVALID) :
        ArmFaultVals<SecureMonitorTrap>(_machInst, _iss),
        overrideEc(_overrideEc)
    {}

    ExceptionClass ec(ThreadContext *tc) const override;
};

class HypervisorCall : public ArmFaultVals<HypervisorCall>
{
  public:
    HypervisorCall(ExtMachInst _machInst, uint32_t _imm);

    ExceptionClass ec(ThreadContext *tc) const override;
};

class HypervisorTrap : public ArmFaultVals<HypervisorTrap>
{
  protected:
    ExtMachInst machInst;
    ExceptionClass overrideEc;

  public:
    HypervisorTrap(ExtMachInst _machInst, uint32_t _iss,
                   ExceptionClass _overrideEc = EC_INVALID) :
        ArmFaultVals<HypervisorTrap>(_machInst, _iss),
        overrideEc(_overrideEc)
    {}

    ExceptionClass ec(ThreadContext *tc) const override;
};

template <class T>
class AbortFault : public ArmFaultVals<T>
{
  protected:
    /**
     * The virtual address the fault occurred at. If 2 stages of
     * translation are being used then this is the intermediate
     * physical address that is the starting point for the second
     * stage of translation.
     */
    Addr faultAddr;
    /**
     * Original virtual address. If the fault was generated on the
     * second stage of translation then this variable stores the
     * virtual address used in the original stage 1 translation.
     */
    Addr OVAddr;
    bool write;
    TlbEntry::DomainType domain;
    uint8_t source;
    uint8_t srcEncoded;
    bool stage2;
    bool s1ptw;
    ArmFault::TranMethod tranMethod;

  public:
    AbortFault(Addr _faultAddr, bool _write, TlbEntry::DomainType _domain,
               uint8_t _source, bool _stage2,
               ArmFault::TranMethod _tranMethod = ArmFault::UnknownTran) :
        faultAddr(_faultAddr), OVAddr(0), write(_write),
        domain(_domain), source(_source), srcEncoded(0),
        stage2(_stage2), s1ptw(false), tranMethod(_tranMethod)
    {}

    bool getFaultVAddr(Addr &va) const override;

    void invoke(ThreadContext *tc, const StaticInstPtr &inst =
                StaticInst::nullStaticInstPtr) override;

    FSR getFsr(ThreadContext *tc) const override;
    uint8_t getFaultStatusCode(ThreadContext *tc) const;
    bool abortDisable(ThreadContext *tc) override;
    uint32_t iss() const override;
    bool isStage2() const override { return stage2; }
    void annotate(ArmFault::AnnotationIDs id, uint64_t val) override;
    void setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg) override;
    bool isMMUFault() const;
};

class PrefetchAbort : public AbortFault<PrefetchAbort>
{
  public:
    static const MiscRegIndex FsrIndex = MISCREG_IFSR;
    static const MiscRegIndex FarIndex = MISCREG_IFAR;
    static const MiscRegIndex HFarIndex = MISCREG_HIFAR;

    PrefetchAbort(Addr _addr, uint8_t _source, bool _stage2 = false,
                  ArmFault::TranMethod _tranMethod = ArmFault::UnknownTran) :
        AbortFault<PrefetchAbort>(_addr, false, TlbEntry::DomainType::NoAccess,
                                  _source, _stage2, _tranMethod)
    {}

    ExceptionClass ec(ThreadContext *tc) const override;
    // @todo: external aborts should be routed if SCR.EA == 1
    bool routeToMonitor(ThreadContext *tc) const override;
    bool routeToHyp(ThreadContext *tc) const override;
};

class DataAbort : public AbortFault<DataAbort>
{
  public:
    static const MiscRegIndex FsrIndex = MISCREG_DFSR;
    static const MiscRegIndex FarIndex = MISCREG_DFAR;
    static const MiscRegIndex HFarIndex = MISCREG_HDFAR;
    bool isv;
    uint8_t sas;
    uint8_t sse;
    uint8_t srt;
    uint8_t cm;

    // AArch64 only
    bool sf;
    bool ar;

    DataAbort(Addr _addr, TlbEntry::DomainType _domain, bool _write,
              uint8_t _source, bool _stage2 = false,
              ArmFault::TranMethod _tranMethod = ArmFault::UnknownTran) :
        AbortFault<DataAbort>(_addr, _write, _domain, _source, _stage2,
                              _tranMethod),
        isv(false), sas(0), sse(0), srt(0), cm(0), sf(false), ar(false)
    {}

    ExceptionClass ec(ThreadContext *tc) const override;
    // @todo: external aborts should be routed if SCR.EA == 1
    bool routeToMonitor(ThreadContext *tc) const override;
    bool routeToHyp(ThreadContext *tc) const override;
    uint32_t iss() const override;
    void annotate(AnnotationIDs id, uint64_t val) override;
};
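
// Usage sketch (hypothetical call site, e.g. what a TLB model might return
// on a missing translation; the variable names here are illustrative):
//
//     return std::make_shared<DataAbort>(
//         vaddr, TlbEntry::DomainType::NoAccess, is_write,
//         ArmFault::TranslationLL + lookup_level,  // level-specific source
//         is_stage2, ArmFault::LpaeTran);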

class VirtualDataAbort : public AbortFault<VirtualDataAbort>
{
  public:
    static const MiscRegIndex FsrIndex = MISCREG_DFSR;
    static const MiscRegIndex FarIndex = MISCREG_DFAR;
    static const MiscRegIndex HFarIndex = MISCREG_HDFAR;

    VirtualDataAbort(Addr _addr, TlbEntry::DomainType _domain, bool _write,
                     uint8_t _source) :
        AbortFault<VirtualDataAbort>(_addr, _write, _domain, _source, false)
    {}

    void invoke(ThreadContext *tc, const StaticInstPtr &inst) override;
};

class Interrupt : public ArmFaultVals<Interrupt>
{
  public:
    bool routeToMonitor(ThreadContext *tc) const override;
    bool routeToHyp(ThreadContext *tc) const override;
    bool abortDisable(ThreadContext *tc) override;
};

class VirtualInterrupt : public ArmFaultVals<VirtualInterrupt>
{
  public:
    VirtualInterrupt();
};

class FastInterrupt : public ArmFaultVals<FastInterrupt>
{
  public:
    bool routeToMonitor(ThreadContext *tc) const override;
    bool routeToHyp(ThreadContext *tc) const override;
    bool abortDisable(ThreadContext *tc) override;
    bool fiqDisable(ThreadContext *tc) override;
};

class VirtualFastInterrupt : public ArmFaultVals<VirtualFastInterrupt>
{
  public:
    VirtualFastInterrupt();
};

/// PC alignment fault (AArch64 only)
class PCAlignmentFault : public ArmFaultVals<PCAlignmentFault>
{
  protected:
    /// The unaligned value of the PC
    Addr faultPC;
  public:
    PCAlignmentFault(Addr _faultPC) : faultPC(_faultPC)
    {}
    void invoke(ThreadContext *tc, const StaticInstPtr &inst =
                StaticInst::nullStaticInstPtr) override;
    bool routeToHyp(ThreadContext *tc) const override;
};

/// Stack pointer alignment fault (AArch64 only)
class SPAlignmentFault : public ArmFaultVals<SPAlignmentFault>
{
  public:
    SPAlignmentFault();
};

/// System error (AArch64 only)
class SystemError : public ArmFaultVals<SystemError>
{
  public:
    SystemError();
    void invoke(ThreadContext *tc, const StaticInstPtr &inst =
                StaticInst::nullStaticInstPtr) override;
    bool routeToMonitor(ThreadContext *tc) const override;
    bool routeToHyp(ThreadContext *tc) const override;
};

/// Software breakpoint (BKPT/BRK instruction)
class SoftwareBreakpoint : public ArmFaultVals<SoftwareBreakpoint>
{
  public:
    SoftwareBreakpoint(ExtMachInst _mach_inst, uint32_t _iss);

    bool routeToHyp(ThreadContext *tc) const override;
    ExceptionClass ec(ThreadContext *tc) const override;
};

// A fault that flushes the pipe, excluding the faulting instruction
class ArmSev : public ArmFaultVals<ArmSev>
{
  public:
    ArmSev() {}
    void invoke(ThreadContext *tc, const StaticInstPtr &inst =
                StaticInst::nullStaticInstPtr) override;
};

/// Illegal Instruction Set State fault (AArch64 only)
class IllegalInstSetStateFault : public ArmFaultVals<IllegalInstSetStateFault>
{
  public:
    IllegalInstSetStateFault();
};

/*
 * Explicitly declare template static member variables to avoid warnings
 * in some clang versions
 */
template<> ArmFault::FaultVals ArmFaultVals<Reset>::vals;
template<> ArmFault::FaultVals ArmFaultVals<UndefinedInstruction>::vals;
template<> ArmFault::FaultVals ArmFaultVals<SupervisorCall>::vals;
template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorCall>::vals;
template<> ArmFault::FaultVals ArmFaultVals<HypervisorCall>::vals;
template<> ArmFault::FaultVals ArmFaultVals<PrefetchAbort>::vals;
template<> ArmFault::FaultVals ArmFaultVals<DataAbort>::vals;
template<> ArmFault::FaultVals ArmFaultVals<VirtualDataAbort>::vals;
template<> ArmFault::FaultVals ArmFaultVals<HypervisorTrap>::vals;
template<> ArmFault::FaultVals ArmFaultVals<Interrupt>::vals;
template<> ArmFault::FaultVals ArmFaultVals<VirtualInterrupt>::vals;
template<> ArmFault::FaultVals ArmFaultVals<FastInterrupt>::vals;
template<> ArmFault::FaultVals ArmFaultVals<VirtualFastInterrupt>::vals;
template<> ArmFault::FaultVals ArmFaultVals<IllegalInstSetStateFault>::vals;
template<> ArmFault::FaultVals ArmFaultVals<SupervisorTrap>::vals;
template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorTrap>::vals;
template<> ArmFault::FaultVals ArmFaultVals<PCAlignmentFault>::vals;
template<> ArmFault::FaultVals ArmFaultVals<SPAlignmentFault>::vals;
template<> ArmFault::FaultVals ArmFaultVals<SystemError>::vals;
template<> ArmFault::FaultVals ArmFaultVals<SoftwareBreakpoint>::vals;
template<> ArmFault::FaultVals ArmFaultVals<ArmSev>::vals;

/**
 * Returns true if the fault passed as the first argument was triggered
 * by a memory access, false otherwise.
 * If true, the faulting virtual address is stored in the va argument.
 *
 * @param fault generated fault
 * @param va function will modify this passed-by-reference parameter
 *           with the correct faulting virtual address
 * @return true if va contains a valid value, false otherwise
 */
bool getFaultVAddr(Fault fault, Addr &va);
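
// Usage sketch (hypothetical caller; `fault` would come from a translation
// or memory access attempt):
//
//     Addr va;
//     if (getFaultVAddr(fault, va)) {
//         // va now holds the faulting virtual address
//     }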

} // namespace ArmISA

#endif // __ARM_FAULTS_HH__