40cf6341c1c5f848d13a302de252955cef42f1e7
[gem5.git] / src / arch / arm / faults.cc
1 /*
2 * Copyright (c) 2010, 2012-2014, 2016-2019 ARM Limited
3 * All rights reserved
4 *
5 * The license below extends only to copyright in the software and shall
6 * not be construed as granting a license to any other intellectual
7 * property including but not limited to intellectual property relating
8 * to a hardware implementation of the functionality of the software
9 * licensed hereunder. You may use the software subject to the license
10 * terms below provided that you ensure that this notice is replicated
11 * unmodified and in its entirety in all distributions of the software,
12 * modified or unmodified, in source code or in binary form.
13 *
14 * Copyright (c) 2003-2005 The Regents of The University of Michigan
15 * Copyright (c) 2007-2008 The Florida State University
16 * All rights reserved.
17 *
18 * Redistribution and use in source and binary forms, with or without
19 * modification, are permitted provided that the following conditions are
20 * met: redistributions of source code must retain the above copyright
21 * notice, this list of conditions and the following disclaimer;
22 * redistributions in binary form must reproduce the above copyright
23 * notice, this list of conditions and the following disclaimer in the
24 * documentation and/or other materials provided with the distribution;
25 * neither the name of the copyright holders nor the names of its
26 * contributors may be used to endorse or promote products derived from
27 * this software without specific prior written permission.
28 *
29 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
30 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
31 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
32 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
33 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
34 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
35 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
36 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
37 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
38 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
39 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
40 */
41
42 #include "arch/arm/faults.hh"
43
44 #include "arch/arm/insts/static_inst.hh"
45 #include "arch/arm/isa.hh"
46 #include "arch/arm/self_debug.hh"
47 #include "arch/arm/system.hh"
48 #include "arch/arm/utility.hh"
49 #include "base/compiler.hh"
50 #include "base/trace.hh"
51 #include "cpu/base.hh"
52 #include "cpu/thread_context.hh"
53 #include "debug/Faults.hh"
54 #include "sim/full_system.hh"
55
56 namespace ArmISA
57 {
58
// Map from ArmFault::FaultSource to the fault-status encoding used by the
// AArch32 Short-descriptor translation-table format (reported via DFSR/IFSR).
// Entries marked 0xff have no valid encoding in this format; the table is
// indexed by FaultSource, so it must stay exactly NumFaultSources long
// (checked by the static_assert below).
uint8_t ArmFault::shortDescFaultSources[] = {
    0x01,  // AlignmentFault
    0x04,  // InstructionCacheMaintenance
    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
    0x0c,  // SynchExtAbtOnTranslTableWalkL1
    0x0e,  // SynchExtAbtOnTranslTableWalkL2
    0xff,  // SynchExtAbtOnTranslTableWalkL3 (INVALID)
    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
    0x1c,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0xff,  // SynchPtyErrOnTranslTableWalkL3 (INVALID)
    0xff,  // TranslationL0 (INVALID)
    0x05,  // TranslationL1
    0x07,  // TranslationL2
    0xff,  // TranslationL3 (INVALID)
    0xff,  // AccessFlagL0 (INVALID)
    0x03,  // AccessFlagL1
    0x06,  // AccessFlagL2
    0xff,  // AccessFlagL3 (INVALID)
    0xff,  // DomainL0 (INVALID)
    0x09,  // DomainL1
    0x0b,  // DomainL2
    0xff,  // DomainL3 (INVALID)
    0xff,  // PermissionL0 (INVALID)
    0x0d,  // PermissionL1
    0x0f,  // PermissionL2
    0xff,  // PermissionL3 (INVALID)
    0x02,  // DebugEvent
    0x08,  // SynchronousExternalAbort
    0x10,  // TLBConflictAbort
    0x19,  // SynchPtyErrOnMemoryAccess
    0x16,  // AsynchronousExternalAbort
    0x18,  // AsynchPtyErrOnMemoryAccess
    0xff,  // AddressSizeL0 (INVALID)
    0xff,  // AddressSizeL1 (INVALID)
    0xff,  // AddressSizeL2 (INVALID)
    0xff,  // AddressSizeL3 (INVALID)
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::shortDescFaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::shortDescFaultSources[]");
103
// Map from ArmFault::FaultSource to the fault-status encoding used by the
// AArch32 Long-descriptor (LPAE) translation-table format. Entries marked
// 0xff have no valid encoding; indexed by FaultSource (size checked below).
uint8_t ArmFault::longDescFaultSources[] = {
    0x21,  // AlignmentFault
    0xff,  // InstructionCacheMaintenance (INVALID)
    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
    0x15,  // SynchExtAbtOnTranslTableWalkL1
    0x16,  // SynchExtAbtOnTranslTableWalkL2
    0x17,  // SynchExtAbtOnTranslTableWalkL3
    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
    0x1d,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0x1f,  // SynchPtyErrOnTranslTableWalkL3
    0xff,  // TranslationL0 (INVALID)
    0x05,  // TranslationL1
    0x06,  // TranslationL2
    0x07,  // TranslationL3
    0xff,  // AccessFlagL0 (INVALID)
    0x09,  // AccessFlagL1
    0x0a,  // AccessFlagL2
    0x0b,  // AccessFlagL3
    0xff,  // DomainL0 (INVALID)
    0x3d,  // DomainL1
    0x3e,  // DomainL2
    0xff,  // DomainL3 (RESERVED)
    0xff,  // PermissionL0 (INVALID)
    0x0d,  // PermissionL1
    0x0e,  // PermissionL2
    0x0f,  // PermissionL3
    0x22,  // DebugEvent
    0x10,  // SynchronousExternalAbort
    0x30,  // TLBConflictAbort
    0x18,  // SynchPtyErrOnMemoryAccess
    0x11,  // AsynchronousExternalAbort
    0x19,  // AsynchPtyErrOnMemoryAccess
    0xff,  // AddressSizeL0 (INVALID)
    0xff,  // AddressSizeL1 (INVALID)
    0xff,  // AddressSizeL2 (INVALID)
    0xff,  // AddressSizeL3 (INVALID)
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::longDescFaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::longDescFaultSources[]");
148
// Map from ArmFault::FaultSource to the AArch64 fault-status code reported
// in the ISS field of ESR_ELx. Entries marked 0xff have no valid encoding;
// indexed by FaultSource (size checked below).
uint8_t ArmFault::aarch64FaultSources[] = {
    0x21,  // AlignmentFault
    0xff,  // InstructionCacheMaintenance (INVALID)
    0x14,  // SynchExtAbtOnTranslTableWalkL0
    0x15,  // SynchExtAbtOnTranslTableWalkL1
    0x16,  // SynchExtAbtOnTranslTableWalkL2
    0x17,  // SynchExtAbtOnTranslTableWalkL3
    0x1c,  // SynchPtyErrOnTranslTableWalkL0
    0x1d,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0x1f,  // SynchPtyErrOnTranslTableWalkL3
    0x04,  // TranslationL0
    0x05,  // TranslationL1
    0x06,  // TranslationL2
    0x07,  // TranslationL3
    0x08,  // AccessFlagL0
    0x09,  // AccessFlagL1
    0x0a,  // AccessFlagL2
    0x0b,  // AccessFlagL3
    // @todo: Section & Page Domain Fault in AArch64?
    0xff,  // DomainL0 (INVALID)
    0xff,  // DomainL1 (INVALID)
    0xff,  // DomainL2 (INVALID)
    0xff,  // DomainL3 (INVALID)
    0x0c,  // PermissionL0
    0x0d,  // PermissionL1
    0x0e,  // PermissionL2
    0x0f,  // PermissionL3
    0x22,  // DebugEvent
    0x10,  // SynchronousExternalAbort
    0x30,  // TLBConflictAbort
    0x18,  // SynchPtyErrOnMemoryAccess
    0xff,  // AsynchronousExternalAbort (INVALID)
    0xff,  // AsynchPtyErrOnMemoryAccess (INVALID)
    0x00,  // AddressSizeL0
    0x01,  // AddressSizeL1
    0x02,  // AddressSizeL2
    0x03,  // AddressSizeL3
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::aarch64FaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::aarch64FaultSources[]");
194
// Per-fault constant tables (one FaultVals specialization per fault type).
// Constructor arguments, in order:
//   name,
//   offset                       (AArch32 vector offset),
//   currELTOffset, currELHOffset (AArch64 vector offset when taken from the
//                                 current EL, using SP_EL0 / SP_ELx),
//   lowerEL64Offset, lowerEL32Offset (AArch64 vector offset when taken from
//                                 a lower EL running AArch64 / AArch32),
//   next mode,
//   {ARM, Thumb, ARM_ELR, Thumb_ELR} PC offset,
//   hyp trap(pable), {A, F} disable, (exception) class, stat
template<> ArmFault::FaultVals ArmFaultVals<Reset>::vals(
    // Some dummy values (the reset vector has an IMPLEMENTATION DEFINED
    // location in AArch64)
    "Reset", 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
    0, 0, 0, 0, false, true, true, EC_UNKNOWN
);
template<> ArmFault::FaultVals ArmFaultVals<UndefinedInstruction>::vals(
    "Undefined Instruction", 0x004, 0x000, 0x200, 0x400, 0x600, MODE_UNDEFINED,
    4, 2, 0, 0, true, false, false, EC_UNKNOWN
);
template<> ArmFault::FaultVals ArmFaultVals<SupervisorCall>::vals(
    "Supervisor Call", 0x008, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    4, 2, 4, 2, true, false, false, EC_SVC_TO_HYP
);
template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorCall>::vals(
    "Secure Monitor Call", 0x008, 0x000, 0x200, 0x400, 0x600, MODE_MON,
    4, 4, 4, 4, false, true, true, EC_SMC_TO_HYP
);
template<> ArmFault::FaultVals ArmFaultVals<HypervisorCall>::vals(
    "Hypervisor Call", 0x008, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
    4, 4, 4, 4, true, false, false, EC_HVC
);
template<> ArmFault::FaultVals ArmFaultVals<PrefetchAbort>::vals(
    "Prefetch Abort", 0x00C, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    4, 4, 0, 0, true, true, false, EC_PREFETCH_ABORT_TO_HYP
);
template<> ArmFault::FaultVals ArmFaultVals<DataAbort>::vals(
    "Data Abort", 0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    8, 8, 0, 0, true, true, false, EC_DATA_ABORT_TO_HYP
);
template<> ArmFault::FaultVals ArmFaultVals<VirtualDataAbort>::vals(
    "Virtual Data Abort", 0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    8, 8, 0, 0, true, true, false, EC_INVALID
);
template<> ArmFault::FaultVals ArmFaultVals<HypervisorTrap>::vals(
    // @todo: double check these values
    "Hypervisor Trap", 0x014, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
    0, 0, 0, 0, false, false, false, EC_UNKNOWN
);
template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorTrap>::vals(
    "Secure Monitor Trap", 0x004, 0x000, 0x200, 0x400, 0x600, MODE_MON,
    4, 2, 0, 0, false, false, false, EC_UNKNOWN
);
template<> ArmFault::FaultVals ArmFaultVals<Interrupt>::vals(
    "IRQ", 0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
    4, 4, 0, 0, false, true, false, EC_UNKNOWN
);
template<> ArmFault::FaultVals ArmFaultVals<VirtualInterrupt>::vals(
    "Virtual IRQ", 0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
    4, 4, 0, 0, false, true, false, EC_INVALID
);
template<> ArmFault::FaultVals ArmFaultVals<FastInterrupt>::vals(
    "FIQ", 0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
    4, 4, 0, 0, false, true, true, EC_UNKNOWN
);
template<> ArmFault::FaultVals ArmFaultVals<VirtualFastInterrupt>::vals(
    "Virtual FIQ", 0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
    4, 4, 0, 0, false, true, true, EC_INVALID
);
template<> ArmFault::FaultVals ArmFaultVals<IllegalInstSetStateFault>::vals(
    "Illegal Inst Set State Fault", 0x004, 0x000, 0x200, 0x400, 0x600, MODE_UNDEFINED,
    4, 2, 0, 0, true, false, false, EC_ILLEGAL_INST
);
template<> ArmFault::FaultVals ArmFaultVals<SupervisorTrap>::vals(
    // Some dummy values (SupervisorTrap is AArch64-only)
    "Supervisor Trap", 0x014, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, false, false, false, EC_UNKNOWN
);
template<> ArmFault::FaultVals ArmFaultVals<PCAlignmentFault>::vals(
    // Some dummy values (PCAlignmentFault is AArch64-only)
    "PC Alignment Fault", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_PC_ALIGNMENT
);
template<> ArmFault::FaultVals ArmFaultVals<SPAlignmentFault>::vals(
    // Some dummy values (SPAlignmentFault is AArch64-only)
    "SP Alignment Fault", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_STACK_PTR_ALIGNMENT
);
template<> ArmFault::FaultVals ArmFaultVals<SystemError>::vals(
    // Some dummy values (SError is AArch64-only)
    "SError", 0x000, 0x180, 0x380, 0x580, 0x780, MODE_SVC,
    0, 0, 0, 0, false, true, true, EC_SERROR
);
template<> ArmFault::FaultVals ArmFaultVals<SoftwareBreakpoint>::vals(
    // Some dummy values (SoftwareBreakpoint is AArch64-only)
    "Software Breakpoint", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_SOFTWARE_BREAKPOINT
);
template<> ArmFault::FaultVals ArmFaultVals<HardwareBreakpoint>::vals(
    "Hardware Breakpoint", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_HW_BREAKPOINT
);
template<> ArmFault::FaultVals ArmFaultVals<Watchpoint>::vals(
    "Watchpoint", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_WATCHPOINT
);
template<> ArmFault::FaultVals ArmFaultVals<SoftwareStepFault>::vals(
    "SoftwareStep", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_SOFTWARE_STEP
);
template<> ArmFault::FaultVals ArmFaultVals<ArmSev>::vals(
    // Some dummy values
    "ArmSev Flush", 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
    0, 0, 0, 0, false, true, true, EC_UNKNOWN
);
303
304 Addr
305 ArmFault::getVector(ThreadContext *tc)
306 {
307 Addr base;
308
309 // Check for invalid modes
310 CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
311 assert(ArmSystem::haveSecurity(tc) || cpsr.mode != MODE_MON);
312 assert(ArmSystem::haveVirtualization(tc) || cpsr.mode != MODE_HYP);
313
314 switch (cpsr.mode)
315 {
316 case MODE_MON:
317 base = tc->readMiscReg(MISCREG_MVBAR);
318 break;
319 case MODE_HYP:
320 base = tc->readMiscReg(MISCREG_HVBAR);
321 break;
322 default:
323 SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
324 if (sctlr.v) {
325 base = HighVecs;
326 } else {
327 base = ArmSystem::haveSecurity(tc) ?
328 tc->readMiscReg(MISCREG_VBAR) : 0;
329 }
330 break;
331 }
332
333 return base + offset(tc);
334 }
335
336 Addr
337 ArmFault::getVector64(ThreadContext *tc)
338 {
339 Addr vbar;
340 switch (toEL) {
341 case EL3:
342 assert(ArmSystem::haveSecurity(tc));
343 vbar = tc->readMiscReg(MISCREG_VBAR_EL3);
344 break;
345 case EL2:
346 assert(ArmSystem::haveVirtualization(tc));
347 vbar = tc->readMiscReg(MISCREG_VBAR_EL2);
348 break;
349 case EL1:
350 vbar = tc->readMiscReg(MISCREG_VBAR_EL1);
351 break;
352 default:
353 panic("Invalid target exception level");
354 break;
355 }
356 return vbar + offset64(tc);
357 }
358
359 MiscRegIndex
360 ArmFault::getSyndromeReg64() const
361 {
362 switch (toEL) {
363 case EL1:
364 return MISCREG_ESR_EL1;
365 case EL2:
366 return MISCREG_ESR_EL2;
367 case EL3:
368 return MISCREG_ESR_EL3;
369 default:
370 panic("Invalid exception level");
371 break;
372 }
373 }
374
375 MiscRegIndex
376 ArmFault::getFaultAddrReg64() const
377 {
378 switch (toEL) {
379 case EL1:
380 return MISCREG_FAR_EL1;
381 case EL2:
382 return MISCREG_FAR_EL2;
383 case EL3:
384 return MISCREG_FAR_EL3;
385 default:
386 panic("Invalid exception level");
387 break;
388 }
389 }
390
// Build the syndrome value (EC, IL, CV/COND, ISS fields) for this fault and
// write it into the given syndrome register (HSR or ESR_ELx).
void
ArmFault::setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg)
{
    uint32_t value;
    uint32_t exc_class = (uint32_t) ec(tc);
    uint32_t issVal = iss();

    assert(!from64 || ArmSystem::highestELIs64(tc));

    // EC occupies the top six bits [31:26].
    value = exc_class << 26;

    // HSR.IL not valid for Prefetch Aborts (0x20, 0x21) and Data Aborts (0x24,
    // 0x25) for which the ISS information is not valid (ARMv7).
    // @todo: ARMv8 revises AArch32 functionality: when HSR.IL is not
    // valid it is treated as RES1.
    if (to64) {
        value |= 1 << 25;
    } else if ((bits(exc_class, 5, 3) != 4) ||
               (bits(exc_class, 2) && bits(issVal, 24))) {
        // IL = 1 for 32-bit (ARM or 32-bit Thumb) instructions.
        if (!machInst.thumb || machInst.bigThumb)
            value |= 1 << 25;
    }
    // CV/COND are only inserted for EC values 0x01-0x0f
    // (i.e. EC[5:4] == 0 and EC[3:0] != 0), and only from AArch32.
    if (!from64 && ((bits(exc_class, 5, 4) == 0) &&
                    (bits(exc_class, 3, 0) != 0))) {
        if (!machInst.thumb) {
            uint32_t cond;
            ConditionCode condCode = (ConditionCode) (uint32_t) machInst.condCode;
            // If its on unconditional instruction report with a cond code of
            // 0xE, ie the unconditional code
            cond = (condCode == COND_UC) ? COND_AL : condCode;
            value |= cond << 20;   // COND field [23:20]
            value |= 1 << 24;      // CV (condition valid)
        }
        // Only ISS[19:0] is valid when COND/CV occupy [24:20].
        value |= bits(issVal, 19, 0);
    } else {
        value |= issVal;
    }
    tc->setMiscReg(syndrome_reg, value);
}
431
// Resolve the fault's routing state before invocation: source mode/EL,
// target mode/EL (including routing to monitor or hyp), the SPAN
// (Set-PAN-on-entry) decision, and whether the handler runs in AArch64.
// Must be called before any of the fault-state fields are consumed.
void
ArmFault::update(ThreadContext *tc)
{
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);

    // Determine source exception level and mode
    fromMode = (OperatingMode) (uint8_t) cpsr.mode;
    fromEL = opModeToEL(fromMode);
    if (opModeIs64(fromMode))
        from64 = true;

    // Determine target exception level (aarch64) or target execution
    // mode (aarch32). Routing to EL3/EL2 is decided by the per-fault
    // virtual routeToMonitor/routeToHyp hooks.
    if (ArmSystem::haveSecurity(tc) && routeToMonitor(tc)) {
        toMode = MODE_MON;
        toEL = EL3;
    } else if (ArmSystem::haveVirtualization(tc) && routeToHyp(tc)) {
        toMode = MODE_HYP;
        toEL = EL2;
        hypRouted = true;
    } else {
        toMode = nextMode();
        toEL = opModeToEL(toMode);
    }

    // An exception is never taken to an EL lower than the one it came from.
    if (fromEL > toEL)
        toEL = fromEL;

    // Check for Set Priviledge Access Never, if PAN is supported
    AA64MMFR1 mmfr1 = tc->readMiscReg(MISCREG_ID_AA64MMFR1_EL1);
    if (mmfr1.pan) {
        if (toEL == EL1) {
            const SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR_EL1);
            span = !sctlr.span;
        }

        // With HCR_EL2.{E2H,TGE} == {1,1} EL2 behaves like EL1 for
        // this purpose, so honour SCTLR_EL2.SPAN instead.
        const HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
        if (toEL == EL2 && hcr.e2h && hcr.tge) {
            const SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR_EL2);
            span = !sctlr.span;
        }
    }

    to64 = ELIs64(tc, toEL);

    // The fault specific informations have been updated; it is
    // now possible to use them inside the fault.
    faultUpdated = true;
}
481
// Take this fault on the given thread context. Dispatches to invoke64() for
// AArch64 targets; otherwise performs the full AArch32 exception entry:
// save CPSR into the banked SPSR, switch mode, mask A/I/F as required,
// set the banked LR/ELR_hyp return address, write the syndrome when
// entering hyp, and redirect the PC to the exception vector.
void
ArmFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    // Update fault state informations, like the starting mode (aarch32)
    // or EL (aarch64) and the ending mode or EL.
    // From the update function we are also evaluating if the fault must
    // be handled in AArch64 mode (to64).
    update(tc);

    if (to64) {
        // Invoke exception handler in AArch64 state
        invoke64(tc, inst);
        return;
    }

    // Debug vector catch may replace this fault entirely.
    if (vectorCatch(tc, inst))
        return;

    // ARMv7 (ARM ARM issue C B1.9)

    bool have_security = ArmSystem::haveSecurity(tc);

    FaultBase::invoke(tc);
    if (!FullSystem)
        return;
    countStat()++;

    SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
    SCR scr = tc->readMiscReg(MISCREG_SCR);
    // Snapshot the full pre-exception CPSR (including the CC and IT state
    // kept outside MISCREG_CPSR) — this becomes the target mode's SPSR.
    CPSR saved_cpsr = tc->readMiscReg(MISCREG_CPSR);
    saved_cpsr.nz = tc->readCCReg(CCREG_NZ);
    saved_cpsr.c = tc->readCCReg(CCREG_C);
    saved_cpsr.v = tc->readCCReg(CCREG_V);
    saved_cpsr.ge = tc->readCCReg(CCREG_GE);

    Addr curPc M5_VAR_USED = tc->pcState().pc();
    ITSTATE it = tc->pcState().itstate();
    saved_cpsr.it2 = it.top6;
    saved_cpsr.it1 = it.bottom2;

    // if we have a valid instruction then use it to annotate this fault with
    // extra information. This is used to generate the correct fault syndrome
    // information
    ArmStaticInst *arm_inst M5_VAR_USED = instrAnnotate(inst);

    // Ensure Secure state if initially in Monitor mode
    if (have_security && saved_cpsr.mode == MODE_MON) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        if (scr.ns) {
            scr.ns = 0;
            tc->setMiscRegNoEffect(MISCREG_SCR, scr);
        }
    }

    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
    cpsr.mode = toMode;

    // some bits are set differently if we have been routed to hyp mode
    if (cpsr.mode == MODE_HYP) {
        SCTLR hsctlr = tc->readMiscReg(MISCREG_HSCTLR);
        cpsr.t = hsctlr.te;
        cpsr.e = hsctlr.ee;
        // A/F/I are only masked in hyp if SCR does not route them to monitor.
        if (!scr.ea) {cpsr.a = 1;}
        if (!scr.fiq) {cpsr.f = 1;}
        if (!scr.irq) {cpsr.i = 1;}
    } else if (cpsr.mode == MODE_MON) {
        // Special case handling when entering monitor mode
        cpsr.t = sctlr.te;
        cpsr.e = sctlr.ee;
        cpsr.a = 1;
        cpsr.f = 1;
        cpsr.i = 1;
    } else {
        cpsr.t = sctlr.te;
        cpsr.e = sctlr.ee;

        // The *Disable functions are virtual and different per fault
        cpsr.a = cpsr.a | abortDisable(tc);
        cpsr.f = cpsr.f | fiqDisable(tc);
        cpsr.i = 1;
    }
    // IT state and Jazelle are always cleared on exception entry.
    cpsr.it1 = cpsr.it2 = 0;
    cpsr.j = 0;
    cpsr.pan = span ? 1 : saved_cpsr.pan;
    tc->setMiscReg(MISCREG_CPSR, cpsr);

    // Make sure mailbox sets to one always
    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);

    // Clear the exclusive monitor
    tc->setMiscReg(MISCREG_LOCKFLAG, 0);

    // Preferred return address, adjusted by the per-fault PC offset,
    // goes to ELR_hyp (hyp entry) or the banked LR (all other modes).
    if (cpsr.mode == MODE_HYP) {
        tc->setMiscReg(MISCREG_ELR_HYP, curPc +
                (saved_cpsr.t ? thumbPcOffset(true)  : armPcOffset(true)));
    } else {
        tc->setIntReg(INTREG_LR, curPc +
                (saved_cpsr.t ? thumbPcOffset(false) : armPcOffset(false)));
    }

    switch (cpsr.mode) {
      case MODE_FIQ:
        tc->setMiscReg(MISCREG_SPSR_FIQ, saved_cpsr);
        break;
      case MODE_IRQ:
        tc->setMiscReg(MISCREG_SPSR_IRQ, saved_cpsr);
        break;
      case MODE_SVC:
        tc->setMiscReg(MISCREG_SPSR_SVC, saved_cpsr);
        break;
      case MODE_MON:
        assert(have_security);
        tc->setMiscReg(MISCREG_SPSR_MON, saved_cpsr);
        break;
      case MODE_ABORT:
        tc->setMiscReg(MISCREG_SPSR_ABT, saved_cpsr);
        break;
      case MODE_UNDEFINED:
        tc->setMiscReg(MISCREG_SPSR_UND, saved_cpsr);
        if (ec(tc) != EC_UNKNOWN)
            setSyndrome(tc, MISCREG_HSR);
        break;
      case MODE_HYP:
        assert(ArmSystem::haveVirtualization(tc));
        tc->setMiscReg(MISCREG_SPSR_HYP, saved_cpsr);
        setSyndrome(tc, MISCREG_HSR);
        break;
      default:
        panic("unknown Mode\n");
    }

    Addr newPc = getVector(tc);
    DPRINTF(Faults, "Invoking Fault:%s cpsr:%#x PC:%#x lr:%#x newVec: %#x "
            "%s\n", name(), cpsr, curPc, tc->readIntReg(INTREG_LR),
            newPc, arm_inst ? csprintf("inst: %#x", arm_inst->encoding()) :
            std::string());
    PCState pc(newPc);
    pc.thumb(cpsr.t);
    pc.nextThumb(pc.thumb());
    pc.jazelle(cpsr.j);
    pc.nextJazelle(pc.jazelle());
    pc.aarch64(!cpsr.width);
    pc.nextAArch64(!cpsr.width);
    pc.illegalExec(false);
    tc->pcState(pc);
}
628
// AArch64 exception entry: save PSTATE into SPSR_ELx, save the preferred
// return address into ELR_ELx, switch to the target EL (masking DAIF and
// applying PAN), redirect the PC to the vector, and record the syndrome
// in ESR_ELx for synchronous exceptions. Assumes update() already ran
// (called from ArmFault::invoke).
void
ArmFault::invoke64(ThreadContext *tc, const StaticInstPtr &inst)
{
    // Determine actual misc. register indices for ELR_ELx and SPSR_ELx
    MiscRegIndex elr_idx, spsr_idx;
    switch (toEL) {
      case EL1:
        elr_idx = MISCREG_ELR_EL1;
        spsr_idx = MISCREG_SPSR_EL1;
        break;
      case EL2:
        assert(ArmSystem::haveVirtualization(tc));
        elr_idx = MISCREG_ELR_EL2;
        spsr_idx = MISCREG_SPSR_EL2;
        break;
      case EL3:
        assert(ArmSystem::haveSecurity(tc));
        elr_idx = MISCREG_ELR_EL3;
        spsr_idx = MISCREG_SPSR_EL3;
        break;
      default:
        panic("Invalid target exception level");
        break;
    }

    // Save process state into SPSR_ELx
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
    CPSR spsr = cpsr;
    spsr.nz = tc->readCCReg(CCREG_NZ);
    spsr.c = tc->readCCReg(CCREG_C);
    spsr.v = tc->readCCReg(CCREG_V);
    spsr.ss = isResetSPSR() ? 0: cpsr.ss;
    if (from64) {
        // Force some bitfields to 0 (AArch32-only state has no meaning
        // when the exception came from AArch64)
        spsr.q = 0;
        spsr.it1 = 0;
        spsr.j = 0;
        spsr.ge = 0;
        spsr.it2 = 0;
        spsr.t = 0;
    } else {
        spsr.ge = tc->readCCReg(CCREG_GE);
        ITSTATE it = tc->pcState().itstate();
        spsr.it2 = it.top6;
        spsr.it1 = it.bottom2;
    }
    tc->setMiscReg(spsr_idx, spsr);

    // Save preferred return address into ELR_ELx
    Addr curr_pc = tc->pcState().pc();
    Addr ret_addr = curr_pc;
    if (from64)
        ret_addr += armPcElrOffset();
    else
        ret_addr += spsr.t ? thumbPcElrOffset() : armPcElrOffset();
    tc->setMiscReg(elr_idx, ret_addr);

    Addr vec_address = getVector64(tc);

    // Update process state: AArch64, SP_ELx selected, all DAIF masked.
    OperatingMode64 mode = 0;
    mode.spX = 1;
    mode.el = toEL;
    mode.width = 0;
    cpsr.mode = mode;
    cpsr.daif = 0xf;
    cpsr.il = 0;
    cpsr.ss = 0;
    cpsr.pan = span ? 1 : spsr.pan;
    tc->setMiscReg(MISCREG_CPSR, cpsr);

    // If we have a valid instruction then use it to annotate this fault with
    // extra information. This is used to generate the correct fault syndrome
    // information
    ArmStaticInst *arm_inst M5_VAR_USED = instrAnnotate(inst);

    // Set PC to start of exception handler
    Addr new_pc = purifyTaggedAddr(vec_address, tc, toEL, true);
    DPRINTF(Faults, "Invoking Fault (AArch64 target EL):%s cpsr:%#x PC:%#x "
            "elr:%#x newVec: %#x %s\n", name(), cpsr, curr_pc, ret_addr,
            new_pc, arm_inst ? csprintf("inst: %#x", arm_inst->encoding()) :
            std::string());
    PCState pc(new_pc);
    pc.aarch64(!cpsr.width);
    pc.nextAArch64(!cpsr.width);
    pc.illegalExec(false);
    pc.stepped(false);
    tc->pcState(pc);

    // Save exception syndrome (IRQ/FIQ are asynchronous and carry none)
    if ((nextMode() != MODE_IRQ) && (nextMode() != MODE_FIQ))
        setSyndrome(tc, getSyndromeReg64());
}
722
723 bool
724 ArmFault::vectorCatch(ThreadContext *tc, const StaticInstPtr &inst)
725 {
726 auto *isa = static_cast<ArmISA::ISA *>(tc->getIsaPtr());
727 SelfDebug * sd = isa->getSelfDebug();
728 VectorCatch* vc = sd->getVectorCatch(tc);
729 if (!vc->isVCMatch()) {
730 Fault fault = sd->testVectorCatch(tc, 0x0, this);
731 if (fault != NoFault)
732 fault->invoke(tc, inst);
733 return true;
734 }
735 return false;
736 }
737
738 ArmStaticInst *
739 ArmFault::instrAnnotate(const StaticInstPtr &inst)
740 {
741 if (inst) {
742 auto arm_inst = static_cast<ArmStaticInst *>(inst.get());
743 arm_inst->annotateFault(this);
744 return arm_inst;
745 } else {
746 return nullptr;
747 }
748 }
749
750 Addr
751 Reset::getVector(ThreadContext *tc)
752 {
753 Addr base;
754
755 // Check for invalid modes
756 CPSR M5_VAR_USED cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
757 assert(ArmSystem::haveSecurity(tc) || cpsr.mode != MODE_MON);
758 assert(ArmSystem::haveVirtualization(tc) || cpsr.mode != MODE_HYP);
759
760 // RVBAR is aliased (implemented as) MVBAR in gem5, since the two
761 // are mutually exclusive; there is no need to check here for
762 // which register to use since they hold the same value
763 base = tc->readMiscReg(MISCREG_MVBAR);
764
765 return base + offset(tc);
766 }
767
// Take the reset exception: clear interrupts/arch state (FS only), then
// either perform the normal AArch32 entry (highest EL is 32-bit) or jump
// straight to the IMPLEMENTATION DEFINED AArch64 reset address.
void
Reset::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        tc->getCpuPtr()->clearInterrupts(tc->threadId());
        tc->clearArchRegs();
    }
    if (!ArmSystem::highestELIs64(tc)) {
        ArmFault::invoke(tc, inst);
        // Seed the virtual MPIDR from the physical one at reset.
        tc->setMiscReg(MISCREG_VMPIDR,
                       getMPIDR(dynamic_cast<ArmSystem*>(tc->getSystemPtr()), tc));

        // Unless we have SMC code to get us there, boot in HYP!
        if (ArmSystem::haveVirtualization(tc) &&
            !ArmSystem::haveSecurity(tc)) {
            CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
            cpsr.mode = MODE_HYP;
            tc->setMiscReg(MISCREG_CPSR, cpsr);
        }
    } else {
        // Advance the PC to the IMPLEMENTATION DEFINED reset value
        PCState pc = ArmSystem::resetAddr(tc);
        pc.aarch64(true);
        pc.nextAArch64(true);
        tc->pcState(pc);
    }
}
795
796 void
797 UndefinedInstruction::invoke(ThreadContext *tc, const StaticInstPtr &inst)
798 {
799 if (FullSystem) {
800 ArmFault::invoke(tc, inst);
801 return;
802 }
803
804 // If the mnemonic isn't defined this has to be an unknown instruction.
805 assert(unknown || mnemonic != NULL);
806 auto arm_inst = static_cast<ArmStaticInst *>(inst.get());
807 if (disabled) {
808 panic("Attempted to execute disabled instruction "
809 "'%s' (inst 0x%08x)", mnemonic, arm_inst->encoding());
810 } else if (unknown) {
811 panic("Attempted to execute unknown instruction (inst 0x%08x)",
812 arm_inst->encoding());
813 } else {
814 panic("Attempted to execute unimplemented instruction "
815 "'%s' (inst 0x%08x)", mnemonic, arm_inst->encoding());
816 }
817 }
818
819 bool
820 UndefinedInstruction::routeToHyp(ThreadContext *tc) const
821 {
822 HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
823 return fromEL == EL2 ||
824 (EL2Enabled(tc) && (fromEL == EL0) && hcr.tge);
825 }
826
// Build the ISS for an undefined-instruction exception. When the EC has
// been overridden (i.e. the undef was really a trapped system-register /
// coprocessor access), the relevant instruction fields are repacked into
// the syndrome layout; otherwise the raw ISS is reported unchanged.
uint32_t
UndefinedInstruction::iss() const
{

    // If UndefinedInstruction is routed to hypervisor, iss field is 0.
    if (hypRouted) {
        return 0;
    }

    if (overrideEc == EC_INVALID)
        return issRaw;

    uint32_t new_iss = 0;
    uint32_t op0, op1, op2, CRn, CRm, Rt, dir;

    // Extract the access-descriptor fields from the instruction encoding.
    // dir distinguishes read (1) from write (0) accesses.
    dir = bits(machInst, 21, 21);
    op0 = bits(machInst, 20, 19);
    op1 = bits(machInst, 18, 16);
    CRn = bits(machInst, 15, 12);
    CRm = bits(machInst, 11, 8);
    op2 = bits(machInst, 7, 5);
    Rt = bits(machInst, 4, 0);

    // Repack into the syndrome's field positions.
    new_iss = op0 << 20 | op2 << 17 | op1 << 14 | CRn << 10 |
            Rt << 5 | CRm << 1 | dir;

    return new_iss;
}
855
// SVC entry point. In full-system mode this is a regular exception; in
// syscall-emulation mode the call is handed to the simulator's syscall
// layer and the PC is advanced manually (no exception entry happens).
void
SupervisorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        ArmFault::invoke(tc, inst);
        return;
    }

    // As of now, there isn't a 32 bit thumb version of this instruction.
    assert(!machInst.bigThumb);
    Fault fault;
    tc->syscall(&fault);

    // Advance the PC since that won't happen automatically.
    PCState pc = tc->pcState();
    assert(inst);
    inst->advancePC(pc);
    tc->pcState(pc);
}
875
876 bool
877 SupervisorCall::routeToHyp(ThreadContext *tc) const
878 {
879 HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
880 return fromEL == EL2 ||
881 (EL2Enabled(tc) && fromEL == EL0 && hcr.tge);
882 }
883
884 ExceptionClass
885 SupervisorCall::ec(ThreadContext *tc) const
886 {
887 return (overrideEc != EC_INVALID) ? overrideEc :
888 (from64 ? EC_SVC_64 : vals.ec);
889 }
890
891 uint32_t
892 SupervisorCall::iss() const
893 {
894 // Even if we have a 24 bit imm from an arm32 instruction then we only use
895 // the bottom 16 bits for the ISS value (it doesn't hurt for AArch64 SVC).
896 return issRaw & 0xFFFF;
897 }
898
899 uint32_t
900 SecureMonitorCall::iss() const
901 {
902 if (from64)
903 return bits(machInst, 20, 5);
904 return 0;
905 }
906
907 ExceptionClass
908 UndefinedInstruction::ec(ThreadContext *tc) const
909 {
910 // If UndefinedInstruction is routed to hypervisor,
911 // HSR.EC field is 0.
912 if (hypRouted)
913 return EC_UNKNOWN;
914 else
915 return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
916 }
917
918
HypervisorCall::HypervisorCall(ExtMachInst _machInst, uint32_t _imm) :
    ArmFaultVals<HypervisorCall>(_machInst, _imm)
{
    // Flag consumed by the software-step debug logic — presumably marks
    // HVC as a step boundary; confirm against self_debug.hh.
    bStep = true;
}
924
925 bool
926 HypervisorCall::routeToMonitor(ThreadContext *tc) const
927 {
928 return from64 && fromEL == EL3;
929 }
930
931 bool
932 HypervisorCall::routeToHyp(ThreadContext *tc) const
933 {
934 return !from64 || fromEL != EL3;
935 }
936
937 ExceptionClass
938 HypervisorCall::ec(ThreadContext *tc) const
939 {
940 return from64 ? EC_HVC_64 : vals.ec;
941 }
942
943 ExceptionClass
944 HypervisorTrap::ec(ThreadContext *tc) const
945 {
946 return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
947 }
948
// AArch32 vector offset for this fault type. Returns the hyp trap vector
// offset (0x14) instead of the table value when the fault was trapped into
// hyp mode; otherwise the per-fault offset from the vals table.
template<class T>
FaultOffset
ArmFaultVals<T>::offset(ThreadContext *tc)
{
    bool isHypTrap = false;

    // Normally we just use the exception vector from the table at the top of
    // this file; however, if this exception has caused a transition to hyp
    // mode, and it's an exception type that would only do this if it has been
    // trapped, then we use the hyp trap vector instead of the normal vector.
    if (vals.hypTrappable) {
        CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
        if (cpsr.mode == MODE_HYP) {
            // SPSR_HYP holds the pre-exception mode: if it was not hyp,
            // this exception trapped us into hyp.
            CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
            isHypTrap = spsr.mode != MODE_HYP;
        }
    }
    return isHypTrap ? 0x14 : vals.offset;
}
968
// AArch64 vector offset for this fault type, selected by whether the
// exception stays at the current EL (and which stack pointer is in use)
// or comes from a lower EL (and whether that EL runs AArch32 or AArch64).
template<class T>
FaultOffset
ArmFaultVals<T>::offset64(ThreadContext *tc)
{
    if (toEL == fromEL) {
        // Same EL: SP_EL0 (thread mode) vs SP_ELx (handler mode) vectors.
        if (opModeIsT(fromMode))
            return vals.currELTOffset;
        return vals.currELHOffset;
    } else {
        bool lower_32 = false;
        if (toEL == EL3) {
            // The EL immediately below EL3 is EL2 when it exists and we
            // are non-secure, EL1 otherwise.
            if (!inSecureState(tc) && ArmSystem::haveEL(tc, EL2))
                lower_32 = ELIs32(tc, EL2);
            else
                lower_32 = ELIs32(tc, EL1);
        } else {
            lower_32 = ELIs32(tc, static_cast<ExceptionLevel>(toEL - 1));
        }

        if (lower_32)
            return vals.lowerEL32Offset;
        return vals.lowerEL64Offset;
    }
}
993
994 // void
995 // SupervisorCall::setSyndrome64(ThreadContext *tc, MiscRegIndex esr_idx)
996 // {
997 // ESR esr = 0;
998 // esr.ec = machInst.aarch64 ? SvcAArch64 : SvcAArch32;
999 // esr.il = !machInst.thumb;
1000 // if (machInst.aarch64)
1001 // esr.imm16 = bits(machInst.instBits, 20, 5);
1002 // else if (machInst.thumb)
1003 // esr.imm16 = bits(machInst.instBits, 7, 0);
1004 // else
1005 // esr.imm16 = bits(machInst.instBits, 15, 0);
1006 // tc->setMiscReg(esr_idx, esr);
1007 // }
1008
1009 void
1010 SecureMonitorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
1011 {
1012 if (FullSystem) {
1013 ArmFault::invoke(tc, inst);
1014 return;
1015 }
1016 }
1017
1018 ExceptionClass
1019 SecureMonitorCall::ec(ThreadContext *tc) const
1020 {
1021 return (from64 ? EC_SMC_64 : vals.ec);
1022 }
1023
1024 bool
1025 SupervisorTrap::routeToHyp(ThreadContext *tc) const
1026 {
1027 HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
1028 return EL2Enabled(tc) && currEL(tc) <= EL1 && hcr.tge;
1029 }
1030
1031 uint32_t
1032 SupervisorTrap::iss() const
1033 {
1034 // If SupervisorTrap is routed to hypervisor, iss field is 0.
1035 if (hypRouted) {
1036 return 0;
1037 }
1038 return issRaw;
1039 }
1040
1041 ExceptionClass
1042 SupervisorTrap::ec(ThreadContext *tc) const
1043 {
1044 if (hypRouted)
1045 return EC_UNKNOWN;
1046 else
1047 return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
1048 }
1049
1050 ExceptionClass
1051 SecureMonitorTrap::ec(ThreadContext *tc) const
1052 {
1053 return (overrideEc != EC_INVALID) ? overrideEc :
1054 (from64 ? EC_SMC_64 : vals.ec);
1055 }
1056
// Architectural invoke for aborts: resolves the translation-method
// encoding, clears any pending async abort interrupt, performs the generic
// fault entry, and then writes the FSR/FAR (AArch32) or FAR/HPFAR
// (AArch64) registers.
template<class T>
void
AbortFault<T>::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    // Resolve the translation method lazily if it wasn't annotated: use the
    // long-descriptor (LPAE) encoding when that format is in use, otherwise
    // the classic VMSA short-descriptor encoding.
    if (tranMethod == ArmFault::UnknownTran) {
        tranMethod = longDescFormatInUse(tc) ? ArmFault::LpaeTran
                                             : ArmFault::VmsaTran;

        if ((tranMethod == ArmFault::VmsaTran) && this->routeToMonitor(tc)) {
            // See ARM ARM B3-1416
            bool override_LPAE = false;
            TTBCR ttbcr_s = tc->readMiscReg(MISCREG_TTBCR_S);
            TTBCR M5_VAR_USED ttbcr_ns = tc->readMiscReg(MISCREG_TTBCR_NS);
            if (ttbcr_s.eae) {
                override_LPAE = true;
            } else {
                // Unimplemented code option, not seen in testing. May need
                // extension according to the manual excerpt above.
                DPRINTF(Faults, "Warning: Incomplete translation method "
                        "override detected.\n");
            }
            if (override_LPAE)
                tranMethod = ArmFault::LpaeTran;
        }
    }

    // Taking the abort consumes any pending asynchronous-abort interrupt.
    if (source == ArmFault::AsynchronousExternalAbort) {
        tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
    }
    // Get effective fault source encoding
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);

    // source must be determined BEFORE invoking generic routines which will
    // try to set hsr etc. and are based upon source!
    ArmFaultVals<T>::invoke(tc, inst);

    if (!this->to64) {  // AArch32
        FSR fsr = getFsr(tc);
        if (cpsr.mode == MODE_HYP) {
            // Aborts taken in hyp mode report the address via HxFAR.
            tc->setMiscReg(T::HFarIndex, faultAddr);
        } else if (stage2) {
            // Stage-2 fault: HPFAR gets the faulting IPA, HxFAR the
            // original VA.
            tc->setMiscReg(MISCREG_HPFAR, (faultAddr >> 8) & ~0xf);
            tc->setMiscReg(T::HFarIndex, OVAddr);
        } else if (debug > ArmFault::NODEBUG) {
            // Debug exception: record the cause in DBGDSCR.MOE as well.
            DBGDS32 Rext = tc->readMiscReg(MISCREG_DBGDSCRext);
            tc->setMiscReg(T::FarIndex, faultAddr);
            if (debug == ArmFault::BRKPOINT){
                Rext.moe = 0x1;
            } else if (debug == ArmFault::VECTORCATCH){
                Rext.moe = 0x5;
            } else if (debug > ArmFault::VECTORCATCH) {
                Rext.moe = 0xa;
                fsr.cm = (debug == ArmFault::WPOINT_CM)? 1 : 0;
            }

            tc->setMiscReg(T::FsrIndex, fsr);
            tc->setMiscReg(MISCREG_DBGDSCRext, Rext);

        } else {
            tc->setMiscReg(T::FsrIndex, fsr);
            tc->setMiscReg(T::FarIndex, faultAddr);
        }
        DPRINTF(Faults, "Abort Fault source=%#x fsr=%#x faultAddr=%#x "\
                "tranMethod=%#x\n", source, fsr, faultAddr, tranMethod);
    } else {  // AArch64
        // Set the FAR register. Nothing else to do if we are in AArch64 state
        // because the syndrome register has already been set inside invoke64()
        if (stage2) {
            // stage 2 fault, set HPFAR_EL2 to the faulting IPA
            // and FAR_EL2 to the Original VA
            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), OVAddr);
            tc->setMiscReg(MISCREG_HPFAR_EL2, bits(faultAddr, 47, 12) << 4);

            DPRINTF(Faults, "Abort Fault (Stage 2) VA: 0x%x IPA: 0x%x\n",
                    OVAddr, faultAddr);
        } else {
            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), faultAddr);
        }
    }
}
1137
1138 template<class T>
1139 void
1140 AbortFault<T>::setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg)
1141 {
1142 srcEncoded = getFaultStatusCode(tc);
1143 if (srcEncoded == ArmFault::FaultSourceInvalid) {
1144 panic("Invalid fault source\n");
1145 }
1146 ArmFault::setSyndrome(tc, syndrome_reg);
1147 }
1148
1149 template<class T>
1150 uint8_t
1151 AbortFault<T>::getFaultStatusCode(ThreadContext *tc) const
1152 {
1153
1154 panic_if(!this->faultUpdated,
1155 "Trying to use un-updated ArmFault internal variables\n");
1156
1157 uint8_t fsc = 0;
1158
1159 if (!this->to64) {
1160 // AArch32
1161 assert(tranMethod != ArmFault::UnknownTran);
1162 if (tranMethod == ArmFault::LpaeTran) {
1163 fsc = ArmFault::longDescFaultSources[source];
1164 } else {
1165 fsc = ArmFault::shortDescFaultSources[source];
1166 }
1167 } else {
1168 // AArch64
1169 fsc = ArmFault::aarch64FaultSources[source];
1170 }
1171
1172 return fsc;
1173 }
1174
1175 template<class T>
1176 FSR
1177 AbortFault<T>::getFsr(ThreadContext *tc) const
1178 {
1179 FSR fsr = 0;
1180
1181 auto fsc = getFaultStatusCode(tc);
1182
1183 // AArch32
1184 assert(tranMethod != ArmFault::UnknownTran);
1185 if (tranMethod == ArmFault::LpaeTran) {
1186 fsr.status = fsc;
1187 fsr.lpae = 1;
1188 } else {
1189 fsr.fsLow = bits(fsc, 3, 0);
1190 fsr.fsHigh = bits(fsc, 4);
1191 fsr.domain = static_cast<uint8_t>(domain);
1192 }
1193
1194 fsr.wnr = (write ? 1 : 0);
1195 fsr.ext = 0;
1196
1197 return fsr;
1198 }
1199
1200 template<class T>
1201 bool
1202 AbortFault<T>::abortDisable(ThreadContext *tc)
1203 {
1204 if (ArmSystem::haveSecurity(tc)) {
1205 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1206 return (!scr.ns || scr.aw);
1207 }
1208 return true;
1209 }
1210
1211 template<class T>
1212 void
1213 AbortFault<T>::annotate(ArmFault::AnnotationIDs id, uint64_t val)
1214 {
1215 switch (id)
1216 {
1217 case ArmFault::S1PTW:
1218 s1ptw = val;
1219 break;
1220 case ArmFault::OVA:
1221 OVAddr = val;
1222 break;
1223
1224 // Just ignore unknown ID's
1225 default:
1226 break;
1227 }
1228 }
1229
1230 template<class T>
1231 uint32_t
1232 AbortFault<T>::iss() const
1233 {
1234 uint32_t val;
1235
1236 val = srcEncoded & 0x3F;
1237 val |= write << 6;
1238 val |= s1ptw << 7;
1239 return (val);
1240 }
1241
1242 template<class T>
1243 bool
1244 AbortFault<T>::isMMUFault() const
1245 {
1246 // NOTE: Not relying on LL information being aligned to lowest bits here
1247 return
1248 (source == ArmFault::AlignmentFault) ||
1249 ((source >= ArmFault::TranslationLL) &&
1250 (source < ArmFault::TranslationLL + 4)) ||
1251 ((source >= ArmFault::AccessFlagLL) &&
1252 (source < ArmFault::AccessFlagLL + 4)) ||
1253 ((source >= ArmFault::DomainLL) &&
1254 (source < ArmFault::DomainLL + 4)) ||
1255 ((source >= ArmFault::PermissionLL) &&
1256 (source < ArmFault::PermissionLL + 4));
1257 }
1258
1259 template<class T>
1260 bool
1261 AbortFault<T>::getFaultVAddr(Addr &va) const
1262 {
1263 va = (stage2 ? OVAddr : faultAddr);
1264 return true;
1265 }
1266
1267 ExceptionClass
1268 PrefetchAbort::ec(ThreadContext *tc) const
1269 {
1270 if (to64) {
1271 // AArch64
1272 if (toEL == fromEL)
1273 return EC_PREFETCH_ABORT_CURR_EL;
1274 else
1275 return EC_PREFETCH_ABORT_LOWER_EL;
1276 } else {
1277 // AArch32
1278 // Abort faults have different EC codes depending on whether
1279 // the fault originated within HYP mode, or not. So override
1280 // the method and add the extra adjustment of the EC value.
1281
1282 ExceptionClass ec = ArmFaultVals<PrefetchAbort>::vals.ec;
1283
1284 CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
1285 if (spsr.mode == MODE_HYP) {
1286 ec = ((ExceptionClass) (((uint32_t) ec) + 1));
1287 }
1288 return ec;
1289 }
1290 }
1291
1292 bool
1293 PrefetchAbort::routeToMonitor(ThreadContext *tc) const
1294 {
1295 SCR scr = 0;
1296 if (from64)
1297 scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1298 else
1299 scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1300
1301 return scr.ea && !isMMUFault();
1302 }
1303
bool
PrefetchAbort::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);

    // Aborts taken from EL2 stay in EL2. Otherwise route to EL2 when it is
    // implemented, we are in the non-secure state at EL1 or below, and
    // either TGE is set, this is a stage-2 fault, or this is a debug event
    // trapped by HDCR.TDE.
    toHyp = fromEL == EL2;
    toHyp |= ArmSystem::haveEL(tc, EL2) && !inSecureState(tc) &&
        currEL(tc) <= EL1 && (hcr.tge || stage2 ||
        (source == DebugEvent && hdcr.tde));
    return toHyp;
}
1318
1319 ExceptionClass
1320 DataAbort::ec(ThreadContext *tc) const
1321 {
1322 if (to64) {
1323 // AArch64
1324 if (source == ArmFault::AsynchronousExternalAbort) {
1325 panic("Asynchronous External Abort should be handled with "
1326 "SystemErrors (SErrors)!");
1327 }
1328 if (toEL == fromEL)
1329 return EC_DATA_ABORT_CURR_EL;
1330 else
1331 return EC_DATA_ABORT_LOWER_EL;
1332 } else {
1333 // AArch32
1334 // Abort faults have different EC codes depending on whether
1335 // the fault originated within HYP mode, or not. So override
1336 // the method and add the extra adjustment of the EC value.
1337
1338 ExceptionClass ec = ArmFaultVals<DataAbort>::vals.ec;
1339
1340 CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
1341 if (spsr.mode == MODE_HYP) {
1342 ec = ((ExceptionClass) (((uint32_t) ec) + 1));
1343 }
1344 return ec;
1345 }
1346 }
1347
1348 bool
1349 DataAbort::routeToMonitor(ThreadContext *tc) const
1350 {
1351 SCR scr = 0;
1352 if (from64)
1353 scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1354 else
1355 scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1356
1357 return scr.ea && !isMMUFault();
1358 }
1359
bool
DataAbort::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);

    // Effective AMO: with TGE set, AMO behaves as 1 unless the Virtual
    // Host Extension is implemented and HCR.E2H is set.
    bool amo = hcr.amo;
    if (hcr.tge == 1)
        amo = (!HaveVirtHostExt(tc) || hcr.e2h == 0);

    // if in Hyp mode then stay in Hyp mode; otherwise route to EL2 for
    // TGE, stage-2 faults, async external aborts (per effective AMO),
    // EL0 alignment/sync-external aborts under TGE, or trapped debug
    // events (HDCR.TDE or TGE).
    toHyp = fromEL == EL2 ||
        (EL2Enabled(tc) && fromEL <= EL1
         && (hcr.tge || stage2 ||
             ((source == AsynchronousExternalAbort) && amo) ||
             ((fromEL == EL0) && hcr.tge &&
              ((source == AlignmentFault) ||
               (source == SynchronousExternalAbort))) ||
             ((source == DebugEvent) && (hdcr.tde || hcr.tge))));
    return toHyp;
}
1383
uint32_t
DataAbort::iss() const
{
    uint32_t val;

    // Add on the data abort specific fields to the generic abort ISS value
    val = AbortFault<DataAbort>::iss();

    // Cache-maintenance flag (bit 8).
    val |= cm << 8;

    // ISS is valid if not caused by a stage 1 page table walk, and when taken
    // to AArch64 only when directed to EL2
    // NOTE(review): the condition additionally requires `stage2`, which the
    // comment above does not mention and which upstream gem5 does not have —
    // confirm against the ARM ARM whether ISV may also be reported for
    // stage 1 aborts before changing either the code or the comment.
    if (!s1ptw && stage2 && (!to64 || toEL == EL2)) {
        val |= isv << 24;
        if (isv) {
            // Instruction-syndrome fields recorded via annotate().
            val |= sas << 22;
            val |= sse << 21;
            val |= srt << 16;
            // AArch64 only. These assignments are safe on AArch32 as well
            // because these vars are initialized to false
            val |= sf << 15;
            val |= ar << 14;
        }
    }
    return (val);
}
1410
1411 void
1412 DataAbort::annotate(AnnotationIDs id, uint64_t val)
1413 {
1414 AbortFault<DataAbort>::annotate(id, val);
1415 switch (id)
1416 {
1417 case SAS:
1418 isv = true;
1419 sas = val;
1420 break;
1421 case SSE:
1422 isv = true;
1423 sse = val;
1424 break;
1425 case SRT:
1426 isv = true;
1427 srt = val;
1428 break;
1429 case SF:
1430 isv = true;
1431 sf = val;
1432 break;
1433 case AR:
1434 isv = true;
1435 ar = val;
1436 break;
1437 case CM:
1438 cm = val;
1439 break;
1440 case OFA:
1441 faultAddr = val;
1442 break;
1443 // Just ignore unknown ID's
1444 default:
1445 break;
1446 }
1447 }
1448
1449 void
1450 VirtualDataAbort::invoke(ThreadContext *tc, const StaticInstPtr &inst)
1451 {
1452 AbortFault<VirtualDataAbort>::invoke(tc, inst);
1453 HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
1454 hcr.va = 0;
1455 tc->setMiscRegNoEffect(MISCREG_HCR, hcr);
1456 }
1457
1458 bool
1459 Interrupt::routeToMonitor(ThreadContext *tc) const
1460 {
1461 assert(ArmSystem::haveSecurity(tc));
1462 SCR scr = 0;
1463 if (from64)
1464 scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1465 else
1466 scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1467 return scr.irq;
1468 }
1469
1470 bool
1471 Interrupt::routeToHyp(ThreadContext *tc) const
1472 {
1473 HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
1474 return fromEL == EL2 ||
1475 (EL2Enabled(tc) && fromEL <= EL1 && (hcr.tge || hcr.imo));
1476 }
1477
1478 bool
1479 Interrupt::abortDisable(ThreadContext *tc)
1480 {
1481 if (ArmSystem::haveSecurity(tc)) {
1482 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1483 return (!scr.ns || scr.aw);
1484 }
1485 return true;
1486 }
1487
// Nothing to initialize beyond the ArmFaultVals defaults.
VirtualInterrupt::VirtualInterrupt()
{}
1490
1491 bool
1492 FastInterrupt::routeToMonitor(ThreadContext *tc) const
1493 {
1494 assert(ArmSystem::haveSecurity(tc));
1495 SCR scr = 0;
1496 if (from64)
1497 scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1498 else
1499 scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1500 return scr.fiq;
1501 }
1502
1503 bool
1504 FastInterrupt::routeToHyp(ThreadContext *tc) const
1505 {
1506 HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
1507 return fromEL == EL2 ||
1508 (EL2Enabled(tc) && fromEL <= EL1 && (hcr.tge || hcr.fmo));
1509 }
1510
1511 bool
1512 FastInterrupt::abortDisable(ThreadContext *tc)
1513 {
1514 if (ArmSystem::haveSecurity(tc)) {
1515 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1516 return (!scr.ns || scr.aw);
1517 }
1518 return true;
1519 }
1520
1521 bool
1522 FastInterrupt::fiqDisable(ThreadContext *tc)
1523 {
1524 if (ArmSystem::haveVirtualization(tc)) {
1525 return true;
1526 } else if (ArmSystem::haveSecurity(tc)) {
1527 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1528 return (!scr.ns || scr.fw);
1529 }
1530 return true;
1531 }
1532
// Nothing to initialize beyond the ArmFaultVals defaults.
VirtualFastInterrupt::VirtualFastInterrupt()
{}
1535
void
PCAlignmentFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    ArmFaultVals<PCAlignmentFault>::invoke(tc, inst);
    // PC alignment faults only exist in AArch64 state.
    assert(from64);
    // Set the FAR to the misaligned PC value.
    tc->setMiscReg(getFaultAddrReg64(), faultPC);
}
1544
1545 bool
1546 PCAlignmentFault::routeToHyp(ThreadContext *tc) const
1547 {
1548 HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
1549 return fromEL == EL2 || (EL2Enabled(tc) && fromEL <= EL1 && hcr.tge);
1550 }
1551
// Nothing to initialize beyond the ArmFaultVals defaults.
SPAlignmentFault::SPAlignmentFault()
{}
1554
1555 bool
1556 SPAlignmentFault::routeToHyp(ThreadContext *tc) const
1557 {
1558 assert(from64);
1559 HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
1560 return EL2Enabled(tc) && currEL(tc) <= EL1 && hcr.tge == 1;
1561 }
1562
// Nothing to initialize beyond the ArmFaultVals defaults.
SystemError::SystemError()
{}
1565
void
SystemError::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    // Taking the SError consumes the pending asynchronous-abort interrupt
    // before the generic fault entry runs.
    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
    ArmFault::invoke(tc, inst);
}
1572
1573 bool
1574 SystemError::routeToMonitor(ThreadContext *tc) const
1575 {
1576 assert(ArmSystem::haveSecurity(tc));
1577 assert(from64);
1578 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1579 return scr.ea || fromEL == EL3;
1580 }
1581
1582 bool
1583 SystemError::routeToHyp(ThreadContext *tc) const
1584 {
1585 assert(from64);
1586
1587 HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
1588
1589 return fromEL == EL2 ||
1590 (EL2Enabled(tc) && fromEL <= EL1 && (hcr.tge || hcr.amo));
1591 }
1592
1593
// Forward the faulting instruction encoding and the ISS value to the base.
SoftwareBreakpoint::SoftwareBreakpoint(ExtMachInst _mach_inst, uint32_t _iss)
    : ArmFaultVals<SoftwareBreakpoint>(_mach_inst, _iss)
{}
1597
1598 bool
1599 SoftwareBreakpoint::routeToHyp(ThreadContext *tc) const
1600 {
1601 const HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
1602 const HDCR mdcr = tc->readMiscRegNoEffect(MISCREG_MDCR_EL2);
1603
1604 return fromEL == EL2 ||
1605 (EL2Enabled(tc) && fromEL <= EL1 && (hcr.tge || mdcr.tde));
1606 }
1607
1608 ExceptionClass
1609 SoftwareBreakpoint::ec(ThreadContext *tc) const
1610 {
1611 return from64 ? EC_SOFTWARE_BREAKPOINT_64 : vals.ec;
1612 }
1613
// No instruction encoding is associated with a hardware breakpoint, so the
// machine-instruction argument is zeroed; record the breakpointed address.
HardwareBreakpoint::HardwareBreakpoint(Addr _vaddr, uint32_t _iss)
    : ArmFaultVals<HardwareBreakpoint>(0x0, _iss), vAddr(_vaddr)
{}
1617
1618 bool
1619 HardwareBreakpoint::routeToHyp(ThreadContext *tc) const
1620 {
1621 const HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
1622 const HDCR mdcr = tc->readMiscRegNoEffect(MISCREG_MDCR_EL2);
1623
1624 return EL2Enabled(tc) && fromEL <= EL1 && (hcr.tge || mdcr.tde);
1625 }
1626
1627 ExceptionClass
1628 HardwareBreakpoint::ec(ThreadContext *tc) const
1629 {
1630 // AArch64
1631 if (toEL == fromEL)
1632 return EC_HW_BREAKPOINT_CURR_EL;
1633 else
1634 return EC_HW_BREAKPOINT_LOWER_EL;
1635 }
1636
void
HardwareBreakpoint::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{

    ArmFaultVals<HardwareBreakpoint>::invoke(tc, inst);
    // Point the target EL's ELR at the breakpointed address, overriding
    // whatever the generic invoke stored there.
    MiscRegIndex elr_idx;
    switch (toEL) {
      case EL1:
        elr_idx = MISCREG_ELR_EL1;
        break;
      case EL2:
        assert(ArmSystem::haveVirtualization(tc));
        elr_idx = MISCREG_ELR_EL2;
        break;
      case EL3:
        assert(ArmSystem::haveSecurity(tc));
        elr_idx = MISCREG_ELR_EL3;
        break;
      default:
        panic("Invalid target exception level");
        break;
    }

    tc->setMiscReg(elr_idx, vAddr);

}
1663
// Record the watchpointed address, whether the access was a write, and
// whether it was a cache-maintenance operation.
Watchpoint::Watchpoint(ExtMachInst _mach_inst, Addr _vaddr,
                       bool _write, bool _cm)
    : ArmFaultVals<Watchpoint>(_mach_inst), vAddr(_vaddr),
      write(_write), cm(_cm)
{}
1669
1670 uint32_t
1671 Watchpoint::iss() const
1672 {
1673 uint32_t iss = 0x0022;
1674 // NV
1675 // if (toEL == EL2)
1676 // iss |= 0x02000;
1677 if (cm)
1678 iss |= 0x00100;
1679 if (write)
1680 iss |= 0x00040;
1681 return iss;
1682 }
1683
void
Watchpoint::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    ArmFaultVals<Watchpoint>::invoke(tc, inst);
    // Set the FAR to the watchpointed data address.
    tc->setMiscReg(getFaultAddrReg64(), vAddr);

}
1692
1693 bool
1694 Watchpoint::routeToHyp(ThreadContext *tc) const
1695 {
1696 const HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
1697 const HDCR mdcr = tc->readMiscRegNoEffect(MISCREG_MDCR_EL2);
1698
1699 return fromEL == EL2 ||
1700 (EL2Enabled(tc) && fromEL <= EL1 && (hcr.tge || mdcr.tde));
1701 }
1702
1703 void
1704 Watchpoint::annotate(AnnotationIDs id, uint64_t val)
1705 {
1706 ArmFaultVals<Watchpoint>::annotate(id, val);
1707 switch (id)
1708 {
1709 case OFA:
1710 vAddr = val;
1711 break;
1712 // Just ignore unknown ID's
1713 default:
1714 break;
1715 }
1716 }
1717
1718 ExceptionClass
1719 Watchpoint::ec(ThreadContext *tc) const
1720 {
1721 // AArch64
1722 if (toEL == fromEL)
1723 return EC_WATCHPOINT_CURR_EL;
1724 else
1725 return EC_WATCHPOINT_LOWER_EL;
1726 }
1727
// Record whether the stepped instruction was a load-exclusive and whether
// the step actually completed.
SoftwareStepFault::SoftwareStepFault(ExtMachInst _mach_inst, bool is_ldx,
                                     bool _stepped)
    : ArmFaultVals<SoftwareStepFault>(_mach_inst), isldx(is_ldx),
      stepped(_stepped)
{
    // Mark this fault as a software-step exception for the base class.
    bStep = true;
}
1735
1736 bool
1737 SoftwareStepFault::routeToHyp(ThreadContext *tc) const
1738 {
1739 const HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
1740 const HDCR mdcr = tc->readMiscRegNoEffect(MISCREG_MDCR_EL2);
1741
1742 return fromEL == EL2 ||
1743 (EL2Enabled(tc) && fromEL <= EL1 && (hcr.tge || mdcr.tde));
1744 }
1745
1746 ExceptionClass
1747 SoftwareStepFault::ec(ThreadContext *tc) const
1748 {
1749 // AArch64
1750 if (toEL == fromEL)
1751 return EC_SOFTWARE_STEP_CURR_EL;
1752 else
1753 return EC_SOFTWARE_STEP_LOWER_EL;
1754 }
1755
1756 uint32_t
1757 SoftwareStepFault::iss() const
1758 {
1759 uint32_t iss= 0x0022;
1760 if (stepped) {
1761 iss |= 0x1000000;
1762 }
1763
1764 if (isldx) {
1765 iss |= 0x40;
1766 }
1767
1768 return iss;
1769
1770 }
1771
// Handle a remotely-signalled SEV: latch the event in the SEV mailbox and
// clear the corresponding interrupt; no architectural exception is taken.
void
ArmSev::invoke(ThreadContext *tc, const StaticInstPtr &inst) {
    DPRINTF(Faults, "Invoking ArmSev Fault\n");
    if (!FullSystem)
        return;

    // Set sev_mailbox to 1, clear the pending interrupt from remote
    // SEV execution and let pipeline continue as pcState is still
    // valid.
    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);
    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_SEV, 0);
}
1784
// Instantiate all the templates to make the linker happy
template class ArmFaultVals<Reset>;
template class ArmFaultVals<UndefinedInstruction>;
template class ArmFaultVals<SupervisorCall>;
template class ArmFaultVals<SecureMonitorCall>;
template class ArmFaultVals<HypervisorCall>;
template class ArmFaultVals<PrefetchAbort>;
template class ArmFaultVals<DataAbort>;
template class ArmFaultVals<VirtualDataAbort>;
template class ArmFaultVals<HypervisorTrap>;
template class ArmFaultVals<Interrupt>;
template class ArmFaultVals<VirtualInterrupt>;
template class ArmFaultVals<FastInterrupt>;
template class ArmFaultVals<VirtualFastInterrupt>;
template class ArmFaultVals<SupervisorTrap>;
template class ArmFaultVals<SecureMonitorTrap>;
template class ArmFaultVals<PCAlignmentFault>;
template class ArmFaultVals<SPAlignmentFault>;
template class ArmFaultVals<SystemError>;
template class ArmFaultVals<SoftwareBreakpoint>;
template class ArmFaultVals<HardwareBreakpoint>;
template class ArmFaultVals<Watchpoint>;
template class ArmFaultVals<SoftwareStepFault>;
template class ArmFaultVals<ArmSev>;
// The abort faults additionally need their AbortFault<T> base instantiated.
template class AbortFault<PrefetchAbort>;
template class AbortFault<DataAbort>;
template class AbortFault<VirtualDataAbort>;
1812
1813
// Nothing to initialize beyond the ArmFaultVals defaults.
IllegalInstSetStateFault::IllegalInstSetStateFault()
{}
1816
1817 bool
1818 IllegalInstSetStateFault::routeToHyp(ThreadContext *tc) const
1819 {
1820 const HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
1821 return EL2Enabled(tc) && fromEL == EL0 && hcr.tge;
1822 }
1823
1824 bool
1825 getFaultVAddr(Fault fault, Addr &va)
1826 {
1827 auto arm_fault = dynamic_cast<ArmFault *>(fault.get());
1828
1829 if (arm_fault) {
1830 return arm_fault->getFaultVAddr(va);
1831 } else {
1832 auto pgt_fault = dynamic_cast<GenericPageTableFault *>(fault.get());
1833 if (pgt_fault) {
1834 va = pgt_fault->getFaultVAddr();
1835 return true;
1836 }
1837
1838 auto align_fault = dynamic_cast<GenericAlignmentFault *>(fault.get());
1839 if (align_fault) {
1840 va = align_fault->getFaultVAddr();
1841 return true;
1842 }
1843
1844 // Return false since it's not an address triggered exception
1845 return false;
1846 }
1847 }
1848
1849 } // namespace ArmISA