arch-arm: GenericTimer arch regs, perms/trapping
[gem5.git] / src / arch / arm / utility.hh
1 /*
2 * Copyright (c) 2010, 2012-2013, 2016-2020 ARM Limited
3 * All rights reserved
4 *
5 * The license below extends only to copyright in the software and shall
6 * not be construed as granting a license to any other intellectual
7 * property including but not limited to intellectual property relating
8 * to a hardware implementation of the functionality of the software
9 * licensed hereunder. You may use the software subject to the license
10 * terms below provided that you ensure that this notice is replicated
11 * unmodified and in its entirety in all distributions of the software,
12 * modified or unmodified, in source code or in binary form.
13 *
14 * Copyright (c) 2003-2005 The Regents of The University of Michigan
15 * Copyright (c) 2007-2008 The Florida State University
16 * All rights reserved.
17 *
18 * Redistribution and use in source and binary forms, with or without
19 * modification, are permitted provided that the following conditions are
20 * met: redistributions of source code must retain the above copyright
21 * notice, this list of conditions and the following disclaimer;
22 * redistributions in binary form must reproduce the above copyright
23 * notice, this list of conditions and the following disclaimer in the
24 * documentation and/or other materials provided with the distribution;
25 * neither the name of the copyright holders nor the names of its
26 * contributors may be used to endorse or promote products derived from
27 * this software without specific prior written permission.
28 *
29 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
30 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
31 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
32 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
33 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
34 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
35 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
36 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
37 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
38 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
39 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
40 */
41
42 #ifndef __ARCH_ARM_UTILITY_HH__
43 #define __ARCH_ARM_UTILITY_HH__
44
45 #include "arch/arm/isa_traits.hh"
46 #include "arch/arm/miscregs.hh"
47 #include "arch/arm/types.hh"
48 #include "base/logging.hh"
49 #include "base/trace.hh"
50 #include "base/types.hh"
51 #include "cpu/static_inst.hh"
52 #include "cpu/thread_context.hh"
53
54 class ArmSystem;
55
56 namespace ArmISA {
57
58 inline PCState
59 buildRetPC(const PCState &curPC, const PCState &callPC)
60 {
61 PCState retPC = callPC;
62 retPC.uEnd();
63 return retPC;
64 }
65
66 inline bool
67 testPredicate(uint32_t nz, uint32_t c, uint32_t v, ConditionCode code)
68 {
69 bool n = (nz & 0x2);
70 bool z = (nz & 0x1);
71
72 switch (code)
73 {
74 case COND_EQ: return z;
75 case COND_NE: return !z;
76 case COND_CS: return c;
77 case COND_CC: return !c;
78 case COND_MI: return n;
79 case COND_PL: return !n;
80 case COND_VS: return v;
81 case COND_VC: return !v;
82 case COND_HI: return (c && !z);
83 case COND_LS: return !(c && !z);
84 case COND_GE: return !(n ^ v);
85 case COND_LT: return (n ^ v);
86 case COND_GT: return !(n ^ v || z);
87 case COND_LE: return (n ^ v || z);
88 case COND_AL: return true;
89 case COND_UC: return true;
90 default:
91 panic("Unhandled predicate condition: %d\n", code);
92 }
93 }
94
95 void copyRegs(ThreadContext *src, ThreadContext *dest);
96
/** Copy the miscellaneous (system) registers from one thread context
 * to another. Currently unimplemented: any call panics. Use copyRegs()
 * above for the implemented whole-context copy. */
static inline void
copyMiscRegs(ThreadContext *src, ThreadContext *dest)
{
    panic("Copy Misc. Regs Not Implemented Yet\n");
}
102
103 /** Send an event (SEV) to a specific PE if there isn't
104 * already a pending event */
105 void sendEvent(ThreadContext *tc);
106
107 static inline bool
108 inUserMode(CPSR cpsr)
109 {
110 return cpsr.mode == MODE_USER || cpsr.mode == MODE_EL0T;
111 }
112
113 static inline bool
114 inUserMode(ThreadContext *tc)
115 {
116 return inUserMode(tc->readMiscRegNoEffect(MISCREG_CPSR));
117 }
118
119 static inline bool
120 inPrivilegedMode(CPSR cpsr)
121 {
122 return !inUserMode(cpsr);
123 }
124
125 static inline bool
126 inPrivilegedMode(ThreadContext *tc)
127 {
128 return !inUserMode(tc);
129 }
130
131 bool inAArch64(ThreadContext *tc);
132
133 static inline OperatingMode
134 currOpMode(const ThreadContext *tc)
135 {
136 CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
137 return (OperatingMode) (uint8_t) cpsr.mode;
138 }
139
140 static inline ExceptionLevel
141 currEL(const ThreadContext *tc)
142 {
143 return opModeToEL(currOpMode(tc));
144 }
145
146 inline ExceptionLevel
147 currEL(CPSR cpsr)
148 {
149 return opModeToEL((OperatingMode) (uint8_t)cpsr.mode);
150 }
151
152 bool HaveVirtHostExt(ThreadContext *tc);
153 bool HaveSecureEL2Ext(ThreadContext *tc);
154 bool IsSecureEL2Enabled(ThreadContext *tc);
155 bool EL2Enabled(ThreadContext *tc);
156
157 /**
158 * This function checks whether selected EL provided as an argument
159 * is using the AArch32 ISA. This information might be unavailable
160 * at the current EL status: it hence returns a pair of boolean values:
161 * a first boolean, true if information is available (known),
162 * and a second one, true if EL is using AArch32, false for AArch64.
163 *
164 * @param tc The thread context.
165 * @param el The target exception level.
166 * @retval known is FALSE for EL0 if the current Exception level
167 * is not EL0 and EL1 is using AArch64, since it cannot
168 * determine the state of EL0; TRUE otherwise.
169 * @retval aarch32 is TRUE if the specified Exception level is using AArch32;
170 * FALSE otherwise.
171 */
172 std::pair<bool, bool>
173 ELUsingAArch32K(ThreadContext *tc, ExceptionLevel el);
174
175 bool ELIs32(ThreadContext *tc, ExceptionLevel el);
176
177 bool ELIs64(ThreadContext *tc, ExceptionLevel el);
178
179 /**
180 * Returns true if the current exception level `el` is executing a Host OS or
181 * an application of a Host OS (Armv8.1 Virtualization Host Extensions).
182 */
183 bool ELIsInHost(ThreadContext *tc, ExceptionLevel el);
184
185 bool isBigEndian64(const ThreadContext *tc);
186
187 /**
188 * badMode is checking if the execution mode provided as an argument is
189 * valid and implemented for AArch32
190 *
191 * @param tc ThreadContext
192 * @param mode OperatingMode to check
193 * @return false if mode is valid and implemented, true otherwise
194 */
195 bool badMode32(ThreadContext *tc, OperatingMode mode);
196
197 /**
198 * badMode is checking if the execution mode provided as an argument is
199 * valid and implemented.
200 *
201 * @param tc ThreadContext
202 * @param mode OperatingMode to check
203 * @return false if mode is valid and implemented, true otherwise
204 */
205 bool badMode(ThreadContext *tc, OperatingMode mode);
206
207 static inline uint8_t
208 itState(CPSR psr)
209 {
210 ITSTATE it = 0;
211 it.top6 = psr.it2;
212 it.bottom2 = psr.it1;
213
214 return (uint8_t)it;
215 }
216
217 ExceptionLevel s1TranslationRegime(ThreadContext* tc, ExceptionLevel el);
218
219 /**
220 * Removes the tag from tagged addresses if that mode is enabled.
221 * @param addr The address to be purified.
222 * @param tc The thread context.
223 * @param el The controlled exception level.
224 * @return The purified address.
225 */
226 Addr purifyTaggedAddr(Addr addr, ThreadContext *tc, ExceptionLevel el,
227 TCR tcr, bool isInstr);
228 Addr purifyTaggedAddr(Addr addr, ThreadContext *tc, ExceptionLevel el,
229 bool isInstr);
230 int computeAddrTop(ThreadContext *tc, bool selbit, bool isInstr,
231 TTBCR tcr, ExceptionLevel el);
232
233 static inline bool
234 inSecureState(SCR scr, CPSR cpsr)
235 {
236 switch ((OperatingMode) (uint8_t) cpsr.mode) {
237 case MODE_MON:
238 case MODE_EL3T:
239 case MODE_EL3H:
240 return true;
241 case MODE_HYP:
242 case MODE_EL2T:
243 case MODE_EL2H:
244 return false;
245 default:
246 return !scr.ns;
247 }
248 }
249
250 bool inSecureState(ThreadContext *tc);
251
252 bool longDescFormatInUse(ThreadContext *tc);
253
/** This helper function either returns the value of
 * MPIDR_EL1 (by calling getMPIDR), or issues a read
 * of VMPIDR_EL2 (as happens in virtualized systems) */
257 RegVal readMPIDR(ArmSystem *arm_sys, ThreadContext *tc);
258
/** This helper function returns the value of MPIDR_EL1 */
260 RegVal getMPIDR(ArmSystem *arm_sys, ThreadContext *tc);
261
262 static inline uint32_t
263 mcrMrcIssBuild(bool isRead, uint32_t crm, IntRegIndex rt, uint32_t crn,
264 uint32_t opc1, uint32_t opc2)
265 {
266 return (isRead << 0) |
267 (crm << 1) |
268 (rt << 5) |
269 (crn << 10) |
270 (opc1 << 14) |
271 (opc2 << 17);
272 }
273
274 static inline void
275 mcrMrcIssExtract(uint32_t iss, bool &isRead, uint32_t &crm, IntRegIndex &rt,
276 uint32_t &crn, uint32_t &opc1, uint32_t &opc2)
277 {
278 isRead = (iss >> 0) & 0x1;
279 crm = (iss >> 1) & 0xF;
280 rt = (IntRegIndex) ((iss >> 5) & 0xF);
281 crn = (iss >> 10) & 0xF;
282 opc1 = (iss >> 14) & 0x7;
283 opc2 = (iss >> 17) & 0x7;
284 }
285
286 static inline uint32_t
287 mcrrMrrcIssBuild(bool isRead, uint32_t crm, IntRegIndex rt, IntRegIndex rt2,
288 uint32_t opc1)
289 {
290 return (isRead << 0) |
291 (crm << 1) |
292 (rt << 5) |
293 (rt2 << 10) |
294 (opc1 << 16);
295 }
296
297 static inline uint32_t
298 msrMrs64IssBuild(bool isRead, uint32_t op0, uint32_t op1, uint32_t crn,
299 uint32_t crm, uint32_t op2, IntRegIndex rt)
300 {
301 return isRead |
302 (crm << 1) |
303 (rt << 5) |
304 (crn << 10) |
305 (op1 << 14) |
306 (op2 << 17) |
307 (op0 << 20);
308 }
309
310 Fault
311 mcrMrc15Trap(const MiscRegIndex miscReg, ExtMachInst machInst,
312 ThreadContext *tc, uint32_t imm);
313 bool
314 mcrMrc15TrapToHyp(const MiscRegIndex miscReg, ThreadContext *tc, uint32_t iss,
315 ExceptionClass *ec = nullptr);
316
317 bool
318 mcrMrc14TrapToHyp(const MiscRegIndex miscReg, HCR hcr, CPSR cpsr, SCR scr,
319 HDCR hdcr, HSTR hstr, HCPTR hcptr, uint32_t iss);
320
321 Fault
322 mcrrMrrc15Trap(const MiscRegIndex miscReg, ExtMachInst machInst,
323 ThreadContext *tc, uint32_t imm);
324 bool
325 mcrrMrrc15TrapToHyp(const MiscRegIndex miscReg, ThreadContext *tc,
326 uint32_t iss, ExceptionClass *ec = nullptr);
327
328 Fault
329 AArch64AArch32SystemAccessTrap(const MiscRegIndex miscReg,
330 ExtMachInst machInst, ThreadContext *tc,
331 uint32_t imm, ExceptionClass ec);
332 bool
333 isAArch64AArch32SystemAccessTrapEL1(const MiscRegIndex miscReg,
334 ThreadContext *tc);
335 bool
336 isAArch64AArch32SystemAccessTrapEL2(const MiscRegIndex miscReg,
337 ThreadContext *tc);
338 bool
339 isGenericTimerHypTrap(const MiscRegIndex miscReg, ThreadContext *tc,
340 ExceptionClass *ec);
341 bool condGenericTimerPhysHypTrap(const MiscRegIndex miscReg,
342 ThreadContext *tc);
343 bool
344 isGenericTimerCommonEL0HypTrap(const MiscRegIndex miscReg, ThreadContext *tc,
345 ExceptionClass *ec);
346 bool
347 isGenericTimerPhysHypTrap(const MiscRegIndex miscReg, ThreadContext *tc,
348 ExceptionClass *ec);
349 bool
350 condGenericTimerPhysHypTrap(const MiscRegIndex miscReg, ThreadContext *tc);
351 bool
352 isGenericTimerSystemAccessTrapEL1(const MiscRegIndex miscReg,
353 ThreadContext *tc);
354 bool
355 condGenericTimerSystemAccessTrapEL1(const MiscRegIndex miscReg,
356 ThreadContext *tc);
357 bool
358 isGenericTimerSystemAccessTrapEL2(const MiscRegIndex miscReg,
359 ThreadContext *tc);
360 bool
361 isGenericTimerCommonEL0SystemAccessTrapEL2(const MiscRegIndex miscReg,
362 ThreadContext *tc);
363 bool
364 isGenericTimerPhysEL0SystemAccessTrapEL2(const MiscRegIndex miscReg,
365 ThreadContext *tc);
366 bool
367 isGenericTimerPhysEL1SystemAccessTrapEL2(const MiscRegIndex miscReg,
368 ThreadContext *tc);
369 bool
370 isGenericTimerVirtSystemAccessTrapEL2(const MiscRegIndex miscReg,
371 ThreadContext *tc);
372 bool
373 condGenericTimerCommonEL0SystemAccessTrapEL2(const MiscRegIndex miscReg,
374 ThreadContext *tc);
375 bool
376 condGenericTimerCommonEL1SystemAccessTrapEL2(const MiscRegIndex miscReg,
377 ThreadContext *tc);
378 bool
379 condGenericTimerPhysEL1SystemAccessTrapEL2(const MiscRegIndex miscReg,
380 ThreadContext *tc);
381 bool
382 isGenericTimerSystemAccessTrapEL3(const MiscRegIndex miscReg,
383 ThreadContext *tc);
384
385 bool SPAlignmentCheckEnabled(ThreadContext* tc);
386
387 uint64_t getArgument(ThreadContext *tc, int &number, uint16_t size, bool fp);
388
389 void skipFunction(ThreadContext *tc);
390
/** Advance pc past the given instruction by delegating to the
 * instruction's own advancePC() implementation (handles micro-ops
 * and variable-length encodings per instruction). */
inline void
advancePC(PCState &pc, const StaticInstPtr &inst)
{
    inst->advancePC(pc);
}
396
397 Addr truncPage(Addr addr);
398 Addr roundPage(Addr addr);
399
/** Return the executing context identifier, read from CONTEXTIDR.
 * NOTE(review): this returns the full CONTEXTIDR value, not just the
 * ASID field — callers presumably treat it as an opaque context token. */
inline uint64_t
getExecutingAsid(ThreadContext *tc)
{
    return tc->readMiscReg(MISCREG_CONTEXTIDR);
}
405
406 // Decodes the register index to access based on the fields used in a MSR
407 // or MRS instruction
408 bool
409 decodeMrsMsrBankedReg(uint8_t sysM, bool r, bool &isIntReg, int &regIdx,
410 CPSR cpsr, SCR scr, NSACR nsacr,
411 bool checkSecurity = true);
412
413 // This wrapper function is used to turn the register index into a source
414 // parameter for the instruction. See Operands.isa
415 static inline int
416 decodeMrsMsrBankedIntRegIndex(uint8_t sysM, bool r)
417 {
418 int regIdx;
419 bool isIntReg;
420 bool validReg;
421
422 validReg = decodeMrsMsrBankedReg(sysM, r, isIntReg, regIdx, 0, 0, 0, false);
423 return (validReg && isIntReg) ? regIdx : INTREG_DUMMY;
424 }
425
426 /**
427 * Returns the n. of PA bits corresponding to the specified encoding.
428 */
429 int decodePhysAddrRange64(uint8_t pa_enc);
430
431 /**
432 * Returns the encoding corresponding to the specified n. of PA bits.
433 */
434 uint8_t encodePhysAddrRange64(int pa_size);
435
436 inline ByteOrder byteOrder(const ThreadContext *tc)
437 {
438 return isBigEndian64(tc) ? BigEndianByteOrder : LittleEndianByteOrder;
439 };
440
441 }
442
443 #endif