/*
 * Copyright (c) 2010-2013 ARM Limited
 * All rights reserved
 *
 * The license below extends only to copyright in the software and shall
 * not be construed as granting a license to any other intellectual
 * property including but not limited to intellectual property relating
 * to a hardware implementation of the functionality of the software
 * licensed hereunder. You may use the software subject to the license
 * terms below provided that you ensure that this notice is replicated
 * unmodified and in its entirety in all distributions of the software,
 * modified or unmodified, in source code or in binary form.
 *
 * Copyright (c) 2007-2008 The Florida State University
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Stephen Hines
 */
#ifndef __ARCH_ARM_INSTS_STATICINST_HH__
#define __ARCH_ARM_INSTS_STATICINST_HH__

#include <memory>

#include "arch/arm/faults.hh"
#include "arch/arm/system.hh"
#include "arch/arm/utility.hh"
#include "base/trace.hh"
#include "cpu/static_inst.hh"
#include "sim/byteswap.hh"
#include "sim/full_system.hh"

namespace ArmISA
{

class ArmStaticInst : public StaticInst
{
  protected:
    bool aarch64;
    uint8_t intWidth;

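    // Helpers for AArch32 shifted-register operands: shift_rm_imm/_rs
    // return the shifted value (immediate vs. register-specified shift
    // amount), while shift_carry_imm/_rs return the shifter carry-out.
    // cfval is the incoming carry flag, needed for RRX.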
    int32_t shift_rm_imm(uint32_t base, uint32_t shamt,
                         uint32_t type, uint32_t cfval) const;
    int32_t shift_rm_rs(uint32_t base, uint32_t shamt,
                        uint32_t type, uint32_t cfval) const;

    bool shift_carry_imm(uint32_t base, uint32_t shamt,
                         uint32_t type, uint32_t cfval) const;
    bool shift_carry_rs(uint32_t base, uint32_t shamt,
                        uint32_t type, uint32_t cfval) const;

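    // AArch64 operand helpers: shiftReg64 applies a shift of the given
    // type to 'base', and extendReg64 sign-/zero-extends 'base' and then
    // shifts it left; 'width' is the operand width in bits.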
    int64_t shiftReg64(uint64_t base, uint64_t shiftAmt,
                       ArmShiftType type, uint8_t width) const;
    int64_t extendReg64(uint64_t base, ArmExtendType type,
                        uint64_t shiftAmt, uint8_t width) const;

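    // Signed saturating add/subtract: compute op1 +/- op2 and clamp the
    // result to the signed 'width'-bit range. Returns true if saturation
    // occurred (callers typically use this to set the CPSR.Q flag).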
    template<int width>
    static inline bool
    saturateOp(int32_t &res, int64_t op1, int64_t op2, bool sub=false)
    {
        int64_t midRes = sub ? (op1 - op2) : (op1 + op2);
        if (bits(midRes, width) != bits(midRes, width - 1)) {
            if (midRes > 0)
                res = (LL(1) << (width - 1)) - 1;
            else
                res = -(LL(1) << (width - 1));
            return true;
        } else {
            res = midRes;
            return false;
        }
    }

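    // Saturate 'op' to the signed 'width'-bit range; returns true if
    // saturation occurred.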
    static inline bool
    satInt(int32_t &res, int64_t op, int width)
    {
        width--;
        if (op >= (LL(1) << width)) {
            res = (LL(1) << width) - 1;
            return true;
        } else if (op < -(LL(1) << width)) {
            res = -(LL(1) << width);
            return true;
        } else {
            res = op;
            return false;
        }
    }

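    // Unsigned saturating add/subtract: clamp op1 +/- op2 to the range
    // [0, 2^width - 1]. Returns true if saturation occurred.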
    template<int width>
    static inline bool
    uSaturateOp(uint32_t &res, int64_t op1, int64_t op2, bool sub=false)
    {
        int64_t midRes = sub ? (op1 - op2) : (op1 + op2);
        if (midRes >= (LL(1) << width)) {
            res = (LL(1) << width) - 1;
            return true;
        } else if (midRes < 0) {
            res = 0;
            return true;
        } else {
            res = midRes;
            return false;
        }
    }

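    // Saturate 'op' to the unsigned range [0, 2^width - 1]; returns true
    // if saturation occurred.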
    static inline bool
    uSatInt(int32_t &res, int64_t op, int width)
    {
        if (op >= (LL(1) << width)) {
            res = (LL(1) << width) - 1;
            return true;
        } else if (op < 0) {
            res = 0;
            return true;
        } else {
            res = op;
            return false;
        }
    }

    // Constructor
    ArmStaticInst(const char *mnem, ExtMachInst _machInst,
                  OpClass __opClass)
        : StaticInst(mnem, _machInst, __opClass)
    {
        aarch64 = machInst.aarch64;
        if (bits(machInst, 28, 24) == 0x10)
            intWidth = 64;  // Force 64-bit width for ADR/ADRP
        else
            intWidth = (aarch64 && bits(machInst, 31)) ? 64 : 32;
    }

    /// Print a register name for disassembly given the unique
    /// dependence tag number (FP or int).
    void printReg(std::ostream &os, int reg) const;
    void printMnemonic(std::ostream &os,
                       const std::string &suffix = "",
                       bool withPred = true,
                       bool withCond64 = false,
                       ConditionCode cond64 = COND_UC) const;
    void printTarget(std::ostream &os, Addr target,
                     const SymbolTable *symtab) const;
    void printCondition(std::ostream &os, unsigned code,
                        bool noImplicit=false) const;
    void printMemSymbol(std::ostream &os, const SymbolTable *symtab,
                        const std::string &prefix, const Addr addr,
                        const std::string &suffix) const;
    void printShiftOperand(std::ostream &os, IntRegIndex rm,
                           bool immShift, uint32_t shiftAmt,
                           IntRegIndex rs, ArmShiftType type) const;
    void printExtendOperand(bool firstOperand, std::ostream &os,
                            IntRegIndex rm, ArmExtendType type,
                            int64_t shiftAmt) const;

    void printDataInst(std::ostream &os, bool withImm) const;
    void printDataInst(std::ostream &os, bool withImm, bool immShift, bool s,
                       IntRegIndex rd, IntRegIndex rn, IntRegIndex rm,
                       IntRegIndex rs, uint32_t shiftAmt, ArmShiftType type,
                       uint64_t imm) const;

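    // Advance the PC state past this instruction; the step size is
    // determined by the PCState itself (e.g. ARM vs. Thumb state).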
    void
    advancePC(PCState &pcState) const
    {
        pcState.advance();
    }

    std::string generateDisassembly(Addr pc, const SymbolTable *symtab) const;

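    // Apply the architectural rules for writing the CPSR from an
    // instruction (e.g. MSR, CPS, or an exception return). byteMask
    // selects which bytes of the CPSR may be written; affectState is true
    // when the write may also change the execution state bits (IT/J/T),
    // as on an exception return. Writes to the mask, mode, and A/F bits
    // are further gated by privilege, the Security Extensions (SCR.AW/FW,
    // NSACR.RFR) and Virtualization, and an invalid mode change leaves
    // CPSR.M unchanged.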
    static inline uint32_t
    cpsrWriteByInstr(CPSR cpsr, uint32_t val, SCR scr, NSACR nsacr,
                     uint8_t byteMask, bool affectState, bool nmfi,
                     ThreadContext *tc)
    {
        bool privileged = (cpsr.mode != MODE_USER);
        bool haveVirt = ArmSystem::haveVirtualization(tc);
        bool haveSecurity = ArmSystem::haveSecurity(tc);
        bool isSecure = inSecureState(scr, cpsr) || !haveSecurity;

        uint32_t bitMask = 0;

        if (bits(byteMask, 3)) {
            unsigned lowIdx = affectState ? 24 : 27;
            bitMask = bitMask | mask(31, lowIdx);
        }
        if (bits(byteMask, 2)) {
            bitMask = bitMask | mask(19, 16);
        }
        if (bits(byteMask, 1)) {
            unsigned highIdx = affectState ? 15 : 9;
            unsigned lowIdx = (privileged && (isSecure || scr.aw || haveVirt))
                              ? 8 : 9;
            bitMask = bitMask | mask(highIdx, lowIdx);
        }
        if (bits(byteMask, 0)) {
            if (privileged) {
                bitMask |= 1 << 7;
                if ((!nmfi || !((val >> 6) & 0x1)) &&
                    (isSecure || scr.fw || haveVirt)) {
                    bitMask |= 1 << 6;
                }
                // Now check the new mode is allowed
                OperatingMode newMode = (OperatingMode) (val & mask(5));
                OperatingMode oldMode = (OperatingMode)(uint32_t)cpsr.mode;
                if (!badMode(newMode)) {
                    bool validModeChange = true;
                    // Check for attempts to enter modes only permitted in
                    // Secure state from Non-secure state. These are Monitor
                    // mode ('10110'), and FIQ mode ('10001') if the Security
                    // Extensions have reserved it.
                    if (!isSecure && newMode == MODE_MON)
                        validModeChange = false;
                    if (!isSecure && newMode == MODE_FIQ && nsacr.rfr == 1)
                        validModeChange = false;
                    // There is no Hyp mode ('11010') in Secure state, so that
                    // is UNPREDICTABLE
                    if (scr.ns == 0 && newMode == MODE_HYP)
                        validModeChange = false;
                    // Cannot move into Hyp mode directly from a Non-secure
                    // PL1 mode
                    if (!isSecure && oldMode != MODE_HYP && newMode == MODE_HYP)
                        validModeChange = false;
                    // Cannot move out of Hyp mode with this function except
                    // on an exception return
                    if (oldMode == MODE_HYP && newMode != MODE_HYP &&
                        !affectState)
                        validModeChange = false;
                    // Must not change to 64 bit when running in 32 bit mode
                    if (!opModeIs64(oldMode) && opModeIs64(newMode))
                        validModeChange = false;

                    // If we passed all of the above then set the bit mask to
                    // copy the mode across
                    if (validModeChange) {
                        bitMask = bitMask | mask(5);
                    } else {
                        warn_once("Illegal change to CPSR mode attempted\n");
                    }
                } else {
                    warn_once("Ignoring write of bad mode to CPSR.\n");
                }
            }
            if (affectState)
                bitMask = bitMask | (1 << 5);
        }

        return ((uint32_t)cpsr & ~bitMask) | (val & bitMask);
    }

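    // Write the SPSR under control of byteMask: only the byte lanes
    // selected by byteMask are updated with the corresponding bits of val.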
    static inline uint32_t
    spsrWriteByInstr(uint32_t spsr, uint32_t val,
                     uint8_t byteMask, bool affectState)
    {
        uint32_t bitMask = 0;

        if (bits(byteMask, 3))
            bitMask = bitMask | mask(31, 24);
        if (bits(byteMask, 2))
            bitMask = bitMask | mask(19, 16);
        if (bits(byteMask, 1))
            bitMask = bitMask | mask(15, 8);
        if (bits(byteMask, 0))
            bitMask = bitMask | mask(7, 0);

        return ((spsr & ~bitMask) | (val & bitMask));
    }

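    // Read the architectural PC of the current instruction from the
    // execution context's PC state.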
    template<class XC>
    static inline Addr
    readPC(XC *xc)
    {
        return xc->pcState().instPC();
    }

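    // Set the next PC without an interworking state change (the current
    // ARM/Thumb state is preserved).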
    template<class XC>
    static inline void
    setNextPC(XC *xc, Addr val)
    {
        PCState pc = xc->pcState();
        pc.instNPC(val);
        xc->pcState(pc);
    }

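    // Conditionally byte-swap a value: convert 'val' from guest byte order
    // to big-endian if 'big' is set, to little-endian otherwise.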
    template<class T>
    static inline T
    cSwap(T val, bool big)
    {
        if (big) {
            return gtobe(val);
        } else {
            return gtole(val);
        }
    }

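    // Element-wise variant of cSwap: treat 'val' as an array of elements
    // of type E and swap each element independently to the selected
    // endianness (e.g. for per-element swapping of vector data).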
    template<class T, class E>
    static inline T
    cSwap(T val, bool big)
    {
        const unsigned count = sizeof(T) / sizeof(E);
        union {
            T tVal;
            E eVals[count];
        } conv;
        conv.tVal = htog(val);
        if (big) {
            for (unsigned i = 0; i < count; i++) {
                conv.eVals[i] = gtobe(conv.eVals[i]);
            }
        } else {
            for (unsigned i = 0; i < count; i++) {
                conv.eVals[i] = gtole(conv.eVals[i]);
            }
        }
        return gtoh(conv.tVal);
    }

    // Perform an interworking branch.
    template<class XC>
    static inline void
    setIWNextPC(XC *xc, Addr val)
    {
        PCState pc = xc->pcState();
        pc.instIWNPC(val);
        xc->pcState(pc);
    }

    // Perform an interworking branch in ARM mode, a regular branch
    // otherwise.
    template<class XC>
    static inline void
    setAIWNextPC(XC *xc, Addr val)
    {
        PCState pc = xc->pcState();
        pc.instAIWNPC(val);
        xc->pcState(pc);
    }

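    // Fault returned when this instruction's functionality is disabled
    // (e.g. the required extension or access is not enabled); it is
    // reported to the guest as an undefined instruction.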
    inline Fault
    disabledFault() const
    {
        return std::make_shared<UndefinedInstruction>(machInst, false,
                                                      mnemonic, true);
    }

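    // Hook allowing derived instructions to attach instruction-specific
    // information to a fault they generate; the default does nothing.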
  public:
    virtual void
    annotateFault(ArmFault *fault) {}
};
}

#endif //__ARCH_ARM_INSTS_STATICINST_HH__