// Copyright (c) 2012-2013 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Mbou Eyole
42 simd64EnabledCheckCode = vfp64EnabledCheckCode
def template NeonX2RegOpDeclare {{
    // Per-element-type class declaration for a NEON op reading two
    // source vector registers (_op1, _op2) and writing one destination
    // (_dest). The concrete element type is bound via _Element.
    template <class _Element>
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef _Element Element;
      public:
        // Constructor: forwards operands to the base instruction class.
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _dest, IntRegIndex _op1, IntRegIndex _op2)
            : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                             _dest, _op1, _op2)
        {
            %(constructor)s;
        }
    };
}};
def template NeonX2RegImmOpDeclare {{
    // Like NeonX2RegOpDeclare, but the instruction also carries an
    // immediate operand (_imm) forwarded to the base class.
    template <class _Element>
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef _Element Element;
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _dest, IntRegIndex _op1, IntRegIndex _op2,
                       uint64_t _imm)
            : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                             _dest, _op1, _op2, _imm)
        {
            %(constructor)s;
        }
    };
}};
def template NeonX1RegOpDeclare {{
    // Class declaration for a NEON op with one source register (_op1)
    // and one destination register (_dest).
    template <class _Element>
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef _Element Element;
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _dest, IntRegIndex _op1)
            : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                             _dest, _op1)
        {
            %(constructor)s;
        }
    };
}};
def template NeonX1RegImmOpDeclare {{
    // One source register (_op1) plus an immediate (_imm); one
    // destination register (_dest).
    template <class _Element>
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef _Element Element;
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _dest, IntRegIndex _op1, uint64_t _imm)
            : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                             _dest, _op1, _imm)
        {
            %(constructor)s;
        }
    };
}};
def template NeonX1Reg2ImmOpDeclare {{
    // One source register (_op1) plus two immediates (_imm1, _imm2);
    // one destination register (_dest).
    template <class _Element>
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef _Element Element;
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _dest, IntRegIndex _op1, uint64_t _imm1,
                       uint64_t _imm2)
            : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                             _dest, _op1, _imm1, _imm2)
        {
            %(constructor)s;
        }
    };
}};
def template NeonX1RegImmOnlyOpDeclare {{
    // Immediate-only NEON op: a destination register (_dest) and an
    // immediate (_imm), with no source register operand.
    template <class _Element>
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef _Element Element;
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _dest, uint64_t _imm)
            : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                             _dest, _imm)
        {
            %(constructor)s;
        }
    };
}};
def template NeonXExecDeclare {{
    // Explicit-instantiation declaration of execute() for one concrete
    // template-argument list (%(targs)s), so the definition emitted by
    // the *Execute templates is instantiated exactly once.
    template
    Fault %(class_name)s<%(targs)s>::execute(
            CPU_EXEC_CONTEXT *, Trace::InstRecord *) const;
}};
def template NeonXEqualRegOpExecute {{
    // execute() for NEON ops whose source and destination element sizes
    // are equal. The unions below give %(code)s both a raw-register view
    // and a per-element view of the same bytes.
    template <class Element>
    Fault %(class_name)s<Element>::execute(CPU_EXEC_CONTEXT *xc,
            Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        %(op_decl)s;
        %(op_rd)s;

        // Registers per operand, elements those registers hold, and the
        // element count of a full four-register vector.
        const unsigned rCount = %(r_count)d;
        const unsigned eCount = rCount * sizeof(FloatRegBits) / sizeof(Element);
        const unsigned eCountFull = 4 * sizeof(FloatRegBits) / sizeof(Element);

        union RegVect {
            FloatRegBits regs[rCount];
            Element elements[eCount];
        };

        union FullRegVect {
            FloatRegBits regs[4];
            Element elements[eCountFull];
        };

        %(code)s;
        if (fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};
def template NeonXUnequalRegOpExecute {{
    // execute() for NEON ops whose source and destination element sizes
    // differ (widening/narrowing): bigger_type_t provides the
    // double-width BigElement view alongside the Element view.
    template <class Element>
    Fault %(class_name)s<Element>::execute(CPU_EXEC_CONTEXT *xc,
            Trace::InstRecord *traceData) const
    {
        typedef typename bigger_type_t<Element>::type BigElement;
        Fault fault = NoFault;
        %(op_decl)s;
        %(op_rd)s;

        const unsigned rCount = %(r_count)d;
        const unsigned eCount = rCount * sizeof(FloatRegBits) / sizeof(Element);
        const unsigned eCountFull = 4 * sizeof(FloatRegBits) / sizeof(Element);

        // Narrow view: half as many BigElements fit in the same bytes.
        union RegVect {
            FloatRegBits regs[rCount];
            Element elements[eCount];
            BigElement bigElements[eCount / 2];
        };

        // Wide view: twice the registers hold eCount BigElements.
        union BigRegVect {
            FloatRegBits regs[2 * rCount];
            BigElement elements[eCount];
        };

        // View spanning a full four-register vector.
        union FullRegVect {
            FloatRegBits regs[4];
            Element elements[eCountFull];
        };

        %(code)s;
        if (fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};
def template MicroNeonMemDeclare64 {{
    // Micro-op class for a single AArch64 NEON memory access; carries
    // the access size and element size alongside the base instruction.
    class %(class_name)s : public %(base_class)s
    {
      protected:
        // True if the base register is SP (used for SP alignment checking)
        bool baseIsSP;
        // Access size in bytes
        uint8_t accSize;
        // Vector element size (0 -> 8-bit, 1 -> 16-bit, 2 -> 32-bit,
        // 3 -> 64-bit)
        uint8_t eSize;

      public:
        %(class_name)s(ExtMachInst machInst, RegIndex _dest, RegIndex _ura,
                       uint32_t _imm, unsigned extraMemFlags, bool _baseIsSP,
                       uint8_t _accSize, uint8_t _eSize)
            : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest,
                             _ura, _imm),
              baseIsSP(_baseIsSP), accSize(_accSize), eSize(_eSize)
        {
            // Fold per-instruction flags into the base memory flags.
            memAccessFlags |= extraMemFlags;
            %(constructor)s;
        }

        %(BasicExecDeclare)s

        %(InitiateAccDeclare)s

        %(CompleteAccDeclare)s
    };
}};
def template NeonLoadExecute64 {{
    // Atomic-mode load: compute the effective address, read accSize
    // bytes into memUnion, then let %(memacc_code)s unpack them and
    // %(op_wb)s write the destination registers back.
    Fault %(class_name)s::execute(
        CPU_EXEC_CONTEXT *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(mem_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemUnion memUnion;
        uint8_t *dataPtr = memUnion.bytes;

        if (fault == NoFault) {
            fault = xc->readMem(EA, dataPtr, accSize, memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
def template NeonLoadInitiateAcc64 {{
    // Timing-mode load initiation: compute the effective address and
    // issue the read; data arrives later via completeAcc().
    Fault %(class_name)s::initiateAcc(
        CPU_EXEC_CONTEXT *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(mem_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = xc->initiateMemRead(EA, accSize, memAccessFlags);
        }

        return fault;
    }
}};
def template NeonLoadCompleteAcc64 {{
    // Timing-mode load completion: interpret the returned packet's
    // payload through MemUnion, unpack it, and write back.
    Fault %(class_name)s::completeAcc(
        PacketPtr pkt, CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(mem_decl)s;
        %(op_rd)s;

        // Alias the packet's data buffer as the instruction's MemUnion.
        MemUnion &memUnion = *(MemUnion *)pkt->getPtr<uint8_t>();

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
def template NeonStoreExecute64 {{
    // Atomic-mode store: pack the source registers into memUnion via
    // %(memacc_code)s, then write accSize bytes to memory.
    Fault %(class_name)s::execute(
        CPU_EXEC_CONTEXT *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(mem_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemUnion memUnion;
        uint8_t *dataPtr = memUnion.bytes;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(dataPtr, accSize, EA, memAccessFlags,
                                 NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
def template NeonStoreInitiateAcc64 {{
    // Timing-mode store initiation: pack the data and issue the write
    // request; completion requires no further work.
    Fault %(class_name)s::initiateAcc(
        CPU_EXEC_CONTEXT *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(mem_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemUnion memUnion;
        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(memUnion.bytes, accSize, EA, memAccessFlags,
                                 NULL);
        }

        return fault;
    }
}};
def template NeonStoreCompleteAcc64 {{
    // Nothing to do when a store completes; all work was done at
    // initiation time.
    Fault %(class_name)s::completeAcc(
        PacketPtr pkt, CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};
def template VMemMultDeclare64 {{
    // Declaration for a multi-structure vector memory macro-op; the
    // constructor is defined separately by VMemMultConstructor64.
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst, RegIndex rn, RegIndex vd,
                       RegIndex rm, uint8_t eSize, uint8_t dataSize,
                       uint8_t numStructElems, uint8_t numRegs, bool wb);
        %(BasicExecPanic)s
    };
}};
def template VMemSingleDeclare64 {{
    // Declaration for a single-structure vector memory macro-op
    // (optionally replicating); constructor defined separately by
    // VMemSingleConstructor64.
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst, RegIndex rn, RegIndex vd,
                       RegIndex rm, uint8_t eSize, uint8_t dataSize,
                       uint8_t numStructElems, uint8_t index, bool wb,
                       bool replicate = false);
        %(BasicExecPanic)s
    };
}};
def template VMemMultConstructor64 {{
    // Out-of-line constructor for VMemMultDeclare64: forwards all
    // parameters to the base class, then runs the generated body.
    %(class_name)s::%(class_name)s(
        ExtMachInst machInst, RegIndex rn, RegIndex vd, RegIndex rm,
        uint8_t _eSize, uint8_t _dataSize, uint8_t _numStructElems,
        uint8_t _numRegs, bool _wb) :
        %(base_class)s(
            "%(mnemonic)s", machInst, %(op_class)s, rn, vd, rm,
            _eSize, _dataSize, _numStructElems, _numRegs, _wb)
    {
        %(constructor)s;
    }
}};
def template VMemSingleConstructor64 {{
    // Out-of-line constructor for VMemSingleDeclare64: forwards all
    // parameters (including index/replicate) to the base class.
    %(class_name)s::%(class_name)s(
        ExtMachInst machInst, RegIndex rn, RegIndex vd, RegIndex rm,
        uint8_t _eSize, uint8_t _dataSize, uint8_t _numStructElems,
        uint8_t _index, bool _wb, bool _replicate) :
        %(base_class)s(
            "%(mnemonic)s", machInst, %(op_class)s, rn, vd, rm,
            _eSize, _dataSize, _numStructElems, _index, _wb,
            _replicate)
    {
        %(constructor)s;
    }
}};
def template MicroNeonMixDeclare64 {{
    // Micro-op class declaration; the constructor simply forwards its
    // arguments to the base class and runs the generated body.
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst machInst, RegIndex _dest, RegIndex _op1,
                       uint8_t _eSize, uint8_t _dataSize,
                       uint8_t _numStructElems, uint8_t _numRegs,
                       uint8_t _step) :
            %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                           _dest, _op1, _eSize, _dataSize, _numStructElems,
                           _numRegs, _step)
        {
            %(constructor)s;
        }

        %(BasicExecDeclare)s
    };
}};
def template MicroNeonMixLaneDeclare64 {{
    // Lane-addressed variant of MicroNeonMixDeclare64: takes a lane
    // index and an optional replicate flag instead of a register count.
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst machInst, RegIndex _dest, RegIndex _op1,
                       uint8_t _eSize, uint8_t _dataSize,
                       uint8_t _numStructElems, uint8_t _lane, uint8_t _step,
                       bool _replicate = false) :
            %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                           _dest, _op1, _eSize, _dataSize, _numStructElems,
                           _lane, _step, _replicate)
        {
            %(constructor)s;
        }

        %(BasicExecDeclare)s
    };
}};
506 def template MicroNeonMixExecute64 {{
507 Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
508 Trace::InstRecord *traceData) const
510 Fault fault = NoFault;
511 uint64_t resTemp = 0;
517 if (fault == NoFault)