unsigned rn = INSTR (9, 5);
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rt, NO_SP, aarch64_get_mem_u32
(cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK)
+ offset));
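/* Editor's note: every hunk in this patch adds the same one-line
   TRACE_DECODE call so that each emulated instruction reports the
   simulator source line that handled it.  A minimal sketch of what
   the macro could expand to, assuming the common sim tracing
   framework in sim/common/sim-trace.h (illustrative only, not part
   of the patch):  */
#if 0
#define TRACE_DECODE(cpu, fmt, ...) \
  do \
    { \
      if (TRACE_P (cpu, TRACE_DECODE_IDX)) \
        trace_generic (CPU_STATE (cpu), cpu, TRACE_DECODE_IDX, \
                       fmt, ## __VA_ARGS__); \
    } \
  while (0)
#endif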
unsigned rn = INSTR (9, 5);
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rt, NO_SP, aarch64_get_mem_u64
(cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK)
+ offset));
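/* The INSTR (hi, lo) accessor used throughout extracts the inclusive
   bitfield [hi:lo] from the current 32-bit instruction word.  A
   sketch of the assumed behaviour (illustrative only):  */
#if 0
#define INSTR(HIGH, LOW) \
  ((uint32_t) (aarch64_get_instr (cpu) >> (LOW)) \
   & (0xffffffffu >> (31 - ((HIGH) - (LOW)))))
/* e.g. INSTR (9, 5) yields the Rn field, INSTR (4, 0) the Rt/Rd field.  */
#endif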
unsigned rn = INSTR (9, 5);
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rt, NO_SP, aarch64_get_mem_u8
(cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK)
+ offset));
unsigned rn = INSTR (9, 5);
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rt, NO_SP, (uint32_t) aarch64_get_mem_s8
(cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK)
+ offset));
unsigned rn = INSTR (9, 5);
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_s64 (cpu, rt, NO_SP, aarch64_get_mem_s8
(cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK)
+ offset));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, aarch64_get_mem_u16
(cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK)
+ offset));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, (uint32_t) aarch64_get_mem_s16
(cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK)
+ offset));
unsigned rn = INSTR (9, 5);
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_s64 (cpu, rt, NO_SP, aarch64_get_mem_s16
(cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK)
+ offset));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, (uint32_t) aarch64_get_mem_s32
(cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK)
+ offset));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_mem_u32 (cpu,
aarch64_get_reg_u64 (cpu, rn, SP_OK) + offset,
aarch64_get_reg_u32 (cpu, rd, NO_SP));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_mem_u64 (cpu,
aarch64_get_reg_u64 (cpu, rn, SP_OK) + offset,
aarch64_get_reg_u64 (cpu, rd, NO_SP));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_mem_u8 (cpu,
aarch64_get_reg_u64 (cpu, rn, SP_OK) + offset,
aarch64_get_reg_u8 (cpu, rd, NO_SP));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_mem_u16 (cpu,
aarch64_get_reg_u64 (cpu, rn, SP_OK) + offset,
aarch64_get_reg_u16 (cpu, rd, NO_SP));
{
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP,
aarch64_get_mem_u32
(cpu, aarch64_get_PC (cpu) + offset * 4));
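/* PC-relative (literal) loads: the 19-bit literal offset is encoded
   in words, hence the "* 4" above.  Worked example: "ldr w0, lbl"
   with lbl 8 bytes past the instruction encodes offset 2, and the
   load reads from PC + 8.  */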
{
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP,
aarch64_get_mem_u64
(cpu, aarch64_get_PC (cpu) + offset * 4));
{
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP,
aarch64_get_mem_s32
(cpu, aarch64_get_PC (cpu) + offset * 4));
{
unsigned int rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_vec_u32 (cpu, rd, 0,
aarch64_get_mem_u32
(cpu, aarch64_get_PC (cpu) + offset * 4));
{
unsigned int st = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_vec_u64 (cpu, st, 0,
aarch64_get_mem_u64
(cpu, aarch64_get_PC (cpu) + offset * 4));
uint64_t addr = aarch64_get_PC (cpu) + offset * 4;
FRegister a;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_get_mem_long_double (cpu, addr, & a);
aarch64_set_FP_long_double (cpu, st, a);
}
if (wb != Post)
address += offset;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_vec_u32 (cpu, st, 0, aarch64_get_mem_u32 (cpu, address));
if (wb == Post)
address += offset;
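/* The wb tests above implement the three indexing modes shared by the
   load/store hunks that follow.  A sketch of the assumed pattern,
   with NoWriteBack/Pre/Post as the writeback enumerators:  */
#if 0
if (wb != Post)        /* Offset and pre-index: adjust before access.  */
  address += offset;
/* ... perform the memory access at address ...  */
if (wb == Post)        /* Post-index: adjust after the access.  */
  address += offset;
if (wb != NoWriteBack) /* Pre/post-index: write the address back to Rn.  */
  aarch64_set_reg_u64 (cpu, rn, SP_OK, address);
#endif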
unsigned rn = INSTR (9, 5);
uint64_t addr = aarch64_get_reg_u64 (cpu, rn, SP_OK) + offset;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_vec_u8 (cpu, rd, 0, aarch64_get_mem_u32 (cpu, addr));
}
unsigned rn = INSTR (9, 5);
uint64_t addr = aarch64_get_reg_u64 (cpu, rn, SP_OK) + SCALE (offset, 16);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_vec_u16 (cpu, rd, 0, aarch64_get_mem_u16 (cpu, addr));
}
unsigned rn = INSTR (9, 5);
uint64_t addr = aarch64_get_reg_u64 (cpu, rn, SP_OK) + SCALE (offset, 32);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_vec_u32 (cpu, rd, 0, aarch64_get_mem_u32 (cpu, addr));
}
unsigned rn = INSTR (9, 5);
uint64_t addr = aarch64_get_reg_u64 (cpu, rn, SP_OK) + SCALE (offset, 64);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_vec_u64 (cpu, rd, 0, aarch64_get_mem_u64 (cpu, addr));
}
unsigned rn = INSTR (9, 5);
uint64_t addr = aarch64_get_reg_u64 (cpu, rn, SP_OK) + SCALE (offset, 128);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_vec_u64 (cpu, rd, 0, aarch64_get_mem_u64 (cpu, addr));
aarch64_set_vec_u64 (cpu, rd, 1, aarch64_get_mem_u64 (cpu, addr + 8));
}
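/* SCALE (offset, n) above multiplies the unsigned 12-bit immediate by
   the access size in bytes, as the architecture requires for scaled
   offsets.  A sketch of the assumed definition (illustrative only):  */
#if 0
#define SCALE(_offset, _elementSize) ((_offset) * ((_elementSize) / 8))
/* e.g. SCALE (3, 32) == 12: "ldr s0, [x1, #12]" encodes offset 3.  */
#endif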
int64_t extended = extend (aarch64_get_reg_u32 (cpu, rm, NO_SP), extension);
uint64_t displacement = OPT_SCALE (extended, 32, scaling);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_vec_u32 (cpu, st, 0, aarch64_get_mem_u32
(cpu, address + displacement));
}
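/* Register-offset addressing: extend () widens the 32-bit index
   register according to the extension option (UXTW, SXTW, ...), and
   OPT_SCALE then shifts it left by log2 (access size in bytes) when
   the instruction requests scaling.  A sketch of the assumed
   OPT_SCALE (illustrative only):  */
#if 0
/* log2_bytes is a hypothetical helper: 8->0, 16->1, 32->2, 64->3.  */
#define OPT_SCALE(_value, _elementSize, _scaling) \
  ((_scaling) ? ((_value) << log2_bytes (_elementSize)) : (_value))
#endif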
if (wb != Post)
address += offset;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_vec_u64 (cpu, st, 0, aarch64_get_mem_u64 (cpu, address));
if (wb == Post)
if (wb != Post)
address += offset;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_get_mem_long_double (cpu, address, & a);
aarch64_set_FP_long_double (cpu, st, a);
unsigned rn = INSTR (9, 5);
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* The target register may not be SP but the source may be. */
aarch64_set_reg_u64 (cpu, rt, NO_SP, aarch64_get_mem_u32
(cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK)
if (wb != Post)
address += offset;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rt, NO_SP, aarch64_get_mem_u32 (cpu, address));
if (wb == Post)
int64_t extended = extend (aarch64_get_reg_u32 (cpu, rm, NO_SP), extension);
uint64_t displacement = OPT_SCALE (extended, 32, scaling);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rt, NO_SP,
aarch64_get_mem_u32 (cpu, address + displacement));
}
unsigned rn = INSTR (9, 5);
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* The target register may not be SP but the source may be. */
aarch64_set_reg_u64 (cpu, rt, NO_SP, aarch64_get_mem_u64
(cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK)
if (wb != Post)
address += offset;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rt, NO_SP, aarch64_get_mem_u64 (cpu, address));
if (wb == Post)
int64_t extended = extend (aarch64_get_reg_u32 (cpu, rm, NO_SP), extension);
uint64_t displacement = OPT_SCALE (extended, 64, scaling);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rt, NO_SP,
aarch64_get_mem_u64 (cpu, address + displacement));
}
unsigned rn = INSTR (9, 5);
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* The target register may not be SP but the source may be.
   There is no scaling required for a byte load. */
aarch64_set_reg_u64 (cpu, rt, NO_SP,
if (wb != Post)
address += offset;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rt, NO_SP, aarch64_get_mem_u8 (cpu, address));
if (wb == Post)
int64_t displacement = extend (aarch64_get_reg_u32 (cpu, rm, NO_SP),
extension);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* There is no scaling required for a byte load. */
aarch64_set_reg_u64 (cpu, rt, NO_SP,
aarch64_get_mem_u8 (cpu, address + displacement));
if (wb != Post)
address += offset;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
val = aarch64_get_mem_s8 (cpu, address);
aarch64_set_reg_s64 (cpu, rt, NO_SP, val);
uint64_t address = aarch64_get_reg_u64 (cpu, rn, SP_OK);
int64_t displacement = extend (aarch64_get_reg_u32 (cpu, rm, NO_SP),
extension);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* There is no scaling required for a byte load. */
aarch64_set_reg_s64 (cpu, rt, NO_SP,
aarch64_get_mem_s8 (cpu, address + displacement));
unsigned rt = INSTR (4, 0);
uint32_t val;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* The target register may not be SP but the source may be. */
val = aarch64_get_mem_u16 (cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK)
+ SCALE (offset, 16));
if (wb != Post)
address += offset;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u32 (cpu, rt, NO_SP, aarch64_get_mem_u16 (cpu, address));
if (wb == Post)
int64_t extended = extend (aarch64_get_reg_u32 (cpu, rm, NO_SP), extension);
uint64_t displacement = OPT_SCALE (extended, 16, scaling);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u32 (cpu, rt, NO_SP,
aarch64_get_mem_u16 (cpu, address + displacement));
}
unsigned rt = INSTR (4, 0);
int32_t val;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* The target register may not be SP but the source may be. */
val = aarch64_get_mem_s16 (cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK)
+ SCALE (offset, 16));
if (wb != Post)
address += offset;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_s32 (cpu, rt, NO_SP,
(int32_t) aarch64_get_mem_s16 (cpu, address));
int64_t extended = extend (aarch64_get_reg_u32 (cpu, rm, NO_SP), extension);
uint64_t displacement = OPT_SCALE (extended, 16, scaling);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_s32 (cpu, rt, NO_SP,
(int32_t) aarch64_get_mem_s16
(cpu, address + displacement));
unsigned rt = INSTR (4, 0);
int64_t val;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* The target register may not be SP but the source may be. */
val = aarch64_get_mem_s16 (cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK)
+ SCALE (offset, 16));
if (rn == rt && wb != NoWriteBack)
HALT_UNALLOC;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
address = aarch64_get_reg_u64 (cpu, rn, SP_OK);
if (wb != Post)
uint64_t displacement = OPT_SCALE (extended, 16, scaling);
int64_t val;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
val = aarch64_get_mem_s16 (cpu, address + displacement);
aarch64_set_reg_s64 (cpu, rt, NO_SP, val);
}
unsigned rt = INSTR (4, 0);
int64_t val;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
val = aarch64_get_mem_s32 (cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK)
+ SCALE (offset, 32));
/* The target register may not be SP but the source may be. */
if (wb != Post)
address += offset;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_s64 (cpu, rt, NO_SP, aarch64_get_mem_s32 (cpu, address));
if (wb == Post)
int64_t extended = extend (aarch64_get_reg_u32 (cpu, rm, NO_SP), extension);
uint64_t displacement = OPT_SCALE (extended, 32, scaling);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_s64 (cpu, rt, NO_SP,
aarch64_get_mem_s32 (cpu, address + displacement));
}
unsigned rn = INSTR (9, 5);
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* The target register may not be SP but the source may be. */
aarch64_set_mem_u32 (cpu, (aarch64_get_reg_u64 (cpu, rn, SP_OK)
+ SCALE (offset, 32)),
if (wb != Post)
address += offset;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_mem_u32 (cpu, address, aarch64_get_reg_u32 (cpu, rt, NO_SP));
if (wb == Post)
int64_t extended = extend (aarch64_get_reg_u32 (cpu, rm, NO_SP), extension);
uint64_t displacement = OPT_SCALE (extended, 32, scaling);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_mem_u32 (cpu, address + displacement,
aarch64_get_reg_u32 (cpu, rt, NO_SP));
}
unsigned rn = INSTR (9, 5);
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_mem_u64 (cpu,
aarch64_get_reg_u64 (cpu, rn, SP_OK)
+ SCALE (offset, 64),
if (wb != Post)
address += offset;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_mem_u64 (cpu, address, aarch64_get_reg_u64 (cpu, rt, NO_SP));
if (wb == Post)
extension);
uint64_t displacement = OPT_SCALE (extended, 64, scaling);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_mem_u64 (cpu, address + displacement,
aarch64_get_reg_u64 (cpu, rt, NO_SP));
}
unsigned rn = INSTR (9, 5);
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* The target register may not be SP but the source may be.
There is no scaling required for a byte load. */
aarch64_set_mem_u8 (cpu,
if (wb != Post)
address += offset;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_mem_u8 (cpu, address, aarch64_get_reg_u8 (cpu, rt, NO_SP));
if (wb == Post)
int64_t displacement = extend (aarch64_get_reg_u32 (cpu, rm, NO_SP),
extension);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* There is no scaling required for a byte load. */
aarch64_set_mem_u8 (cpu, address + displacement,
aarch64_get_reg_u8 (cpu, rt, NO_SP));
unsigned rn = INSTR (9, 5);
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* The target register may not be SP but the source may be. */
aarch64_set_mem_u16 (cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK)
+ SCALE (offset, 16),
if (wb != Post)
address += offset;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_mem_u16 (cpu, address, aarch64_get_reg_u16 (cpu, rt, NO_SP));
if (wb == Post)
int64_t extended = extend (aarch64_get_reg_u32 (cpu, rm, NO_SP), extension);
uint64_t displacement = OPT_SCALE (extended, 16, scaling);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_mem_u16 (cpu, address + displacement,
aarch64_get_reg_u16 (cpu, rt, NO_SP));
}
/* int ordered = INSTR (15, 15); */
/* int exclusive = ! INSTR (23, 23); */
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (size)
{
case 0:
case 3: aarch64_set_mem_u64 (cpu, address, data); break;
}
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rs, NO_SP, 0); /* Always exclusive... */
}
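/* The simulator models the exclusive monitor as always succeeding:
   the status register rs is unconditionally set to 0 above, so a
   guest LDXR/STXR retry loop such as

     retry:  ldxr  w1, [x0]
             add   w1, w1, #1
             stxr  w2, w1, [x0]
             cbnz  w2, retry

   never iterates under emulation (adequate for a single-threaded
   sim; real hardware may return a non-zero status and retry).  */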
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, SP_OK,
aarch64_get_reg_u32 (cpu, rn, SP_OK) + aimm);
}
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, SP_OK,
aarch64_get_reg_u64 (cpu, rn, SP_OK) + aimm);
}
/* TODO: do we need to worry about signs here? */
int32_t value1 = aarch64_get_reg_s32 (cpu, rn, SP_OK);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 + aimm);
set_flags_for_add32 (cpu, value1, aimm);
}
uint64_t value1 = aarch64_get_reg_u64 (cpu, rn, SP_OK);
uint64_t value2 = aimm;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 + value2);
set_flags_for_add64 (cpu, value1, value2);
}
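/* The adds32/adds64 hunks above delegate NZCV computation to
   set_flags_for_add32/64.  A minimal sketch of the 32-bit case,
   computing the sum in 64 bits; the helper shape and the N/Z/C/V
   flag masks are assumed from context (illustrative only):  */
#if 0
static void
set_flags_for_add32_sketch (sim_cpu *cpu, int32_t value1, int32_t value2)
{
  uint64_t usum = (uint64_t) (uint32_t) value1 + (uint32_t) value2;
  int64_t  ssum = (int64_t) value1 + (int64_t) value2;
  uint32_t flags = 0;

  if ((uint32_t) usum == 0)
    flags |= Z;
  if (usum & 0x80000000u)
    flags |= N;
  if (usum >> 32)
    flags |= C;                        /* Unsigned overflow.  */
  if (ssum != (int64_t) (int32_t) ssum)
    flags |= V;                        /* Signed overflow.  */
  aarch64_set_CPSR (cpu, flags);
}
#endif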
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, SP_OK,
aarch64_get_reg_u32 (cpu, rn, SP_OK) - aimm);
}
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, SP_OK,
aarch64_get_reg_u64 (cpu, rn, SP_OK) - aimm);
}
uint32_t value1 = aarch64_get_reg_u32 (cpu, rn, SP_OK);
uint32_t value2 = aimm;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 - value2);
set_flags_for_sub32 (cpu, value1, value2);
}
uint64_t value1 = aarch64_get_reg_u64 (cpu, rn, SP_OK);
uint64_t value2 = aimm;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 - value2);
set_flags_for_sub64 (cpu, value1, value2);
}
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP,
aarch64_get_reg_u32 (cpu, rn, NO_SP)
+ shifted32 (aarch64_get_reg_u32 (cpu, rm, NO_SP),
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP,
aarch64_get_reg_u64 (cpu, rn, NO_SP)
+ shifted64 (aarch64_get_reg_u64 (cpu, rm, NO_SP),
uint32_t value2 = shifted32 (aarch64_get_reg_u32 (cpu, rm, NO_SP),
shift, count);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 + value2);
set_flags_for_add32 (cpu, value1, value2);
}
uint64_t value2 = shifted64 (aarch64_get_reg_u64 (cpu, rm, NO_SP),
shift, count);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 + value2);
set_flags_for_add64 (cpu, value1, value2);
}
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP,
aarch64_get_reg_u32 (cpu, rn, NO_SP)
- shifted32 (aarch64_get_reg_u32 (cpu, rm, NO_SP),
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP,
aarch64_get_reg_u64 (cpu, rn, NO_SP)
- shifted64 (aarch64_get_reg_u64 (cpu, rm, NO_SP),
uint32_t value2 = shifted32 (aarch64_get_reg_u32 (cpu, rm, NO_SP),
shift, count);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 - value2);
set_flags_for_sub32 (cpu, value1, value2);
}
uint64_t value2 = shifted64 (aarch64_get_reg_u64 (cpu, rm, NO_SP),
shift, count);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 - value2);
set_flags_for_sub64 (cpu, value1, value2);
}
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, SP_OK,
aarch64_get_reg_u32 (cpu, rn, SP_OK)
+ (extreg32 (cpu, rm, extension) << shift));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, SP_OK,
aarch64_get_reg_u64 (cpu, rn, SP_OK)
+ (extreg64 (cpu, rm, extension) << shift));
uint32_t value1 = aarch64_get_reg_u32 (cpu, rn, SP_OK);
uint32_t value2 = extreg32 (cpu, rm, extension) << shift;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 + value2);
set_flags_for_add32 (cpu, value1, value2);
}
uint64_t value1 = aarch64_get_reg_u64 (cpu, rn, SP_OK);
uint64_t value2 = extreg64 (cpu, rm, extension) << shift;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 + value2);
set_flags_for_add64 (cpu, value1, value2);
}
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, SP_OK,
aarch64_get_reg_u32 (cpu, rn, SP_OK)
- (extreg32 (cpu, rm, extension) << shift));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, SP_OK,
aarch64_get_reg_u64 (cpu, rn, SP_OK)
- (extreg64 (cpu, rm, extension) << shift));
uint32_t value1 = aarch64_get_reg_u32 (cpu, rn, SP_OK);
uint32_t value2 = extreg32 (cpu, rm, extension) << shift;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 - value2);
set_flags_for_sub32 (cpu, value1, value2);
}
uint64_t value1 = aarch64_get_reg_u64 (cpu, rn, SP_OK);
uint64_t value2 = extreg64 (cpu, rm, extension) << shift;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 - value2);
set_flags_for_sub64 (cpu, value1, value2);
}
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP,
aarch64_get_reg_u32 (cpu, rn, NO_SP)
+ aarch64_get_reg_u32 (cpu, rm, NO_SP)
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP,
aarch64_get_reg_u64 (cpu, rn, NO_SP)
+ aarch64_get_reg_u64 (cpu, rm, NO_SP)
uint32_t value2 = aarch64_get_reg_u32 (cpu, rm, NO_SP);
uint32_t carry = IS_SET (C);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 + value2 + carry);
set_flags_for_add32 (cpu, value1, value2 + carry);
}
uint64_t value2 = aarch64_get_reg_u64 (cpu, rm, NO_SP);
uint64_t carry = IS_SET (C);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 + value2 + carry);
set_flags_for_add64 (cpu, value1, value2 + carry);
}
unsigned rn = INSTR (9, 5); /* ngc iff rn == 31. */
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP,
aarch64_get_reg_u32 (cpu, rn, NO_SP)
- aarch64_get_reg_u32 (cpu, rm, NO_SP)
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP,
aarch64_get_reg_u64 (cpu, rn, NO_SP)
- aarch64_get_reg_u64 (cpu, rm, NO_SP)
uint32_t carry = IS_SET (C);
uint32_t result = value1 - value2 - 1 + carry;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, result);
set_flags_for_sub32 (cpu, value1, value2 + 1 - carry);
}
uint64_t carry = IS_SET (C);
uint64_t result = value1 - value2 - 1 + carry;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, result);
set_flags_for_sub64 (cpu, value1, value2 + 1 - carry);
}
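/* SBC/SBCS compute Rd = Rn - Rm - 1 + C (i.e. Rn + ~Rm + C), so the
   subtrahend passed to set_flags_for_sub above is value2 + 1 - carry.
   Worked example, 32 bits: w1 = 5, w2 = 3, C clear =>
   "sbcs w0, w1, w2" gives w0 = 5 - 3 - 1 = 1, with no borrow out.  */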
NYI_assert (10, 10, 0);
NYI_assert (4, 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (! testConditionCode (cpu, INSTR (15, 12)))
{
aarch64_set_CPSR (cpu, INSTR (3, 0));
if (INSTR (20, 16) != vs)
HALT_NYI;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (30, 30))
aarch64_set_vec_u64 (cpu, vd, 1, aarch64_get_vec_u64 (cpu, vs, 1));
NYI_assert (29, 21, 0x070);
NYI_assert (17, 10, 0x0F);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (20, 18))
{
case 0x2:
NYI_assert (31, 21, 0x270);
NYI_assert (15, 10, 0x07);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (16, 16))
{
index = INSTR (20, 17);
NYI_assert (29, 21, 0x070);
NYI_assert (15, 10, 0x01);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (16, 16))
{
index = INSTR (20, 17);
NYI_assert (29, 21, 0x070);
NYI_assert (12, 10, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
for (i = 0; i < (full ? 16 : 8); i++)
{
unsigned int selector = aarch64_get_vec_u8 (cpu, vm, i);
NYI_assert (29, 24, 0x0E);
NYI_assert (13, 10, 0xA);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (23, 22))
{
case 0:
NYI_assert (29, 20, 0x0E0);
NYI_assert (15, 10, 0x03);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (19, 16))
{
case 1:
NYI_assert (15, 15, 0);
NYI_assert (13, 10, 6);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (23, 23))
{
case 0:
NYI_assert (15, 15, 0);
NYI_assert (13, 10, 0xE);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (23, 23))
{
case 0:
NYI_assert (29, 19, 0x1E0);
NYI_assert (11, 10, 1);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (15, 12))
{
case 0x0: /* 32-bit, no shift. */
NYI_assert (29, 19, 0x5E0);
NYI_assert (11, 10, 1);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (15, 12))
{
case 0x0: /* 32-bit, no shift. */
NYI_assert (29, 24, 0x0E);
NYI_assert (21, 10, 0x82E);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (23, 22))
{
case 0:
NYI_assert (29, 24, 0x0E);
NYI_assert (21, 10, 0xC6E);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (23, 22))
{
case 0:
NYI_assert (17, 14, 0);
NYI_assert (12, 10, 7);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (13, 13) == 1)
{
if (INSTR (18, 18) == 1)
NYI_assert (28, 24, 0x0E);
NYI_assert (15, 10, 0x30);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* NB: Read source values before writing results, in case
the source and destination vectors are the same. */
switch (INSTR (23, 22))
NYI_assert (21, 21, 1);
NYI_assert (15, 10, 0x35);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (23, 23))
{
if (INSTR (22, 22))
NYI_assert (21, 21, 1);
NYI_assert (15, 10, 0x21);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (23, 22))
{
case 0:
NYI_assert (21, 21, 1);
NYI_assert (15, 10, 0x27);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (23, 22))
{
case 0:
NYI_assert (21, 21, 1);
NYI_assert (15, 10, 0x25);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (23, 22))
{
case 0:
NYI_assert (21, 21, 1);
NYI_assert (15, 10, 0x31);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
{
double (* fn)(double, double) = INSTR (23, 23)
NYI_assert (29, 21, 0x071);
NYI_assert (15, 10, 0x07);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
for (i = 0; i < (full ? 4 : 2); i++)
aarch64_set_vec_u32 (cpu, vd, i,
aarch64_get_vec_u32 (cpu, vn, i)
NYI_assert (29, 21, 0x173);
NYI_assert (15, 10, 0x07);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
for (i = 0; i < (full ? 16 : 8); i++)
aarch64_set_vec_u8 (cpu, vd, i,
( aarch64_get_vec_u8 (cpu, vd, i)
NYI_assert (29, 21, 0x171);
NYI_assert (15, 10, 0x07);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
for (i = 0; i < (full ? 4 : 2); i++)
aarch64_set_vec_u32 (cpu, vd, i,
aarch64_get_vec_u32 (cpu, vn, i)
NYI_assert (21, 21, 1);
NYI_assert (15, 10, 0x07);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (test_false)
{
for (i = 0; i < (full ? 16 : 8); i++)
NYI_assert (29, 21, 0x077);
NYI_assert (15, 10, 0x07);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
for (i = 0; i < (full ? 16 : 8); i++)
aarch64_set_vec_u8 (cpu, vd, i,
aarch64_get_vec_u8 (cpu, vn, i)
NYI_assert (29, 21, 0x075);
NYI_assert (15, 10, 0x07);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
for (i = 0; i < (full ? 16 : 8); i++)
aarch64_set_vec_u8 (cpu, vd, i,
aarch64_get_vec_u8 (cpu, vn, i)
NYI_assert (29, 21, 0x073);
NYI_assert (15, 10, 0x07);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
for (i = 0; i < (full ? 16 : 8); i++)
aarch64_set_vec_u8 (cpu, vd, i,
aarch64_get_vec_u8 (cpu, vn, i)
NYI_assert (29, 24, 0x0E);
NYI_assert (21, 10, 0x84A);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (23, 22))
{
case 0:
NYI_assert (20, 17, 8);
NYI_assert (15, 10, 0x2A);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch ((INSTR (29, 29) << 1) | INSTR (16, 16))
{
case 0: /* SMAXV. */
NYI_assert (22, 14, 0x0C3);
NYI_assert (11, 10, 2);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (23, 23))
{
switch (INSTR (13, 12))
NYI_assert (15, 14, 3);
NYI_assert (11, 10, 1);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
{
double (* func)(double, double);
NYI_assert (29, 23, 0x1C);
NYI_assert (21, 10, 0x876);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (size)
{
if (! full)
{ \
if (vm != 0) \
HALT_NYI; \
- if (INSTR (22, 22)) \
+ if (INSTR (22, 22)) \
{ \
if (! full) \
HALT_NYI; \
#define VEC_FCMP(CMP) \
do \
{ \
- if (INSTR (22, 22)) \
+ if (INSTR (22, 22)) \
{ \
if (! full) \
HALT_NYI; \
NYI_assert (28, 24, 0x0E);
NYI_assert (21, 21, 1);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if ((INSTR (11, 11)
&& INSTR (14, 14))
|| ((INSTR (11, 11) == 0
/* FIXME: What is a signed shift left in this context? */
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (23, 22))
{
case 0:
NYI_assert (29, 24, 0x2E);
NYI_assert (15, 10, 0x11);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (23, 22))
{
case 0:
NYI_assert (21, 21, 1);
NYI_assert (15, 10, 0x33);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
{
if (! full)
NYI_assert (21, 21, 1);
NYI_assert (15, 10, 0x19);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (29, 29))
{
switch (INSTR (23, 22))
NYI_assert (21, 21, 1);
NYI_assert (15, 10, 0x1B);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (29, 29))
{
switch (INSTR (23, 22))
if (size == 3)
HALT_UNALLOC;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (30, 29))
{
case 2: /* SSUBL2. */
copy_vn = cpu->fr[vn];
copy_vm = cpu->fr[vm];
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (size)
{
case 0:
NYI_assert (29, 21, 0x070);
NYI_assert (15, 10, 0x0F);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (16, 16))
{
/* Byte transfer. */
NYI_assert (29, 23, 0x1D);
NYI_assert (21, 10, 0x83E);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
{
if (! full)
NYI_assert (29, 23, 0x1D);
NYI_assert (21, 10, 0x86E);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
{
if (! full)
NYI_assert (29, 24, 0x0E);
NYI_assert (21, 10, 0x802);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (size)
{
case 0:
NYI_assert (29, 24, 0x0E);
NYI_assert (21, 10, 0x806);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (size)
{
case 0:
NYI_assert (28, 22, 0x3C);
NYI_assert (15, 10, 0x29);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (30, 29))
{
case 2: /* SXTL2, SSHLL2. */
NYI_assert (29, 23, 0x1E);
NYI_assert (15, 10, 0x15);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
{
shift = INSTR (21, 16);
NYI_assert (28, 23, 0x1E);
NYI_assert (15, 10, 0x01);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
{
shift = 128 - shift;
NYI_assert (15, 12, 0x8);
NYI_assert (10, 10, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (size)
{
case 1:
NYI_assert (29, 24, 0x2E);
NYI_assert (21, 10, 0x82E);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (23, 22))
{
case 0:
NYI_assert (29, 23, 0x5B);
NYI_assert (21, 10, 0x87E);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22) == 0)
for (i = 0; i < (full ? 4 : 2); i++)
aarch64_set_vec_float (cpu, vd, i,
NYI_assert (15, 12, 4);
NYI_assert (10, 10, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (23, 22))
{
case 1:
NYI_assert (21, 21, 1);
NYI_assert (15, 10, 0x21);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (23, 22))
{
case 0:
NYI_assert (21, 21, 1);
NYI_assert (15, 10, 0x25);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (23, 22))
{
case 0:
NYI_assert (21, 21, 1);
NYI_assert (15, 10, 0x3F);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
{
if (! full)
NYI_assert (21, 21, 1);
NYI_assert (15, 10, 0x37);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
{
if (! full)
NYI_assert (21, 21, 1);
NYI_assert (15, 10, 0x35);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
{
/* Extract values before adding them in case vd == vn/vm. */
NYI_assert (29, 23, 0x5D);
NYI_assert (21, 10, 0x87E);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
{
if (! full)
NYI_assert (29, 23, 0x5D);
NYI_assert (21, 10, 0x83E);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
{
if (! full)
NYI_assert (29, 10, 0xB8816);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
for (i = 0; i < (full ? 16 : 8); i++)
aarch64_set_vec_u8 (cpu, vd, i, ~ aarch64_get_vec_u8 (cpu, vn, i));
}
NYI_assert (29, 24, 0x2E);
NYI_assert (21, 10, 0x812);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (23, 22))
{
case 0:
NYI_assert (15, 15, 0);
NYI_assert (10, 10, 1);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (16, 16))
{
/* Move a byte. */
NYI_assert (29, 24, 0x2E);
NYI_assert (21, 10, 0x802);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (size)
{
case 0:
j = 0;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
for (i = src_index; i < (full ? 16 : 8); i++)
val.b[j ++] = aarch64_get_vec_u8 (cpu, vn, i);
for (i = 0; i < src_index; i++)
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float (cpu, sd, aarch64_get_FP_float (cpu, sa)
+ aarch64_get_FP_float (cpu, sn)
* aarch64_get_FP_float (cpu, sm));
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double (cpu, sd, aarch64_get_FP_double (cpu, sa)
+ aarch64_get_FP_double (cpu, sn)
* aarch64_get_FP_double (cpu, sm));
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float (cpu, sd, aarch64_get_FP_float (cpu, sa)
- aarch64_get_FP_float (cpu, sn)
* aarch64_get_FP_float (cpu, sm));
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double (cpu, sd, aarch64_get_FP_double (cpu, sa)
- aarch64_get_FP_double (cpu, sn)
* aarch64_get_FP_double (cpu, sm));
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float (cpu, sd, - aarch64_get_FP_float (cpu, sa)
+ (- aarch64_get_FP_float (cpu, sn))
* aarch64_get_FP_float (cpu, sm));
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double (cpu, sd, - aarch64_get_FP_double (cpu, sa)
+ (- aarch64_get_FP_double (cpu, sn))
* aarch64_get_FP_double (cpu, sm));
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float (cpu, sd, - aarch64_get_FP_float (cpu, sa)
+ aarch64_get_FP_float (cpu, sn)
* aarch64_get_FP_float (cpu, sm));
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double (cpu, sd, - aarch64_get_FP_double (cpu, sa)
+ aarch64_get_FP_double (cpu, sn)
* aarch64_get_FP_double (cpu, sm));
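/* Note that the FMADD/FMSUB/FNMADD/FNMSUB hunks above emulate the
   fused operations with a separate multiply and add, which rounds
   twice; the architectural operation rounds once.  A sketch of a
   fused alternative using C99 fmaf () (illustrative, not what this
   patch does):  */
#if 0
#include <math.h>
/* FMADD single: sd = sa + sn * sm, with a single rounding.  */
aarch64_set_FP_float (cpu, sd,
                      fmaf (aarch64_get_FP_float (cpu, sn),
                            aarch64_get_FP_float (cpu, sm),
                            aarch64_get_FP_float (cpu, sa)));
#endif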
NYI_assert (11, 10, 0x1);
NYI_assert (4, 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (! testConditionCode (cpu, INSTR (15, 12)))
{
aarch64_set_CPSR (cpu, INSTR (3, 0));
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float (cpu, sd, aarch64_get_FP_float (cpu, sn)
+ aarch64_get_FP_float (cpu, sm));
}
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double (cpu, sd, aarch64_get_FP_double (cpu, sn)
+ aarch64_get_FP_double (cpu, sm));
}
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float (cpu, sd, aarch64_get_FP_float (cpu, sn)
/ aarch64_get_FP_float (cpu, sm));
}
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double (cpu, sd, aarch64_get_FP_double (cpu, sn)
/ aarch64_get_FP_double (cpu, sm));
}
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float (cpu, sd, aarch64_get_FP_float (cpu, sn)
* aarch64_get_FP_float (cpu, sm));
}
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double (cpu, sd, aarch64_get_FP_double (cpu, sn)
* aarch64_get_FP_double (cpu, sm));
}
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float (cpu, sd, - (aarch64_get_FP_float (cpu, sn)
* aarch64_get_FP_float (cpu, sm)));
}
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double (cpu, sd, - (aarch64_get_FP_double (cpu, sn)
* aarch64_get_FP_double (cpu, sm)));
}
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float (cpu, sd, aarch64_get_FP_float (cpu, sn)
- aarch64_get_FP_float (cpu, sm));
}
unsigned sn = INSTR ( 9, 5);
unsigned sd = INSTR ( 4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double (cpu, sd, aarch64_get_FP_double (cpu, sn)
- aarch64_get_FP_double (cpu, sm));
}
NYI_assert (31, 23, 0x03C);
NYI_assert (15, 10, 0x1E);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
aarch64_set_FP_double (cpu, sd,
dminnm (aarch64_get_FP_double (cpu, sn),
NYI_assert (31, 23, 0x03C);
NYI_assert (15, 10, 0x1A);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
aarch64_set_FP_double (cpu, sd,
dmaxnm (aarch64_get_FP_double (cpu, sn),
NYI_assert (31, 23, 0x03C);
NYI_assert (11, 10, 0x3);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
aarch64_set_FP_double (cpu, sd, set ? sn : sm);
else
unsigned int rn = INSTR (9, 5);
unsigned int st = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_mem_u32 (cpu, aarch64_get_reg_u64 (cpu, st, 1) + offset,
aarch64_get_vec_u32 (cpu, rn, 0));
}
unsigned int rn = INSTR (9, 5);
unsigned int st = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_mem_u64 (cpu, aarch64_get_reg_u64 (cpu, st, 1) + offset,
aarch64_get_vec_u64 (cpu, rn, 0));
}
unsigned int st = INSTR (4, 0);
FRegister a;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_get_FP_long_double (cpu, rn, & a);
aarch64_set_mem_long_double (cpu,
aarch64_get_reg_u64 (cpu, st, 1)
unsigned int rn = INSTR (9, 5);
unsigned int st = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float (cpu, st, aarch64_get_FP_float (cpu, rn));
}
unsigned int rn = INSTR (9, 5);
unsigned int st = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double (cpu, st, aarch64_get_FP_double (cpu, rn));
}
unsigned int rn = INSTR (9, 5);
unsigned int st = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_vec_u32 (cpu, st, 0, aarch64_get_reg_u32 (cpu, rn, NO_SP));
}
unsigned int rn = INSTR (9, 5);
unsigned int st = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_vec_u64 (cpu, st, 0, aarch64_get_reg_u64 (cpu, rn, NO_SP));
}
unsigned int rn = INSTR (9, 5);
unsigned int st = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, st, NO_SP, aarch64_get_vec_u32 (cpu, rn, 0));
}
unsigned int rn = INSTR (9, 5);
unsigned int st = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, st, NO_SP, aarch64_get_vec_u64 (cpu, rn, 0));
}
uint32_t imm = INSTR (20, 13);
float f = fp_immediate_for_encoding_32 (imm);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float (cpu, sd, f);
}
uint32_t imm = INSTR (20, 13);
double d = fp_immediate_for_encoding_64 (imm);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double (cpu, sd, d);
}
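/* FMOV (immediate): the 8-bit field abcdefgh expands to
   (-1)^a * 2^e * (16 + efgh) / 16, with the 3-bit field bcd selecting
   an exponent e in [-3, 4] (b is inverted and replicated to fill the
   format's exponent field).  Worked example under that assumed
   decoding: imm8 = 0x70 => a = 0, bcd = 111, efgh = 0 =>
   +1.0 * 2^0, so "fmov s0, #1.0" encodes imm8 as 0x70.  */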
unsigned int rn = INSTR (9, 5);
unsigned int st = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_vec_u32 (cpu, st, 0, aarch64_get_mem_u32
(cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK) + offset));
}
unsigned int rn = INSTR (9, 5);
unsigned int st = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_vec_u64 (cpu, st, 0, aarch64_get_mem_u64
(cpu, aarch64_get_reg_u64 (cpu, rn, SP_OK) + offset));
}
FRegister a;
uint64_t addr = aarch64_get_reg_u64 (cpu, rn, SP_OK) + offset;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_get_mem_long_double (cpu, addr, & a);
aarch64_set_FP_long_double (cpu, st, a);
}
unsigned sd = INSTR (4, 0);
float value = aarch64_get_FP_float (cpu, sn);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float (cpu, sd, fabsf (value));
}
unsigned sd = INSTR (4, 0);
double value = aarch64_get_FP_double (cpu, sn);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double (cpu, sd, fabs (value));
}
unsigned sn = INSTR (9, 5);
unsigned sd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float (cpu, sd, - aarch64_get_FP_float (cpu, sn));
}
unsigned sn = INSTR (9, 5);
unsigned sd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double (cpu, sd, - aarch64_get_FP_double (cpu, sn));
}
unsigned sn = INSTR (9, 5);
unsigned sd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float (cpu, sd, sqrtf (aarch64_get_FP_float (cpu, sn)));
}
unsigned sn = INSTR (9, 5);
unsigned sd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double (cpu, sd,
sqrt (aarch64_get_FP_double (cpu, sn)));
}
unsigned sn = INSTR (9, 5);
unsigned sd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float (cpu, sd, (float) aarch64_get_FP_double (cpu, sn));
}
unsigned sn = INSTR (9, 5);
unsigned sd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double (cpu, sd, (double) aarch64_get_FP_float (cpu, sn));
}
/* FIXME: Add support for rmode == 6 exactness check. */
rmode = uimm (aarch64_get_FPSR (cpu), 23, 22);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
{
double val = aarch64_get_FP_double (cpu, rs);
NYI_assert (31, 10, 0x7B890);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float (cpu, rd, (float) aarch64_get_FP_half (cpu, rn));
}
NYI_assert (31, 10, 0x7B8B0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double (cpu, rd, (double) aarch64_get_FP_half (cpu, rn));
}
NYI_assert (31, 10, 0x788F0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_half (cpu, rd, aarch64_get_FP_float (cpu, rn));
}
NYI_assert (31, 10, 0x798F0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_half (cpu, rd, (float) aarch64_get_FP_double (cpu, rn));
}
unsigned rn = INSTR (9, 5);
unsigned sd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float
(cpu, sd, (float) aarch64_get_reg_s32 (cpu, rn, NO_SP));
}
unsigned rn = INSTR (9, 5);
unsigned sd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_float
(cpu, sd, (float) aarch64_get_reg_s64 (cpu, rn, NO_SP));
}
unsigned rn = INSTR (9, 5);
unsigned sd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double
(cpu, sd, (double) aarch64_get_reg_s32 (cpu, rn, NO_SP));
}
unsigned rn = INSTR (9, 5);
unsigned sd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_FP_double
(cpu, sd, (double) aarch64_get_reg_s64 (cpu, rn, NO_SP));
}
RAISE_EXCEPTIONS (f, value, FLOAT, INT);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* Avoid sign extension to 64 bit. */
aarch64_set_reg_u64 (cpu, rd, NO_SP, (uint32_t) value);
}
RAISE_EXCEPTIONS (f, value, FLOAT, LONG);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_s64 (cpu, rd, NO_SP, value);
}
RAISE_EXCEPTIONS (d, value, DOUBLE, INT);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* Avoid sign extension to 64 bit. */
aarch64_set_reg_u64 (cpu, rd, NO_SP, (uint32_t) value);
}
RAISE_EXCEPTIONS (d, value, DOUBLE, LONG);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_s64 (cpu, rd, NO_SP, value);
}
/* Convert to fixed point. */
HALT_NYI;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (31, 31))
{
/* Convert to unsigned 64-bit integer. */
HALT_NYI;
/* FIXME: Add exception raising. */
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (31, 31))
{
uint64_t value = aarch64_get_reg_u64 (cpu, rs, NO_SP);
if (INSTR (15, 10) != 0)
HALT_UNALLOC;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (16, 16))
aarch64_set_vec_u64 (cpu, rd, 1, aarch64_get_reg_u64 (cpu, rn, NO_SP));
else
float fvalue1 = aarch64_get_FP_float (cpu, sn);
float fvalue2 = aarch64_get_FP_float (cpu, sm);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
set_flags_for_float_compare (cpu, fvalue1, fvalue2);
}
unsigned sn = INSTR ( 9, 5);
float fvalue1 = aarch64_get_FP_float (cpu, sn);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
set_flags_for_float_compare (cpu, fvalue1, 0.0f);
}
float fvalue1 = aarch64_get_FP_float (cpu, sn);
float fvalue2 = aarch64_get_FP_float (cpu, sm);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
set_flags_for_float_compare (cpu, fvalue1, fvalue2);
}
unsigned sn = INSTR ( 9, 5);
float fvalue1 = aarch64_get_FP_float (cpu, sn);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
set_flags_for_float_compare (cpu, fvalue1, 0.0f);
}
double dvalue1 = aarch64_get_FP_double (cpu, sn);
double dvalue2 = aarch64_get_FP_double (cpu, sm);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
set_flags_for_double_compare (cpu, dvalue1, dvalue2);
}
unsigned sn = INSTR ( 9, 5);
double dvalue1 = aarch64_get_FP_double (cpu, sn);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
set_flags_for_double_compare (cpu, dvalue1, 0.0);
}
double dvalue1 = aarch64_get_FP_double (cpu, sn);
double dvalue2 = aarch64_get_FP_double (cpu, sm);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
set_flags_for_double_compare (cpu, dvalue1, dvalue2);
}
unsigned sn = INSTR ( 9, 5);
double dvalue1 = aarch64_get_FP_double (cpu, sn);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
set_flags_for_double_compare (cpu, dvalue1, 0.0);
}
NYI_assert (31, 23, 0x0FC);
NYI_assert (21, 10, 0xC36);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
{
double val1 = aarch64_get_vec_double (cpu, Fn, 0);
NYI_assert (21, 21, 1);
NYI_assert (15, 10, 0x35);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
aarch64_set_FP_double (cpu, rd,
fabs (aarch64_get_FP_double (cpu, rn)
NYI_assert (31, 21, 0x2F7);
NYI_assert (15, 10, 0x0D);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_vec_u64 (cpu, rd, 0,
aarch64_get_vec_u64 (cpu, rn, 0) >
aarch64_get_vec_u64 (cpu, rm, 0) ? -1L : 0L);
NYI_assert (31, 23, 0x0FE);
NYI_assert (15, 10, 0x01);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_vec_u64 (cpu, rd, 0,
aarch64_get_vec_u64 (cpu, rn, 0) >> amount);
}
NYI_assert (31, 21, 0x2F7);
NYI_assert (15, 10, 0x11);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (shift >= 0)
aarch64_set_vec_s64 (cpu, rd, 0,
aarch64_get_vec_s64 (cpu, rn, 0) << shift);
if (INSTR (22, 22) == 0)
HALT_UNALLOC;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
switch (INSTR (15, 10))
{
case 0x01: /* SSHR */
NYI_assert (15, 12, 0xE);
NYI_assert (10, 10, 1);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
{
double val1 = aarch64_get_FP_double (cpu, rn);
NYI_assert (31, 21, 0x2F0);
NYI_assert (15, 10, 0x01);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (16, 16))
{
/* 8-bit. */
NYI_assert (31, 10, 0x1FB82E);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_vec_u64 (cpu, rd, 0, - aarch64_get_vec_u64 (cpu, rn, 0));
}
NYI_assert (31, 21, 0x3F7);
NYI_assert (15, 10, 0x11);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (shift >= 0)
aarch64_set_vec_u64 (cpu, rd, 0, aarch64_get_vec_u64 (cpu, rn, 0) << shift);
else
Fm = INSTR (9, 5);
Fn = INSTR (20, 16);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
val1 = aarch64_get_FP_double (cpu, Fm);
val2 = aarch64_get_FP_double (cpu, Fn);
NYI_assert (31, 23, 0x0FC);
NYI_assert (21, 10, 0x876);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (INSTR (22, 22))
{
uint64_t val = aarch64_get_vec_u64 (cpu, rn, 0);
address &= ~0xfff;
}
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, address + offset);
}
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, SP_OK,
aarch64_get_reg_u32 (cpu, rn, NO_SP) & bimm);
}
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, SP_OK,
aarch64_get_reg_u64 (cpu, rn, NO_SP) & bimm);
}
uint32_t value1 = aarch64_get_reg_u32 (cpu, rn, NO_SP);
uint32_t value2 = bimm;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 & value2);
set_flags_for_binop32 (cpu, value1 & value2);
}
uint64_t value1 = aarch64_get_reg_u64 (cpu, rn, NO_SP);
uint64_t value2 = bimm;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 & value2);
set_flags_for_binop64 (cpu, value1 & value2);
}
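/* ANDS/BICS set only N and Z from the result and clear C and V, which
   is why set_flags_for_binop32/64 above take just the result.  A
   sketch of the assumed helper (illustrative only):  */
#if 0
static void
set_flags_for_binop32_sketch (sim_cpu *cpu, uint32_t result)
{
  uint32_t flags = 0;

  if (result == 0)
    flags |= Z;
  if (result & 0x80000000u)
    flags |= N;
  aarch64_set_CPSR (cpu, flags);  /* C and V are left clear.  */
}
#endif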
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, SP_OK,
aarch64_get_reg_u32 (cpu, rn, NO_SP) ^ bimm);
}
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, SP_OK,
aarch64_get_reg_u64 (cpu, rn, NO_SP) ^ bimm);
}
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, SP_OK,
aarch64_get_reg_u32 (cpu, rn, NO_SP) | bimm);
}
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, SP_OK,
aarch64_get_reg_u64 (cpu, rn, NO_SP) | bimm);
}
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64
(cpu, rd, NO_SP, aarch64_get_reg_u32 (cpu, rn, NO_SP)
& shifted32 (aarch64_get_reg_u32 (cpu, rm, NO_SP), shift, count));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64
(cpu, rd, NO_SP, aarch64_get_reg_u64 (cpu, rn, NO_SP)
& shifted64 (aarch64_get_reg_u64 (cpu, rm, NO_SP), shift, count));
uint32_t value2 = shifted32 (aarch64_get_reg_u32 (cpu, rm, NO_SP),
shift, count);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 & value2);
set_flags_for_binop32 (cpu, value1 & value2);
}
uint64_t value2 = shifted64 (aarch64_get_reg_u64 (cpu, rm, NO_SP),
shift, count);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 & value2);
set_flags_for_binop64 (cpu, value1 & value2);
}
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64
(cpu, rd, NO_SP, aarch64_get_reg_u32 (cpu, rn, NO_SP)
& ~ shifted32 (aarch64_get_reg_u32 (cpu, rm, NO_SP), shift, count));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64
(cpu, rd, NO_SP, aarch64_get_reg_u64 (cpu, rn, NO_SP)
& ~ shifted64 (aarch64_get_reg_u64 (cpu, rm, NO_SP), shift, count));
uint32_t value2 = ~ shifted32 (aarch64_get_reg_u32 (cpu, rm, NO_SP),
shift, count);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 & value2);
set_flags_for_binop32 (cpu, value1 & value2);
}
uint64_t value2 = ~ shifted64 (aarch64_get_reg_u64 (cpu, rm, NO_SP),
shift, count);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value1 & value2);
set_flags_for_binop64 (cpu, value1 & value2);
}
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64
(cpu, rd, NO_SP, aarch64_get_reg_u32 (cpu, rn, NO_SP)
^ ~ shifted32 (aarch64_get_reg_u32 (cpu, rm, NO_SP), shift, count));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64
(cpu, rd, NO_SP, aarch64_get_reg_u64 (cpu, rn, NO_SP)
^ ~ shifted64 (aarch64_get_reg_u64 (cpu, rm, NO_SP), shift, count));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64
(cpu, rd, NO_SP, aarch64_get_reg_u32 (cpu, rn, NO_SP)
^ shifted32 (aarch64_get_reg_u32 (cpu, rm, NO_SP), shift, count));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64
(cpu, rd, NO_SP, aarch64_get_reg_u64 (cpu, rn, NO_SP)
^ shifted64 (aarch64_get_reg_u64 (cpu, rm, NO_SP), shift, count));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64
(cpu, rd, NO_SP, aarch64_get_reg_u32 (cpu, rn, NO_SP)
| shifted32 (aarch64_get_reg_u32 (cpu, rm, NO_SP), shift, count));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64
(cpu, rd, NO_SP, aarch64_get_reg_u64 (cpu, rn, NO_SP)
| shifted64 (aarch64_get_reg_u64 (cpu, rm, NO_SP), shift, count));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64
(cpu, rd, NO_SP, aarch64_get_reg_u32 (cpu, rn, NO_SP)
| ~ shifted32 (aarch64_get_reg_u32 (cpu, rm, NO_SP), shift, count));
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64
(cpu, rd, NO_SP, aarch64_get_reg_u64 (cpu, rn, NO_SP)
| ~ shifted64 (aarch64_get_reg_u64 (cpu, rm, NO_SP), shift, count));
{
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, val << (pos * 16));
}
{
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, ((uint64_t) val) << (pos * 16));
}
{
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, ((val << (pos * 16)) ^ 0xffffffffU));
}
{
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64
(cpu, rd, NO_SP, ((((uint64_t) val) << (pos * 16))
^ 0xffffffffffffffffULL));
uint32_t value = val << (pos * 16);
uint32_t mask = ~(0xffffU << (pos * 16));
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, (value | (current & mask)));
}
uint64_t value = (uint64_t) val << (pos * 16);
uint64_t mask = ~(0xffffULL << (pos * 16));
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, (value | (current & mask)));
}
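/* The MOVK hunks above keep every half-word of the destination except
   the one being written.  Worked example:
   x0 = 0xffffffffffffffff, "movk x0, #0x1234, lsl #16" (pos = 1) =>
   value = 0x12340000, mask = 0xffffffff0000ffff =>
   x0 = 0xffffffff1234ffff.  */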
value >>= r - (s + 1);
}
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
rd = INSTR (4, 0);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value);
}
value >>= r - (s + 1);
}
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
rd = INSTR (4, 0);
aarch64_set_reg_u64 (cpu, rd, NO_SP, value);
}
value >>= r - (s + 1);
}
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
rd = INSTR (4, 0);
aarch64_set_reg_u64 (cpu, rd, NO_SP, (uint32_t) value);
}
value >>= r - (s + 1);
}
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
rd = INSTR (4, 0);
aarch64_set_reg_s64 (cpu, rd, NO_SP, value);
}
value2 &= ~mask;
value2 |= value;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64
(cpu, rd, NO_SP, (aarch64_get_reg_u32 (cpu, rd, NO_SP) & ~mask) | value);
}
mask >>= r - (s + 1);
}
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
rd = INSTR (4, 0);
aarch64_set_reg_u64
(cpu, rd, NO_SP, (aarch64_get_reg_u64 (cpu, rd, NO_SP) & ~mask) | value);
val2 = aarch64_get_reg_u32 (cpu, rn, NO_SP);
val2 <<= (32 - imms);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP, val1 | val2);
}
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP,
aarch64_get_reg_u32 (cpu, ra, NO_SP)
+ aarch64_get_reg_u32 (cpu, rn, NO_SP)
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP,
aarch64_get_reg_u64 (cpu, ra, NO_SP)
- + aarch64_get_reg_u64 (cpu, rn, NO_SP)
- * aarch64_get_reg_u64 (cpu, rm, NO_SP));
+ + (aarch64_get_reg_u64 (cpu, rn, NO_SP)
+ * aarch64_get_reg_u64 (cpu, rm, NO_SP)));
}
/* 32 bit multiply and sub. */
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP,
aarch64_get_reg_u32 (cpu, ra, NO_SP)
- aarch64_get_reg_u32 (cpu, rn, NO_SP)
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP,
aarch64_get_reg_u64 (cpu, ra, NO_SP)
- aarch64_get_reg_u64 (cpu, rn, NO_SP)
uint64_t value2_hi = highWordToU64 (value2);
/* Cross-multiply and collect results. */
-
uint64_t xproductlo = value1_lo * value2_lo;
uint64_t xproductmid1 = value1_lo * value2_hi;
uint64_t xproductmid2 = value1_hi * value2_lo;
uvalue2 = value2;
}
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
uresult = mul64hi (uvalue1, uvalue2);
result = uresult;
result *= signum;
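/* mul64hi returns the high 64 bits of the unsigned 128-bit product by
   cross-multiplying 32-bit halves, and smulh then corrects the sign
   via |a| * |b| and signum as above.  A sketch of how the partial
   products combine; xproducthi and lowWordToU64 are assumed by
   analogy with the names shown earlier (illustrative only):  */
#if 0
uint64_t carry = ((xproductlo >> 32)
                  + lowWordToU64 (xproductmid1)
                  + lowWordToU64 (xproductmid2)) >> 32;
return xproducthi + highWordToU64 (xproductmid1)
  + highWordToU64 (xproductmid2) + carry;
#endif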
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* N.B. we need to multiply the signed 32 bit values in rn, rm to
obtain a 64 bit product. */
aarch64_set_reg_u64
unsigned rn = INSTR (9, 5);
unsigned rd = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* N.B. we need to multiply the signed 32 bit values in rn, rm to
obtain a 64 bit product. */
aarch64_set_reg_u64
if (ra != R31)
HALT_UNALLOC;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rd, NO_SP,
mul64hi (aarch64_get_reg_u64 (cpu, rn, NO_SP),
aarch64_get_reg_u64 (cpu, rm, NO_SP)));
static void
bl (sim_cpu *cpu, int32_t offset)
{
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_save_LR (cpu);
aarch64_set_next_PC_by_offset (cpu, offset);
br (sim_cpu *cpu)
{
unsigned rn = INSTR (9, 5);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_next_PC (cpu, aarch64_get_reg_u64 (cpu, rn, NO_SP));
}
{
unsigned rn = INSTR (9, 5);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
/* The pseudo code in the spec says we update LR before fetching
   the value from rn. */
aarch64_save_LR (cpu);
unsigned rn = INSTR (9, 5);
aarch64_set_next_PC (cpu, aarch64_get_reg_u64 (cpu, rn, NO_SP));
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (TRACE_BRANCH_P (cpu))
{
TRACE_BRANCH (cpu,
static void
nop (sim_cpu *cpu)
{
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
}
/* Data synchronization barrier. */
static void
dsb (sim_cpu *cpu)
{
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
}
/* Data memory barrier. */
static void
dmb (sim_cpu *cpu)
{
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
}
/* Instruction synchronization barrier. */
static void
isb (sim_cpu *cpu)
{
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
}
static void
static void
bcc (sim_cpu *cpu, int32_t offset, CondCode cc)
{
- /* the test returns TRUE if CC is met. */
+ /* The test returns TRUE if CC is met. */
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (testConditionCode (cpu, cc))
aarch64_set_next_PC_by_offset (cpu, offset);
}
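/* testConditionCode evaluates the standard ARM condition table
   against the saved NZCV flags.  A sketch of two representative
   cases, assuming EQ/GE CondCode enumerators (illustrative only):  */
#if 0
case EQ: return IS_SET (Z);
case GE: return IS_SET (N) == IS_SET (V);
#endif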
{
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (aarch64_get_reg_u32 (cpu, rt, NO_SP) != 0)
aarch64_set_next_PC_by_offset (cpu, offset);
}
{
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (aarch64_get_reg_u64 (cpu, rt, NO_SP) != 0)
aarch64_set_next_PC_by_offset (cpu, offset);
}
{
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (aarch64_get_reg_u32 (cpu, rt, NO_SP) == 0)
aarch64_set_next_PC_by_offset (cpu, offset);
}
{
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (aarch64_get_reg_u64 (cpu, rt, NO_SP) == 0)
aarch64_set_next_PC_by_offset (cpu, offset);
}
{
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (aarch64_get_reg_u64 (cpu, rt, NO_SP) & (((uint64_t) 1) << pos))
aarch64_set_next_PC_by_offset (cpu, offset);
}
-/* branch on register bit test zero -- one size fits all. */
+/* Branch on register bit test zero -- one size fits all. */
static void
tbz (sim_cpu *cpu, uint32_t pos, int32_t offset)
{
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (!(aarch64_get_reg_u64 (cpu, rt, NO_SP) & (((uint64_t) 1) << pos)))
aarch64_set_next_PC_by_offset (cpu, offset);
}
{
uint64_t result = 0;
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
if (val != 0xf000)
{
TRACE_SYSCALL (cpu, " HLT [0x%x]", val);
unsigned sys_op2 = INSTR (7, 5);
unsigned rt = INSTR (4, 0);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
aarch64_set_reg_u64 (cpu, rt, NO_SP,
system_get (cpu, sys_op0, sys_op1, sys_crn, sys_crm, sys_op2));
}
NYI_assert (31, 20, 0xD51);
+ TRACE_DECODE (cpu, "emulated at line %d", __LINE__);
system_set (cpu, sys_op0, sys_op1, sys_crn, sys_crm, sys_op2,
aarch64_get_reg_u64 (cpu, rt, NO_SP));
}