class icache_sim_t : public cache_memtracer_t
{
public:
icache_sim_t(const char* config) : cache_memtracer_t(config, "I$") {}
- bool interested_in_range(uint64_t begin, uint64_t end, bool store, bool fetch)
+ bool interested_in_range(uint64_t begin, uint64_t end, access_type type)
{
- return fetch;
+ return type == FETCH;
}
- void trace(uint64_t addr, size_t bytes, bool store, bool fetch)
+ void trace(uint64_t addr, size_t bytes, access_type type)
{
- if (fetch) cache->access(addr, bytes, false);
+ if (type == FETCH) cache->access(addr, bytes, false);
}
};
class dcache_sim_t : public cache_memtracer_t
{
public:
dcache_sim_t(const char* config) : cache_memtracer_t(config, "D$") {}
- bool interested_in_range(uint64_t begin, uint64_t end, bool store, bool fetch)
+ bool interested_in_range(uint64_t begin, uint64_t end, access_type type)
{
- return !fetch;
+ return type == LOAD || type == STORE;
}
- void trace(uint64_t addr, size_t bytes, bool store, bool fetch)
+ void trace(uint64_t addr, size_t bytes, access_type type)
{
- if (!fetch) cache->access(addr, bytes, store);
+ if (type == LOAD || type == STORE) cache->access(addr, bytes, type == STORE);
}
};
#include <string.h>
#include <vector>
+enum access_type {
+ LOAD,
+ STORE,
+ FETCH,
+};
+
class memtracer_t
{
public:
memtracer_t() {}
virtual ~memtracer_t() {}
- virtual bool interested_in_range(uint64_t begin, uint64_t end, bool store, bool fetch) = 0;
- virtual void trace(uint64_t addr, size_t bytes, bool store, bool fetch) = 0;
+ virtual bool interested_in_range(uint64_t begin, uint64_t end, access_type type) = 0;
+ virtual void trace(uint64_t addr, size_t bytes, access_type type) = 0;
};
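
For any out-of-tree tracer still written against the old two-boolean interface, the translation to the new enum is mechanical. A minimal shim like the one below (hypothetical; to_access_type is not part of this patch) captures the precedence the old call sites used, where fetch was tested first and store only distinguished between the two kinds of data access:

static inline access_type to_access_type(bool store, bool fetch)
{
  if (fetch) return FETCH;      // instruction fetches were flagged by 'fetch'
  return store ? STORE : LOAD;  // data accesses were split on 'store'
}
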
class memtracer_list_t : public memtracer_t
{
public:
bool empty() { return list.empty(); }
- bool interested_in_range(uint64_t begin, uint64_t end, bool store, bool fetch)
+ bool interested_in_range(uint64_t begin, uint64_t end, access_type type)
{
for (std::vector<memtracer_t*>::iterator it = list.begin(); it != list.end(); ++it)
- if ((*it)->interested_in_range(begin, end, store, fetch))
+ if ((*it)->interested_in_range(begin, end, type))
return true;
return false;
}
- void trace(uint64_t addr, size_t bytes, bool store, bool fetch)
+ void trace(uint64_t addr, size_t bytes, access_type type)
{
for (std::vector<memtracer_t*>::iterator it = list.begin(); it != list.end(); ++it)
- (*it)->trace(addr, bytes, store, fetch);
+ (*it)->trace(addr, bytes, type);
}
void hook(memtracer_t* h)
{
flush_icache();
}
-void* mmu_t::refill_tlb(reg_t addr, reg_t bytes, bool store, bool fetch)
+void* mmu_t::refill_tlb(reg_t addr, reg_t bytes, access_type type)
{
reg_t idx = (addr >> PGSHIFT) % TLB_ENTRIES;
reg_t expected_tag = addr >> PGSHIFT;
pgbase = addr & -PGSIZE;
} else {
reg_t mode = get_field(proc->state.mstatus, MSTATUS_PRV);
- if (!fetch && get_field(proc->state.mstatus, MSTATUS_MPRV))
+ if (type != FETCH && get_field(proc->state.mstatus, MSTATUS_MPRV))
mode = get_field(proc->state.mstatus, MSTATUS_PRV1);
if (get_field(proc->state.mstatus, MSTATUS_VM) == VM_MBARE)
mode = PRV_M;
reg_t msb_mask = (reg_t(2) << (proc->xlen-1))-1; // zero-extend from xlen
pgbase = addr & -PGSIZE & msb_mask;
} else {
- pgbase = walk(addr, mode > PRV_U, store, fetch);
+ pgbase = walk(addr, mode > PRV_U, type);
}
}
reg_t paddr = pgbase + pgoff;
if (pgbase >= memsz) {
- if (fetch) throw trap_instruction_access_fault(addr);
- else if (store) throw trap_store_access_fault(addr);
+ if (type == FETCH) throw trap_instruction_access_fault(addr);
+ else if (type == STORE) throw trap_store_access_fault(addr);
else throw trap_load_access_fault(addr);
}
- bool trace = tracer.interested_in_range(pgbase, pgbase + PGSIZE, store, fetch);
- if (unlikely(!fetch && trace))
- tracer.trace(paddr, bytes, store, fetch);
+ bool trace = tracer.interested_in_range(pgbase, pgbase + PGSIZE, type);
+ if (unlikely(type != FETCH && trace))
+ tracer.trace(paddr, bytes, type);
else
{
if (tlb_load_tag[idx] != expected_tag) tlb_load_tag[idx] = -1;
if (tlb_store_tag[idx] != expected_tag) tlb_store_tag[idx] = -1;
if (tlb_insn_tag[idx] != expected_tag) tlb_insn_tag[idx] = -1;
- if (fetch) tlb_insn_tag[idx] = expected_tag;
- else if (store) tlb_store_tag[idx] = expected_tag;
+ if (type == FETCH) tlb_insn_tag[idx] = expected_tag;
+ else if (type == STORE) tlb_store_tag[idx] = expected_tag;
else tlb_load_tag[idx] = expected_tag;
tlb_data[idx] = mem + pgbase - (addr & -PGSIZE);
return mem + paddr;
}
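
A quick worked example of the indexing above, assuming PGSHIFT is 12 and TLB_ENTRIES is 256 (the values this code base is normally built with):

// addr        = 0x80001abc
// addr >> 12  = 0x80001             -> expected_tag
// idx         = 0x80001 % 256 = 1
// A later access to the same page hits in translate() as long as the
// matching tag array (tlb_insn_tag, tlb_store_tag or tlb_load_tag) still
// holds 0x80001 at index 1; tlb_data[1] + addr then yields the host pointer
// directly, with no new translation.
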
-reg_t mmu_t::walk(reg_t addr, bool supervisor, bool store, bool fetch)
+reg_t mmu_t::walk(reg_t addr, bool supervisor, access_type type)
{
int levels, ptidxbits, ptesize;
switch (get_field(proc->get_state()->mstatus, MSTATUS_VM))
if (PTE_TABLE(pte)) { // next level of page table
base = ppn << PGSHIFT;
- } else if (!PTE_CHECK_PERM(pte, supervisor, store, fetch)) {
+ } else if (!PTE_CHECK_PERM(pte, supervisor, type == STORE, type == FETCH)) {
break;
} else {
// set referenced and possibly dirty bits.
- *(uint32_t*)ppte |= PTE_R | (store * PTE_D);
+ *(uint32_t*)ppte |= PTE_R | ((type == STORE) * PTE_D);
// for superpage mappings, make a fake leaf PTE for the TLB's benefit.
reg_t vpn = addr >> PGSHIFT;
reg_t addr = (ppn | (vpn & ((reg_t(1) << ptshift) - 1))) << PGSHIFT;
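
The referenced/dirty update in the hunk above relies on the implicit bool-to-int conversion of (type == STORE); spelled out, the sketch below is behaviorally equivalent:

uint32_t flags = PTE_R;   // every successful walk marks the page referenced
if (type == STORE)
  flags |= PTE_D;         // only stores additionally mark it dirty
*(uint32_t*)ppte |= flags;
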
// template for functions that load an aligned value from memory
#define load_func(type) \
type##_t load_##type(reg_t addr) __attribute__((always_inline)) { \
- void* paddr = translate(addr, sizeof(type##_t), false, false); \
+ void* paddr = translate(addr, sizeof(type##_t), LOAD); \
return *(type##_t*)paddr; \
}
// template for functions that store an aligned value to memory
#define store_func(type) \
void store_##type(reg_t addr, type##_t val) { \
- void* paddr = translate(addr, sizeof(type##_t), true, false); \
+ void* paddr = translate(addr, sizeof(type##_t), STORE); \
*(type##_t*)paddr = val; \
}
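
For reference, and assuming the header instantiates these macros once per access width (e.g. load_func(uint32)), the load template above expands to roughly:

uint32_t load_uint32(reg_t addr) __attribute__((always_inline)) {
  void* paddr = translate(addr, sizeof(uint32_t), LOAD);  // data access, hence LOAD
  return *(uint32_t*)paddr;
}
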
inline icache_entry_t* refill_icache(reg_t addr, icache_entry_t* entry)
{
- char* iaddr = (char*)translate(addr, 1, false, true);
+ char* iaddr = (char*)translate(addr, 1, FETCH);
insn_bits_t insn = *(uint16_t*)iaddr;
int length = insn_length(insn);
if (likely(addr % PGSIZE < PGSIZE-2))
insn |= (insn_bits_t)*(int16_t*)(iaddr + 2) << 16;
else
- insn |= (insn_bits_t)*(int16_t*)translate(addr + 2, 1, false, true) << 16;
+ insn |= (insn_bits_t)*(int16_t*)translate(addr + 2, 1, FETCH) << 16;
} else if (length == 2) {
insn = (int16_t)insn;
} else if (length == 6) {
- insn |= (insn_bits_t)*(int16_t*)translate(addr + 4, 1, false, true) << 32;
- insn |= (insn_bits_t)*(uint16_t*)translate(addr + 2, 1, false, true) << 16;
+ insn |= (insn_bits_t)*(int16_t*)translate(addr + 4, 1, FETCH) << 32;
+ insn |= (insn_bits_t)*(uint16_t*)translate(addr + 2, 1, FETCH) << 16;
} else {
static_assert(sizeof(insn_bits_t) == 8, "insn_bits_t must be uint64_t");
- insn |= (insn_bits_t)*(int16_t*)translate(addr + 6, 1, false, true) << 48;
- insn |= (insn_bits_t)*(uint16_t*)translate(addr + 4, 1, false, true) << 32;
- insn |= (insn_bits_t)*(uint16_t*)translate(addr + 2, 1, false, true) << 16;
+ insn |= (insn_bits_t)*(int16_t*)translate(addr + 6, 1, FETCH) << 48;
+ insn |= (insn_bits_t)*(uint16_t*)translate(addr + 4, 1, FETCH) << 32;
+ insn |= (insn_bits_t)*(uint16_t*)translate(addr + 2, 1, FETCH) << 16;
}
insn_fetch_t fetch = {proc->decode_insn(insn), insn};
entry->data = fetch;
reg_t paddr = iaddr - mem;
- if (tracer.interested_in_range(paddr, paddr + 1, false, true)) {
+ if (tracer.interested_in_range(paddr, paddr + 1, FETCH)) {
entry->tag = -1;
- tracer.trace(paddr, length, false, true);
+ tracer.trace(paddr, length, FETCH);
}
return entry;
}
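
To make the 6-byte case above concrete, the three halfword fetches compose insn as follows:

// bits [15:0]  : halfword at addr     (read as uint16_t when the entry is refilled)
// bits [31:16] : halfword at addr + 2 (read as uint16_t)
// bits [47:32] : halfword at addr + 4 (read as int16_t, so its sign bit
//                propagates into the otherwise unused bits [63:48])
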
reg_t tlb_store_tag[TLB_ENTRIES];
  // finish translation on a TLB miss and update the TLB
- void* refill_tlb(reg_t addr, reg_t bytes, bool store, bool fetch);
+ void* refill_tlb(reg_t addr, reg_t bytes, access_type type);
// perform a page table walk for a given VA; set referenced/dirty bits
- reg_t walk(reg_t addr, bool supervisor, bool store, bool fetch);
+ reg_t walk(reg_t addr, bool supervisor, access_type type);
// translate a virtual address to a physical address
- void* translate(reg_t addr, reg_t bytes, bool store, bool fetch)
+ void* translate(reg_t addr, reg_t bytes, access_type type)
__attribute__((always_inline))
{
reg_t idx = (addr >> PGSHIFT) % TLB_ENTRIES;
reg_t expected_tag = addr >> PGSHIFT;
- reg_t* tags = fetch ? tlb_insn_tag : store ? tlb_store_tag :tlb_load_tag;
+ reg_t* tags = type == FETCH ? tlb_insn_tag :
+ type == STORE ? tlb_store_tag :
+ tlb_load_tag;
reg_t tag = tags[idx];
void* data = tlb_data[idx] + addr;
if (unlikely(addr & (bytes-1)))
- store ? throw trap_store_address_misaligned(addr) :
- fetch ? throw trap_instruction_address_misaligned(addr) :
- throw trap_load_address_misaligned(addr);
+ type == FETCH ? throw trap_instruction_address_misaligned(addr) :
+ type == STORE ? throw trap_store_address_misaligned(addr) :
+ /* LOAD */ throw trap_load_address_misaligned(addr);
if (likely(tag == expected_tag))
return data;
- return refill_tlb(addr, bytes, store, fetch);
+ return refill_tlb(addr, bytes, type);
}
friend class processor_t;
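
Finally, a small worked example of the alignment check in translate above; the addresses are illustrative. Because bytes is always a power of two, addr & (bytes-1) is nonzero exactly when addr is not a multiple of bytes, and the new type parameter selects which misaligned trap is raised:

// bytes = 4, addr = 0x1002 : 0x1002 & 0x3 = 0x2 -> misaligned
//   type == FETCH -> trap_instruction_address_misaligned(0x1002)
//   type == STORE -> trap_store_address_misaligned(0x1002)
//   type == LOAD  -> trap_load_address_misaligned(0x1002)
// bytes = 4, addr = 0x1004 : 0x1004 & 0x3 = 0x0 -> aligned, fall through to the TLB lookup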