}
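+// Translation path taken when address translation is disabled: SCTLR.M == 0
+// for stage 1, or HCR.VM == 0 for stage 2. The virtual address is used
+// directly as the physical address and default memory attributes are
+// assigned.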
Fault
-TLB::translateFs(const RequestPtr &req, ThreadContext *tc, Mode mode,
- Translation *translation, bool &delay, bool timing,
- TLB::ArmTranslationType tranType, bool functional)
+TLB::translateMmuOff(ThreadContext *tc, const RequestPtr &req, Mode mode,
+ TLB::ArmTranslationType tranType, Addr vaddr, bool long_desc_format)
{
- // No such thing as a functional timing access
- assert(!(timing && functional));
-
- updateMiscReg(tc, tranType);
-
- Addr vaddr_tainted = req->getVaddr();
- Addr vaddr = 0;
- if (aarch64)
- vaddr = purifyTaggedAddr(vaddr_tainted, tc, aarch64EL, ttbcr);
- else
- vaddr = vaddr_tainted;
- Request::Flags flags = req->getFlags();
-
bool is_fetch = (mode == Execute);
- bool is_write = (mode == Write);
- bool long_desc_format = aarch64 || longDescFormatInUse(tc);
- ArmFault::TranMethod tranMethod = long_desc_format ? ArmFault::LpaeTran
- : ArmFault::VmsaTran;
-
- req->setAsid(asid);
-
- DPRINTF(TLBVerbose, "CPSR is priv:%d UserMode:%d secure:%d S1S2NsTran:%d\n",
- isPriv, flags & UserMode, isSecure, tranType & S1S2NsTran);
-
- DPRINTF(TLB, "translateFs addr %#x, mode %d, st2 %d, scr %#x sctlr %#x "
- "flags %#lx tranType 0x%x\n", vaddr_tainted, mode, isStage2,
- scr, sctlr, flags, tranType);
-
- if ((req->isInstFetch() && (!sctlr.i)) ||
- ((!req->isInstFetch()) && (!sctlr.c))){
+ req->setPaddr(vaddr);
+ // When the MMU is off, the security attribute corresponds to the
+ // security state of the processor
+ if (isSecure)
+ req->setFlags(Request::SECURE);
+
+ // @todo: double check this (ARM ARM issue C B3.2.1)
+ if (long_desc_format || sctlr.tre == 0 || nmrr.ir0 == 0 ||
+ nmrr.or0 == 0 || prrr.tr0 != 0x2) {
if (!req->isCacheMaintenance()) {
req->setFlags(Request::UNCACHEABLE);
}
req->setFlags(Request::STRICT_ORDER);
}
- if (!is_fetch) {
- assert(flags & MustBeOne || req->isPrefetch());
- if (sctlr.a || !(flags & AllowUnaligned)) {
- if (vaddr & mask(flags & AlignmentMask)) {
- alignFaults++;
- return std::make_shared<DataAbort>(
- vaddr_tainted,
- TlbEntry::DomainType::NoAccess, is_write,
- ArmFault::AlignmentFault, isStage2,
- tranMethod);
- }
- }
- }
-
- // If guest MMU is off or hcr.vm=0 go straight to stage2
- if ((isStage2 && !hcr.vm) || (!isStage2 && !sctlr.m)) {
- req->setPaddr(vaddr);
- // When the MMU is off the security attribute corresponds to the
- // security state of the processor
- if (isSecure)
- req->setFlags(Request::SECURE);
-
- // @todo: double check this (ARM ARM issue C B3.2.1)
- if (long_desc_format || sctlr.tre == 0 || nmrr.ir0 == 0 ||
- nmrr.or0 == 0 || prrr.tr0 != 0x2) {
- if (!req->isCacheMaintenance()) {
- req->setFlags(Request::UNCACHEABLE);
- }
- req->setFlags(Request::STRICT_ORDER);
- }
-
- // Set memory attributes
- TlbEntry temp_te;
- temp_te.ns = !isSecure;
- if (isStage2 || hcr.dc == 0 || isSecure ||
- (isHyp && !(tranType & S1CTran))) {
-
- temp_te.mtype = is_fetch ? TlbEntry::MemoryType::Normal
- : TlbEntry::MemoryType::StronglyOrdered;
- temp_te.innerAttrs = 0x0;
- temp_te.outerAttrs = 0x0;
- temp_te.shareable = true;
- temp_te.outerShareable = true;
- } else {
- temp_te.mtype = TlbEntry::MemoryType::Normal;
- temp_te.innerAttrs = 0x3;
- temp_te.outerAttrs = 0x3;
- temp_te.shareable = false;
- temp_te.outerShareable = false;
- }
- temp_te.setAttributes(long_desc_format);
- DPRINTF(TLBVerbose, "(No MMU) setting memory attributes: shareable: "
- "%d, innerAttrs: %d, outerAttrs: %d, isStage2: %d\n",
- temp_te.shareable, temp_te.innerAttrs, temp_te.outerAttrs,
- isStage2);
- setAttr(temp_te.attributes);
-
- return testTranslation(req, mode, TlbEntry::DomainType::NoAccess);
+ // Set memory attributes
+ TlbEntry temp_te;
+ temp_te.ns = !isSecure;
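+ // Memory defaults to Strongly-Ordered (Normal for instruction fetches);
+ // non-secure stage 1 accesses with HCR.DC ("default cacheable") set get
+ // Normal, cacheable, non-shareable attributes instead.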
+ if (isStage2 || hcr.dc == 0 || isSecure ||
+ (isHyp && !(tranType & S1CTran))) {
+
+ temp_te.mtype = is_fetch ? TlbEntry::MemoryType::Normal
+ : TlbEntry::MemoryType::StronglyOrdered;
+ temp_te.innerAttrs = 0x0;
+ temp_te.outerAttrs = 0x0;
+ temp_te.shareable = true;
+ temp_te.outerShareable = true;
+ } else {
+ temp_te.mtype = TlbEntry::MemoryType::Normal;
+ temp_te.innerAttrs = 0x3;
+ temp_te.outerAttrs = 0x3;
+ temp_te.shareable = false;
+ temp_te.outerShareable = false;
}
+ temp_te.setAttributes(long_desc_format);
+ DPRINTF(TLBVerbose, "(No MMU) setting memory attributes: shareable: "
+ "%d, innerAttrs: %d, outerAttrs: %d, isStage2: %d\n",
+ temp_te.shareable, temp_te.innerAttrs, temp_te.outerAttrs,
+ isStage2);
+ setAttr(temp_te.attributes);
+
+ return testTranslation(req, mode, TlbEntry::DomainType::NoAccess);
+}
- DPRINTF(TLBVerbose, "Translating %s=%#x context=%d\n",
- isStage2 ? "IPA" : "VA", vaddr_tainted, asid);
- // Translation enabled
-
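+// Translation path taken when address translation is enabled: the request
+// is looked up in the TLB (getResultTe() may trigger a table walk on a
+// miss) and the matching entry's memory attributes and physical address
+// are transferred to the request.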
+Fault
+TLB::translateMmuOn(ThreadContext *tc, const RequestPtr &req, Mode mode,
+ Translation *translation, bool &delay, bool timing,
+ bool functional, Addr vaddr,
+ ArmFault::TranMethod tranMethod)
+{
TlbEntry *te = NULL;
+ bool is_fetch = (mode == Execute);
TlbEntry mergeTe;
+
+ Request::Flags flags = req->getFlags();
+ Addr vaddr_tainted = req->getVaddr();
+
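+ // Look up the translation; for combined stage 1 + stage 2 translations
+ // the merged entry may be returned through mergeTe.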
Fault fault = getResultTe(&te, req, tc, mode, translation, timing,
functional, &mergeTe);
// only proceed if we have a valid table entry
// Unaligned accesses to Device memory should always cause an
// abort regardless of sctlr.a
alignFaults++;
+ bool is_write = (mode == Write);
return std::make_shared<DataAbort>(
vaddr_tainted,
TlbEntry::DomainType::NoAccess, is_write,
}
}
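+// Full-system translation entry point: perform the checks common to both
+// paths (cache-enable bits, alignment) and then hand over to either
+// translateMmuOff() or translateMmuOn() depending on whether address
+// translation is currently enabled.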
+Fault
+TLB::translateFs(const RequestPtr &req, ThreadContext *tc, Mode mode,
+ Translation *translation, bool &delay, bool timing,
+ TLB::ArmTranslationType tranType, bool functional)
+{
+ // No such thing as a functional timing access
+ assert(!(timing && functional));
+
+ updateMiscReg(tc, tranType);
+
+ Addr vaddr_tainted = req->getVaddr();
+ Addr vaddr = 0;
+ if (aarch64)
+ vaddr = purifyTaggedAddr(vaddr_tainted, tc, aarch64EL, ttbcr);
+ else
+ vaddr = vaddr_tainted;
+ Request::Flags flags = req->getFlags();
+
+ bool is_fetch = (mode == Execute);
+ bool is_write = (mode == Write);
+ bool long_desc_format = aarch64 || longDescFormatInUse(tc);
+ ArmFault::TranMethod tranMethod = long_desc_format ? ArmFault::LpaeTran
+ : ArmFault::VmsaTran;
+
+ req->setAsid(asid);
+
+ DPRINTF(TLBVerbose, "CPSR is priv:%d UserMode:%d secure:%d S1S2NsTran:%d\n",
+ isPriv, flags & UserMode, isSecure, tranType & S1S2NsTran);
+
+ DPRINTF(TLB, "translateFs addr %#x, mode %d, st2 %d, scr %#x sctlr %#x "
+ "flags %#lx tranType 0x%x\n", vaddr_tainted, mode, isStage2,
+ scr, sctlr, flags, tranType);
+
+ if ((req->isInstFetch() && (!sctlr.i)) ||
+ ((!req->isInstFetch()) && (!sctlr.c))) {
+ if (!req->isCacheMaintenance()) {
+ req->setFlags(Request::UNCACHEABLE);
+ }
+ req->setFlags(Request::STRICT_ORDER);
+ }
+ if (!is_fetch) {
+ assert(flags & MustBeOne || req->isPrefetch());
+ if (sctlr.a || !(flags & AllowUnaligned)) {
+ if (vaddr & mask(flags & AlignmentMask)) {
+ alignFaults++;
+ return std::make_shared<DataAbort>(
+ vaddr_tainted,
+ TlbEntry::DomainType::NoAccess, is_write,
+ ArmFault::AlignmentFault, isStage2,
+ tranMethod);
+ }
+ }
+ }
+
+ // If the stage 1 MMU is off (SCTLR.M == 0), or stage 2 translation is
+ // disabled (HCR.VM == 0), the address is not translated
+ if ((isStage2 && !hcr.vm) || (!isStage2 && !sctlr.m)) {
+ return translateMmuOff(tc, req, mode, tranType, vaddr,
+ long_desc_format);
+ } else {
+ DPRINTF(TLBVerbose, "Translating %s=%#x context=%d\n",
+ isStage2 ? "IPA" : "VA", vaddr_tainted, asid);
+ // Translation enabled
+ return translateMmuOn(tc, req, mode, translation, delay, timing,
+ functional, vaddr, tranMethod);
+ }
+}
+
Fault
TLB::translateAtomic(const RequestPtr &req, ThreadContext *tc, Mode mode,
TLB::ArmTranslationType tranType)