3 // Copyright (c) 2015 Riscv Developers
4 // Copyright (c) 2016 The University of Virginia
5 // All rights reserved.
7 // Redistribution and use in source and binary forms, with or without
8 // modification, are permitted provided that the following conditions are
9 // met: redistributions of source code must retain the above copyright
10 // notice, this list of conditions and the following disclaimer;
11 // redistributions in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the distribution;
14 // neither the name of the copyright holders nor the names of its
15 // contributors may be used to endorse or promote products derived from
16 // this software without specific prior written permission.
18 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 // Authors: Alec Roelke
32 ////////////////////////////////////////////////////////////////////
34 // Atomic memory operation instructions
// Macroop base class for RISC-V atomic memory operations (AMOs). Each AMO
// is decomposed into two microops (load then store) so the memory system
// sees a read-modify-write pair.
// NOTE(review): braces/access specifiers are elided in this excerpt —
// confirm against the full file before editing.
37 class AtomicMemOp : public RiscvMacroInst
41 // Each AtomicMemOp has a load and a store phase
// Forwards mnemonic, raw instruction bits, and op class to the macro base.
42 AtomicMemOp(const char *mnem, ExtMachInst _machInst, OpClass __opClass)
43 : RiscvMacroInst(mnem, _machInst, __opClass)
// Disassembles as "<hex> <mnem> rd, rs2, (rs1)" — see definition below.
46 std::string generateDisassembly(Addr pc,
47 const SymbolTable *symtab) const;
// Microop base class for the load/store halves of an AMO. Carries the
// request flags (set to ACQUIRE or RELEASE by the generated constructors)
// that are passed to the memory-access helpers.
50 class AtomicMemOpMicro : public RiscvMicroInst
53 /// Memory request flags. See mem/request.hh.
54 Request::Flags memAccessFlags;
// Forwards mnemonic and instruction bits to the micro base; remaining
// constructor parameters are elided in this excerpt.
57 AtomicMemOpMicro(const char *mnem, ExtMachInst _machInst,
59 : RiscvMicroInst(mnem, _machInst, __opClass)
// Microop disassembly prints only hex bits + mnemonic (definition below).
62 std::string generateDisassembly(Addr pc,
63 const SymbolTable *symtab) const;
// Disassembles the macroop as "<raw hex> <mnemonic> rd, rs2, (rs1)":
// destination register, source operand register, then the address register
// in parentheses — the conventional RISC-V AMO operand order.
68 std::string AtomicMemOp::generateDisassembly(Addr pc,
69 const SymbolTable *symtab) const
// NOTE(review): stream declaration and return statement are elided in this
// excerpt; `ss` is presumably a std::stringstream — confirm in full file.
72 ss << csprintf("0x%08x", machInst) << ' ';
73 ss << mnemonic << ' ' << regName(_destRegIdx[0]) << ", "
74 << regName(_srcRegIdx[1]) << ", ("
75 << regName(_srcRegIdx[0]) << ')';
// Microop disassembly: just the raw instruction bits followed by the
// mnemonic (which carries a "[l]"/"[s]" suffix from the constructors below).
79 std::string AtomicMemOpMicro::generateDisassembly(Addr pc,
80 const SymbolTable *symtab) const
// NOTE(review): stream declaration and return are elided in this excerpt.
83 ss << csprintf("0x%08x", machInst) << ' ' << mnemonic;
// Header-side declarations generated per AMO instruction: the macroop class
// plus nested-style Load and Store microop classes.
88 def template AtomicMemOpDeclare {{
90 * Static instruction class for an AtomicMemOp operation
92 class %(class_name)s : public %(base_class)s
96 %(class_name)s(ExtMachInst machInst);
// Load microop; receives a pointer to the parent macroop at construction.
100 class %(class_name)sLoad : public %(base_class)sMicro
104 %(class_name)sLoad(ExtMachInst machInst, %(class_name)s *_p);
// Timing-access entry points (declarations substituted by the parser).
110 %(InitiateAccDeclare)s
112 %(CompleteAccDeclare)s
// Store microop; mirrors the Load declaration above.
115 class %(class_name)sStore : public %(base_class)sMicro
119 %(class_name)sStore(ExtMachInst machInst, %(class_name)s *_p);
125 %(InitiateAccDeclare)s
127 %(CompleteAccDeclare)s
// Macroop constructor: builds the two-entry microop sequence. The load half
// comes first and the store half last, matching the IsFirstMicroop /
// IsLastMicroop flags set in the microop constructors below.
132 def template AtomicMemOpMacroConstructor {{
133 %(class_name)s::%(class_name)s(ExtMachInst machInst)
134 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
137 microops = {new %(class_name)sLoad(machInst, this),
138 new %(class_name)sStore(machInst, this)};
// Load-microop constructor. The "[l]" mnemonic suffix distinguishes it in
// disassembly. Delayed commit keeps it from committing before the store
// half, so the pair retires as a unit.
142 def template AtomicMemOpLoadConstructor {{
143 %(class_name)s::%(class_name)sLoad::%(class_name)sLoad(
144 ExtMachInst machInst, %(class_name)s *_p)
145 : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
148 flags[IsFirstMicroop] = true;
149 flags[IsDelayedCommit] = true;
// The read side of the AMO carries acquire semantics.
151 memAccessFlags = Request::ACQUIRE;
// Store-microop constructor ("[s]" suffix in disassembly). Non-speculative:
// the store must not execute until it is known to commit.
155 def template AtomicMemOpStoreConstructor {{
156 %(class_name)s::%(class_name)s::%(class_name)sStore::%(class_name)sStore(
157 ExtMachInst machInst, %(class_name)s *_p)
158 : %(base_class)s("%(mnemonic)s[s]", machInst, %(op_class)s)
161 flags[IsLastMicroop] = true;
162 flags[IsNonSpeculative] = true;
// The write side of the AMO carries release semantics.
164 memAccessFlags = Request::RELEASE;
// Decode-case body: instantiate the macroop for this encoding.
168 def template AtomicMemOpMacroDecode {{
169 return new %(class_name)s(machInst);
// Atomic-mode execute for the load half: (elided) EA computation, then an
// atomic read with the acquire flags, then the per-instruction load code
// and writeback (bodies elided in this excerpt).
172 def template AtomicMemOpLoadExecute {{
173 Fault %(class_name)s::%(class_name)sLoad::execute(CPU_EXEC_CONTEXT *xc,
174 Trace::InstRecord *traceData) const
177 Fault fault = NoFault;
// NOTE(review): operand setup and %(ea_code)s lines are elided here.
183 if (fault == NoFault) {
184 fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
// Each subsequent step only runs if the previous one did not fault.
187 if (fault == NoFault) {
191 if (fault == NoFault) {
// Atomic-mode execute for the store half: (elided) EA computation and the
// per-instruction store code, then an atomic write with the release flags.
199 def template AtomicMemOpStoreExecute {{
200 Fault %(class_name)s::%(class_name)sStore::execute(CPU_EXEC_CONTEXT *xc,
201 Trace::InstRecord *traceData) const
204 Fault fault = NoFault;
// NOTE(review): operand setup, %(ea_code)s, and %(code)s lines elided here.
210 if (fault == NoFault) {
214 if (fault == NoFault) {
215 fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
// Final guarded step (writeback/op-done bookkeeping) elided in this view.
219 if (fault == NoFault) {
// Effective-address computation for the load microop, used by CPU models
// that split address generation from the access itself.
227 def template AtomicMemOpLoadEACompExecute {{
228 Fault %(class_name)s::%(class_name)sLoad::eaComp(CPU_EXEC_CONTEXT *xc,
229 Trace::InstRecord *traceData) const
232 Fault fault = NoFault;
// NOTE(review): %(ea_code)s and the setEA/return lines are elided here.
238 if (fault == NoFault) {
// Effective-address computation for the store microop; parallels the load
// version above.
247 def template AtomicMemOpStoreEACompExecute {{
248 Fault %(class_name)s::%(class_name)sStore::eaComp(CPU_EXEC_CONTEXT *xc,
249 Trace::InstRecord *traceData) const
252 Fault fault = NoFault;
// NOTE(review): %(ea_code)s and the setEA/return lines are elided here.
258 if (fault == NoFault) {
// Timing-mode first half of the load: compute the EA (elided) and launch
// the memory read; the result is consumed later in completeAcc.
267 def template AtomicMemOpLoadInitiateAcc {{
268 Fault %(class_name)s::%(class_name)sLoad::initiateAcc(CPU_EXEC_CONTEXT *xc,
269 Trace::InstRecord *traceData) const
272 Fault fault = NoFault;
// NOTE(review): operand setup and %(ea_code)s lines are elided here.
278 if (fault == NoFault) {
279 fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
// Timing-mode store: runs the per-instruction store code (elided) and
// issues the write; unlike the load there is no data to collect afterwards.
286 def template AtomicMemOpStoreInitiateAcc {{
287 Fault %(class_name)s::%(class_name)sStore::initiateAcc(
288 CPU_EXEC_CONTEXT *xc, Trace::InstRecord *traceData) const
291 Fault fault = NoFault;
// NOTE(review): operand setup, %(ea_code)s, and %(code)s lines elided here.
297 if (fault == NoFault) {
301 if (fault == NoFault) {
302 fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
// Final guarded step (writeback/op-done bookkeeping) elided in this view.
306 if (fault == NoFault) {
// Timing-mode second half of the load: unpack the returned packet into Mem,
// run the per-instruction load code, and write back (code elided here).
314 def template AtomicMemOpLoadCompleteAcc {{
315 Fault %(class_name)s::%(class_name)sLoad::completeAcc(PacketPtr pkt,
316 CPU_EXEC_CONTEXT *xc, Trace::InstRecord *traceData) const
318 Fault fault = NoFault;
// Copy the response data out of the packet into the Mem operand.
323 getMem(pkt, Mem, traceData);
325 if (fault == NoFault) {
329 if (fault == NoFault) {
// Timing-mode completion for the store half. The body is elided in this
// excerpt; presumably it simply returns NoFault, since the store has no
// response data to consume — confirm against the full file.
337 def template AtomicMemOpStoreCompleteAcc {{
338 Fault %(class_name)s::%(class_name)sStore::completeAcc(PacketPtr pkt,
339 CPU_EXEC_CONTEXT *xc, Trace::InstRecord *traceData) const
# Top-level instruction format for AMOs. For each use it generates: the
# macroop declaration, constructor, and decode case, plus a load microop and
# a store microop, each with execute/eaComp/initiateAcc/completeAcc bodies.
# NOTE(review): load_flags and store_flags are accepted but not visibly used
# in this excerpt — confirm against the full file. (They are also mutable
# default arguments; safe only if never mutated.)
345 def format AtomicMemOp(load_code, store_code, ea_code, load_flags=[],
346 store_flags=[], inst_flags=[]) {{
347 macro_iop = InstObjParams(name, Name, 'AtomicMemOp', ea_code, inst_flags)
348 header_output = AtomicMemOpDeclare.subst(macro_iop)
349 decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
350 decode_block = AtomicMemOpMacroDecode.subst(macro_iop)
# Load microop: caller-supplied flags plus memory-read markers.
353 load_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsLoad"]
354 load_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
355 {'ea_code': ea_code, 'code': load_code}, load_inst_flags)
356 decoder_output += AtomicMemOpLoadConstructor.subst(load_iop)
357 exec_output += AtomicMemOpLoadExecute.subst(load_iop) \
358 + AtomicMemOpLoadEACompExecute.subst(load_iop) \
359 + AtomicMemOpLoadInitiateAcc.subst(load_iop) \
360 + AtomicMemOpLoadCompleteAcc.subst(load_iop)
# Store microop: same pattern with the store code and memory-write markers.
362 store_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsStore"]
363 store_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
364 {'ea_code': ea_code, 'code': store_code}, store_inst_flags)
365 decoder_output += AtomicMemOpStoreConstructor.subst(store_iop)
366 exec_output += AtomicMemOpStoreExecute.subst(store_iop) \
367 + AtomicMemOpStoreEACompExecute.subst(store_iop) \
368 + AtomicMemOpStoreInitiateAcc.subst(store_iop) \
369 + AtomicMemOpStoreCompleteAcc.subst(store_iop)