// -*- mode:c++ -*-

// Copyright (c) 2015 Riscv Developers
// Copyright (c) 2016 The University of Virginia
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Alec Roelke

////////////////////////////////////////////////////////////////////
//
// Atomic memory operation instructions
//
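// An AMO is decoded into a macroop that expands into two microops: a load
// microop that reads the original memory value and a store microop that
// writes the updated value back.
//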
output header {{
    class AtomicMemOp : public RiscvMacroInst
    {
      protected:
        /// Constructor
        // Each AtomicMemOp has a load and a store phase
        AtomicMemOp(const char *mnem, ExtMachInst _machInst,
                OpClass __opClass)
            : RiscvMacroInst(mnem, _machInst, __opClass)
        {}

        std::string generateDisassembly(Addr pc,
            const SymbolTable *symtab) const;
    };

    class AtomicMemOpMicro : public RiscvMicroInst
    {
      protected:
        /// Memory request flags. See mem/request.hh.
        Request::Flags memAccessFlags;

        /// Constructor
        AtomicMemOpMicro(const char *mnem, ExtMachInst _machInst,
                OpClass __opClass)
            : RiscvMicroInst(mnem, _machInst, __opClass)
        {}

        std::string generateDisassembly(Addr pc,
            const SymbolTable *symtab) const;
    };
}};

output decoder {{
    std::string AtomicMemOp::generateDisassembly(Addr pc,
        const SymbolTable *symtab) const
    {
        std::stringstream ss;
        ss << csprintf("0x%08x", machInst) << ' ';
        ss << mnemonic << ' ' << regName(_destRegIdx[0]) << ", "
           << regName(_srcRegIdx[1]) << ", ("
           << regName(_srcRegIdx[0]) << ')';
        return ss.str();
    }

    std::string AtomicMemOpMicro::generateDisassembly(Addr pc,
        const SymbolTable *symtab) const
    {
        std::stringstream ss;
        ss << csprintf("0x%08x", machInst) << ' ' << mnemonic;
        return ss.str();
    }
}};
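
// The macroop disassembles with the usual AMO operand order of rd, rs2,
// (rs1), e.g. "amoadd.w a0, a1, (a2)" (register names illustrative); each
// microop prints only its mnemonic.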

def template AtomicMemOpDeclare {{
    /**
     * Static instruction class for an AtomicMemOp operation
     */
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst);

      protected:

        class %(class_name)sLoad : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sLoad(ExtMachInst machInst, %(class_name)s *_p);

            %(BasicExecDeclare)s

            %(EACompDeclare)s

            %(InitiateAccDeclare)s

            %(CompleteAccDeclare)s
        };

        class %(class_name)sStore : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sStore(ExtMachInst machInst, %(class_name)s *_p);

            %(BasicExecDeclare)s

            %(EACompDeclare)s

            %(InitiateAccDeclare)s

            %(CompleteAccDeclare)s
        };
    };
}};

def template AtomicMemOpMacroConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        microops = {new %(class_name)sLoad(machInst, this),
                    new %(class_name)sStore(machInst, this)};
    }
}};

def template AtomicMemOpLoadConstructor {{
    %(class_name)s::%(class_name)sLoad::%(class_name)sLoad(
        ExtMachInst machInst, %(class_name)s *_p)
            : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
    {
        %(constructor)s;
        flags[IsFirstMicroop] = true;
        flags[IsDelayedCommit] = true;
        if (AQ)
            memAccessFlags = Request::ACQUIRE;
    }
}};

def template AtomicMemOpStoreConstructor {{
    %(class_name)s::%(class_name)sStore::%(class_name)sStore(
        ExtMachInst machInst, %(class_name)s *_p)
            : %(base_class)s("%(mnemonic)s[s]", machInst, %(op_class)s)
    {
        %(constructor)s;
        flags[IsLastMicroop] = true;
        flags[IsNonSpeculative] = true;
        if (RL)
            memAccessFlags = Request::RELEASE;
    }
}};
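
// Note the flags set above: the load half is IsFirstMicroop and
// IsDelayedCommit, and the store half is IsLastMicroop and IsNonSpeculative,
// which keeps the write from being performed speculatively and lets the pair
// commit together. The aq bit adds acquire semantics to the load and the rl
// bit adds release semantics to the store.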

def template AtomicMemOpMacroDecode {{
    return new %(class_name)s(machInst);
}};
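
// In the templates below, execute() performs the access in one step for CPU
// models using atomic-mode memory; initiateAcc() and completeAcc() split the
// same work into request and response phases for timing-mode CPUs; eaComp()
// computes only the effective address.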

def template AtomicMemOpLoadExecute {{
    Fault %(class_name)s::%(class_name)sLoad::execute(CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
        }

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpStoreExecute {{
    Fault %(class_name)s::%(class_name)sStore::execute(CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpLoadEACompExecute {{
    Fault %(class_name)s::%(class_name)sLoad::eaComp(CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
            xc->setEA(EA);
        }

        return fault;
    }
}};

def template AtomicMemOpStoreEACompExecute {{
    Fault %(class_name)s::%(class_name)sStore::eaComp(CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
            xc->setEA(EA);
        }

        return fault;
    }
}};

def template AtomicMemOpLoadInitiateAcc {{
    Fault %(class_name)s::%(class_name)sLoad::initiateAcc(CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};

def template AtomicMemOpStoreInitiateAcc {{
    Fault %(class_name)s::%(class_name)sStore::initiateAcc(
        CPU_EXEC_CONTEXT *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpLoadCompleteAcc {{
    Fault %(class_name)s::%(class_name)sLoad::completeAcc(PacketPtr pkt,
        CPU_EXEC_CONTEXT *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpStoreCompleteAcc {{
    Fault %(class_name)s::%(class_name)sStore::completeAcc(PacketPtr pkt,
        CPU_EXEC_CONTEXT *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

def format AtomicMemOp(load_code, store_code, ea_code, load_flags=[],
        store_flags=[], inst_flags=[]) {{
    macro_iop = InstObjParams(name, Name, 'AtomicMemOp', ea_code, inst_flags)
    header_output = AtomicMemOpDeclare.subst(macro_iop)
    decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
    decode_block = AtomicMemOpMacroDecode.subst(macro_iop)
    exec_output = ''

    load_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsLoad"]
    load_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
        {'ea_code': ea_code, 'code': load_code}, load_inst_flags)
    decoder_output += AtomicMemOpLoadConstructor.subst(load_iop)
    exec_output += AtomicMemOpLoadExecute.subst(load_iop) \
        + AtomicMemOpLoadEACompExecute.subst(load_iop) \
        + AtomicMemOpLoadInitiateAcc.subst(load_iop) \
        + AtomicMemOpLoadCompleteAcc.subst(load_iop)

    store_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsStore"]
    store_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
        {'ea_code': ea_code, 'code': store_code}, store_inst_flags)
    decoder_output += AtomicMemOpStoreConstructor.subst(store_iop)
    exec_output += AtomicMemOpStoreExecute.subst(store_iop) \
        + AtomicMemOpStoreEACompExecute.subst(store_iop) \
        + AtomicMemOpStoreInitiateAcc.subst(store_iop) \
        + AtomicMemOpStoreCompleteAcc.subst(store_iop)
}};
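
// A minimal sketch of how a decode block might invoke this format (the
// mnemonic, operand names, and semantics here are illustrative assumptions,
// not taken from decoder.isa):
//
//     0x00: AtomicMemOp::amoadd_w({{
//         Rt_sd = Mem_sw;           // load half: read the old value
//     }}, {{
//         Mem_sw = Rs2_sw + Rt_sd;  // store half: write old + rs2
//         Rd_sd = Rt_sd;            // rd receives the original value
//     }}, {{
//         EA = Rs1;                 // effective address comes from rs1
//     }});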