// -*- mode:c++ -*-

// Copyright (c) 2015 RISC-V Developers
// Copyright (c) 2016 The University of Virginia
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Declaration templates
def template AtomicMemOpDeclare {{
    /**
     * Static instruction class for an AtomicMemOp operation
     */
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst);

      protected:

        /*
         * The main RMW part of an AMO
         */
        class %(class_name)sRMW : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sRMW(ExtMachInst machInst, %(class_name)s *_p);

            Fault execute(ExecContext *, Trace::InstRecord *) const override;
            Fault initiateAcc(ExecContext *,
                              Trace::InstRecord *) const override;
            Fault completeAcc(PacketPtr, ExecContext *,
                              Trace::InstRecord *) const override;
        };
    };
}};

def template LRSCDeclare {{
    /**
     * Static instruction class for an LR/SC operation
     */
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst);

      protected:

        class %(class_name)sMicro : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sMicro(ExtMachInst machInst, %(class_name)s *_p);

            Fault execute(ExecContext *, Trace::InstRecord *) const override;
            Fault initiateAcc(ExecContext *,
                              Trace::InstRecord *) const override;
            Fault completeAcc(PacketPtr, ExecContext *,
                              Trace::InstRecord *) const override;
        };
    };
}};

// Constructor templates
def template LRSCMacroConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst):
        %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;

        StaticInstPtr rel_fence;
        StaticInstPtr lrsc;
        StaticInstPtr acq_fence;

        // set up release fence
        if (RL) {
            rel_fence = new MemFenceMicro(machInst, No_OpClass);
            rel_fence->setFlag(IsFirstMicroop);
            rel_fence->setFlag(IsReadBarrier);
            rel_fence->setFlag(IsWriteBarrier);
            rel_fence->setFlag(IsDelayedCommit);
        }

        // set up atomic rmw op
        lrsc = new %(class_name)sMicro(machInst, this);

        if (!RL) {
            lrsc->setFlag(IsFirstMicroop);
        }

        if (!AQ) {
            lrsc->setFlag(IsLastMicroop);
        } else {
            lrsc->setFlag(IsDelayedCommit);
        }

        // set up acquire fence
        if (AQ) {
            acq_fence = new MemFenceMicro(machInst, No_OpClass);
            acq_fence->setFlag(IsLastMicroop);
            acq_fence->setFlag(IsReadBarrier);
            acq_fence->setFlag(IsWriteBarrier);
        }

        if (RL && AQ) {
            microops = {rel_fence, lrsc, acq_fence};
        } else if (RL) {
            microops = {rel_fence, lrsc};
        } else if (AQ) {
            microops = {lrsc, acq_fence};
        } else {
            microops = {lrsc};
        }
    }
}};
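
// Illustrative note (editorial, not part of the original source): for
// an encoding with both AQ and RL set, e.g. "lr.w.aq.rl", the
// constructor above emits three microops in program order:
//
//     rel_fence   // IsFirstMicroop; read+write barrier; delayed commit
//     lrsc        // the LR/SC access itself; delayed commit
//     acq_fence   // IsLastMicroop; read+write barrier
//
// With neither flag set, the access executes as a single microop that
// is both first and last.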

def template LRSCMicroConstructor {{
    %(class_name)s::%(class_name)sMicro::%(class_name)sMicro(
            ExtMachInst machInst, %(class_name)s *_p)
        : %(base_class)sMicro("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
    }
}};

def template AtomicMemOpMacroConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;

        StaticInstPtr rel_fence;
        StaticInstPtr rmw_op;
        StaticInstPtr acq_fence;

        // set up release fence
        if (RL) {
            rel_fence = new MemFenceMicro(machInst, No_OpClass);
            rel_fence->setFlag(IsFirstMicroop);
            rel_fence->setFlag(IsReadBarrier);
            rel_fence->setFlag(IsWriteBarrier);
            rel_fence->setFlag(IsDelayedCommit);
        }

        // set up atomic rmw op
        rmw_op = new %(class_name)sRMW(machInst, this);

        if (!RL) {
            rmw_op->setFlag(IsFirstMicroop);
        }

        if (!AQ) {
            rmw_op->setFlag(IsLastMicroop);
        } else {
            rmw_op->setFlag(IsDelayedCommit);
        }

        // set up acquire fence
        if (AQ) {
            acq_fence = new MemFenceMicro(machInst, No_OpClass);
            acq_fence->setFlag(IsLastMicroop);
            acq_fence->setFlag(IsReadBarrier);
            acq_fence->setFlag(IsWriteBarrier);
        }

        if (RL && AQ) {
            microops = {rel_fence, rmw_op, acq_fence};
        } else if (RL) {
            microops = {rel_fence, rmw_op};
        } else if (AQ) {
            microops = {rmw_op, acq_fence};
        } else {
            microops = {rmw_op};
        }
    }
}};

def template AtomicMemOpRMWConstructor {{
    %(class_name)s::%(class_name)sRMW::%(class_name)sRMW(
            ExtMachInst machInst, %(class_name)s *_p)
        : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
    {
        %(constructor)s;

        // overwrite default flags
        flags[IsLoad] = false;
        flags[IsStore] = false;
        flags[IsAtomic] = true;
    }
}};
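
// Editorial note: the RMW constructor above clears IsLoad/IsStore and
// sets IsAtomic because the generated microop performs its load and
// store as one indivisible memory transaction (via amoMemAtomicLE /
// initiateMemAMO in the templates below), rather than as two separate
// accesses.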

// execute() templates

def template LoadReservedExecute {{
    Fault
    %(class_name)s::%(class_name)sMicro::execute(
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomicLE(xc, traceData, EA, Mem, memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template StoreCondExecute {{
    Fault %(class_name)s::%(class_name)sMicro::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        uint64_t result;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomicLE(xc, traceData, Mem, EA, memAccessFlags,
                                     &result);
            // RISC-V has the opposite convention gem5 has for success flags,
            // so we invert the result here.
            result = !result;
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpRMWExecute {{
    Fault %(class_name)s::%(class_name)sRMW::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        %(amoop_code)s;

        assert(amo_op);

        if (fault == NoFault) {
            fault = amoMemAtomicLE(xc, traceData, Mem, EA, memAccessFlags,
                                   amo_op);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
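
// Editorial note: the %(amoop_code)s block substituted above is
// expected to bind a pointer named amo_op (an AtomicOpFunctor, in gem5
// terms) describing the read-modify-write; assert(amo_op) in the RMW
// templates enforces that contract. A hedged decoder sketch follows
// the AtomicMemOp format at the end of this file.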

// initiateAcc() templates

def template LoadReservedInitiateAcc {{
    Fault
    %(class_name)s::%(class_name)sMicro::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};

def template StoreCondInitiateAcc {{
    Fault
    %(class_name)s::%(class_name)sMicro::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTimingLE(xc, traceData, Mem, EA,
                                     memAccessFlags, nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpRMWInitiateAcc {{
    Fault
    %(class_name)s::%(class_name)sRMW::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        %(amoop_code)s;

        assert(amo_op);

        if (fault == NoFault) {
            fault = initiateMemAMO(xc, traceData, EA, Mem, memAccessFlags,
                                   amo_op);
        }

        return fault;
    }
}};

// completeAcc() templates

def template LoadReservedCompleteAcc {{
    Fault
    %(class_name)s::%(class_name)sMicro::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMemLE(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template StoreCondCompleteAcc {{
    Fault %(class_name)s::%(class_name)sMicro::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_dest_decl)s;

        // RISC-V has the opposite convention gem5 has for success flags,
        // so we invert the result here.
        uint64_t result = !pkt->req->getExtraData();

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpRMWCompleteAcc {{
    Fault %(class_name)s::%(class_name)sRMW::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMemLE(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// LR/SC/AMO decode formats

def format LoadReserved(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
                        mem_flags=[], inst_flags=[]) {{
    macro_ea_code = ''
    macro_inst_flags = []
    macro_iop = InstObjParams(name, Name, 'LoadReserved', macro_ea_code,
                              macro_inst_flags)
    header_output = LRSCDeclare.subst(macro_iop)
    decoder_output = LRSCMacroConstructor.subst(macro_iop)
    decode_block = BasicDecode.subst(macro_iop)

    exec_output = ''

    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'LoadReserved',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
         'postacc_code': postacc_code}, inst_flags)
    # only emit the flag assignment when flags were given; an empty
    # join would generate invalid C++ ("... | ;")
    if mem_flags:
        iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
            '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    decoder_output += LRSCMicroConstructor.subst(iop)
    decode_block += BasicDecode.subst(iop)
    exec_output += LoadReservedExecute.subst(iop) \
        + LoadReservedInitiateAcc.subst(iop) \
        + LoadReservedCompleteAcc.subst(iop)
}};
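
// Hedged usage sketch (editorial; the opcode, mnemonic, and flag are
// illustrative, not taken from this file): a decoder entry built with
// the LoadReserved format might look like
//
//     0x2: LoadReserved::lr_w({{
//         Rd_sd = Mem_sw;
//     }}, mem_flags=LLSC);
//
// where the LLSC request flag marks the access as a load-link.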

def format StoreCond(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
                     mem_flags=[], inst_flags=[]) {{
    macro_ea_code = ''
    macro_inst_flags = []
    macro_iop = InstObjParams(name, Name, 'StoreCond', macro_ea_code,
                              macro_inst_flags)
    header_output = LRSCDeclare.subst(macro_iop)
    decoder_output = LRSCMacroConstructor.subst(macro_iop)
    decode_block = BasicDecode.subst(macro_iop)

    exec_output = ''

    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'StoreCond',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
         'postacc_code': postacc_code}, inst_flags)
    # guard against an empty flag list, as in LoadReserved above
    if mem_flags:
        iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
            '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    decoder_output += LRSCMicroConstructor.subst(iop)
    decode_block += BasicDecode.subst(iop)
    exec_output += StoreCondExecute.subst(iop) \
        + StoreCondInitiateAcc.subst(iop) \
        + StoreCondCompleteAcc.subst(iop)
}};
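
// Hedged usage sketch (editorial; illustrative only): a StoreCond
// entry passes the store in memacc_code and consumes the inverted
// success flag in postacc_code, e.g.
//
//     0x3: StoreCond::sc_w({{
//         Mem_uw = Rs2_uw;
//     }}, {{
//         Rd = result;
//     }}, mem_flags=LLSC);
//
// Here result is the variable set up by StoreCondExecute/CompleteAcc
// above: 0 on success, non-zero on failure, matching RISC-V.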

def format AtomicMemOp(memacc_code, amoop_code, postacc_code={{ }},
                       ea_code={{EA = Rs1;}}, mem_flags=[], inst_flags=[]) {{
    macro_ea_code = ''
    macro_inst_flags = []
    macro_iop = InstObjParams(name, Name, 'AtomicMemOp', macro_ea_code,
                              macro_inst_flags)
    header_output = AtomicMemOpDeclare.subst(macro_iop)
    decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
    decode_block = BasicDecode.subst(macro_iop)

    exec_output = ''

    rmw_mem_flags = makeList(mem_flags)
    rmw_inst_flags = makeList(inst_flags)
    rmw_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
        {'ea_code': ea_code,
         'memacc_code': memacc_code,
         'postacc_code': postacc_code,
         'amoop_code': amoop_code},
        rmw_inst_flags)

    # guard against an empty flag list, as in the formats above
    if rmw_mem_flags:
        rmw_iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
            '|'.join(['Request::%s' % flag for flag in rmw_mem_flags]) + ';'

    decoder_output += AtomicMemOpRMWConstructor.subst(rmw_iop)
    decode_block += BasicDecode.subst(rmw_iop)
    exec_output += AtomicMemOpRMWExecute.subst(rmw_iop) \
        + AtomicMemOpRMWInitiateAcc.subst(rmw_iop) \
        + AtomicMemOpRMWCompleteAcc.subst(rmw_iop)
}};
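
// Hedged usage sketch (editorial; illustrative only): an AtomicMemOp
// entry supplies amoop_code to construct the amo_op functor that the
// RMW templates assert on. Assuming gem5's AtomicGenericOp helper, an
// amoadd.w entry might read
//
//     0x0: AtomicMemOp::amoadd_w({{
//         Rd_sd = Mem_sw;
//     }}, {{
//         TypedAtomicOpFunctor<int32_t> *amo_op =
//             new AtomicGenericOp<int32_t>(Rs1_sw,
//                 [](int32_t *b, int32_t a) { *b += a; });
//     }}, mem_flags=ATOMIC_RETURN_OP);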