src/arch/isa_parser.py
1 # Copyright (c) 2014, 2016, 2018-2019 ARM Limited
2 # All rights reserved
3 #
4 # The license below extends only to copyright in the software and shall
5 # not be construed as granting a license to any other intellectual
6 # property including but not limited to intellectual property relating
7 # to a hardware implementation of the functionality of the software
8 # licensed hereunder. You may use the software subject to the license
9 # terms below provided that you ensure that this notice is replicated
10 # unmodified and in its entirety in all distributions of the software,
11 # modified or unmodified, in source code or in binary form.
12 #
13 # Copyright (c) 2003-2005 The Regents of The University of Michigan
14 # Copyright (c) 2013,2015 Advanced Micro Devices, Inc.
15 # All rights reserved.
16 #
17 # Redistribution and use in source and binary forms, with or without
18 # modification, are permitted provided that the following conditions are
19 # met: redistributions of source code must retain the above copyright
20 # notice, this list of conditions and the following disclaimer;
21 # redistributions in binary form must reproduce the above copyright
22 # notice, this list of conditions and the following disclaimer in the
23 # documentation and/or other materials provided with the distribution;
24 # neither the name of the copyright holders nor the names of its
25 # contributors may be used to endorse or promote products derived from
26 # this software without specific prior written permission.
27 #
28 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
29 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
30 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
31 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
32 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
33 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
34 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
35 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
36 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
37 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
38 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
39 #
40 # Authors: Steve Reinhardt
41
42 from __future__ import with_statement, print_function
43 import os
44 import sys
45 import re
46 import string
47 import inspect, traceback
48 # get type names
49 from types import *
50
51 from m5.util.grammar import Grammar
52
53 debug=False
54
55 ###################
56 # Utility functions
57
58 #
59 # Indent every line in string 's' by two spaces
60 # (except preprocessor directives).
61 # Used to make nested code blocks look pretty.
62 #
63 def indent(s):
64 return re.sub(r'(?m)^(?!#)', ' ', s)
65
66 #
67 # Munge a somewhat arbitrarily formatted piece of Python code
68 # (e.g. from a format 'let' block) into something whose indentation
69 # will get by the Python parser.
70 #
71 # The two keys here are that Python will give a syntax error if
72 # there's any whitespace at the beginning of the first line, and that
73 # all lines at the same lexical nesting level must have identical
74 # indentation. Unfortunately the way code literals work, an entire
75 # let block tends to have some initial indentation. Rather than
76 # trying to figure out what that is and strip it off, we prepend 'if
77 # 1:' to make the let code the nested block inside the if (and have
78 # the parser automatically deal with the indentation for us).
79 #
80 # We don't want to do this if (1) the code block is empty or (2) the
81 # first line of the block doesn't have any whitespace at the front.
82
83 def fixPythonIndentation(s):
84 # get rid of blank lines first
85 s = re.sub(r'(?m)^\s*\n', '', s);
86 if (s != '' and re.match(r'[ \t]', s[0])):
87 s = 'if 1:\n' + s
88 return s
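
# Illustrative sketch (editor's note, not part of the original source): once the
# blank line is stripped and leading indentation is detected, the 'let' body
# becomes a nested suite that exec can digest, e.g.
#
#   fixPythonIndentation("    x = 1\n\n    y = 2\n")
#   # -> "if 1:\n    x = 1\n    y = 2\n"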
89
90 class ISAParserError(Exception):
91 """Exception class for parser errors"""
92 def __init__(self, first, second=None):
93 if second is None:
94 self.lineno = 0
95 self.string = first
96 else:
97 self.lineno = first
98 self.string = second
99
100 def __str__(self):
101 return self.string
102
103 def error(*args):
104 raise ISAParserError(*args)
105
106 ####################
107 # Template objects.
108 #
109 # Template objects are format strings that allow substitution from
110 # the attribute spaces of other objects (e.g. InstObjParams instances).
111
112 labelRE = re.compile(r'(?<!%)%\(([^\)]+)\)[sd]')
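
# Illustrative example (editor's note, not in the original file): labelRE picks
# out the %(name)s / %(name)d placeholders, while the negative lookbehind skips
# doubled percent signs, e.g.
#
#   labelRE.findall('%(op_decl)s %(op_rd)s %%(literal)s')
#   # -> ['op_decl', 'op_rd']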
113
114 class Template(object):
115 def __init__(self, parser, t):
116 self.parser = parser
117 self.template = t
118
119 def subst(self, d):
120 myDict = None
121
122 # Protect non-Python-dict substitutions (e.g. if there's a printf
123 # in the templated C++ code)
124 template = self.parser.protectNonSubstPercents(self.template)
125
126 # Build a dict ('myDict') to use for the template substitution.
127 # Start with the template namespace. Make a copy since we're
128 # going to modify it.
129 myDict = self.parser.templateMap.copy()
130
131 if isinstance(d, InstObjParams):
132 # If we're dealing with an InstObjParams object, we need
133 # to be a little more sophisticated. The instruction-wide
134 # parameters are already formed, but the parameters which
135 # are only function wide still need to be generated.
136 compositeCode = ''
137
138 myDict.update(d.__dict__)
139 # The "operands" and "snippets" attributes of the InstObjParams
140 # objects are for internal use and not substitution.
141 del myDict['operands']
142 del myDict['snippets']
143
144 snippetLabels = [l for l in labelRE.findall(template)
145 if l in d.snippets]
146
147 snippets = dict([(s, self.parser.mungeSnippet(d.snippets[s]))
148 for s in snippetLabels])
149
150 myDict.update(snippets)
151
152 compositeCode = ' '.join(map(str, snippets.values()))
153
154 # Add in template itself in case it references any
155 # operands explicitly (like Mem)
156 compositeCode += ' ' + template
157
158 operands = SubOperandList(self.parser, compositeCode, d.operands)
159
160 myDict['op_decl'] = operands.concatAttrStrings('op_decl')
161 if operands.readPC or operands.setPC:
162 myDict['op_decl'] += 'TheISA::PCState __parserAutoPCState;\n'
163
164 # In case there are predicated register reads and writes, declare
165 # the variables for register indices. It is being assumed that
166 # all the operands in the OperandList are also in the
167 # SubOperandList and in the same order. Otherwise, it is
168 # expected that predication would not be used for the operands.
169 if operands.predRead:
170 myDict['op_decl'] += 'uint8_t _sourceIndex = 0;\n'
171 if operands.predWrite:
172 myDict['op_decl'] += 'uint8_t M5_VAR_USED _destIndex = 0;\n'
173
174 is_src = lambda op: op.is_src
175 is_dest = lambda op: op.is_dest
176
177 myDict['op_src_decl'] = \
178 operands.concatSomeAttrStrings(is_src, 'op_src_decl')
179 myDict['op_dest_decl'] = \
180 operands.concatSomeAttrStrings(is_dest, 'op_dest_decl')
181 if operands.readPC:
182 myDict['op_src_decl'] += \
183 'TheISA::PCState __parserAutoPCState;\n'
184 if operands.setPC:
185 myDict['op_dest_decl'] += \
186 'TheISA::PCState __parserAutoPCState;\n'
187
188 myDict['op_rd'] = operands.concatAttrStrings('op_rd')
189 if operands.readPC:
190 myDict['op_rd'] = '__parserAutoPCState = xc->pcState();\n' + \
191 myDict['op_rd']
192
193 # Compose the op_wb string. If we're going to write back the
194 # PC state because we changed some of its elements, we'll need to
195 # do that as early as possible. That allows later uncoordinated
196 # modifications to the PC to layer appropriately.
197 reordered = list(operands.items)
198 reordered.reverse()
199 op_wb_str = ''
200 pcWbStr = 'xc->pcState(__parserAutoPCState);\n'
201 for op_desc in reordered:
202 if op_desc.isPCPart() and op_desc.is_dest:
203 op_wb_str = op_desc.op_wb + pcWbStr + op_wb_str
204 pcWbStr = ''
205 else:
206 op_wb_str = op_desc.op_wb + op_wb_str
207 myDict['op_wb'] = op_wb_str
208
209 elif isinstance(d, dict):
210 # if the argument is a dictionary, we just use it.
211 myDict.update(d)
212 elif hasattr(d, '__dict__'):
213 # if the argument is an object, we use its attribute map.
214 myDict.update(d.__dict__)
215 else:
216 raise TypeError, "Template.subst() arg must be or have dictionary"
217 return template % myDict
218
219 # Convert to string.
220 def __str__(self):
221 return self.template
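
# Minimal usage sketch (editor's note, not in the original file); 'parser' is
# assumed to be an ISAParser instance and the template text is hypothetical:
#
#   ctor = Template(parser, '%(class_name)s::%(class_name)s()')
#   ctor.subst({'class_name': 'AddImm'})
#   # -> 'AddImm::AddImm()'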
222
223 ################
224 # Format object.
225 #
226 # A format object encapsulates an instruction format. It must provide
227 # a defineInst() method that generates the code for an instruction
228 # definition.
229
230 class Format(object):
231 def __init__(self, id, params, code):
232 self.id = id
233 self.params = params
234 label = 'def format ' + id
235 self.user_code = compile(fixPythonIndentation(code), label, 'exec')
236 param_list = string.join(params, ", ")
237 f = '''def defInst(_code, _context, %s):
238 my_locals = vars().copy()
239 exec _code in _context, my_locals
240 return my_locals\n''' % param_list
241 c = compile(f, label + ' wrapper', 'exec')
242 exec c
243 self.func = defInst
244
245 def defineInst(self, parser, name, args, lineno):
246 parser.updateExportContext()
247 context = parser.exportContext.copy()
248 if len(name):
249 Name = name[0].upper()
250 if len(name) > 1:
251 Name += name[1:]
252 context.update({ 'name' : name, 'Name' : Name })
253 try:
254 vars = self.func(self.user_code, context, *args[0], **args[1])
255 except Exception, exc:
256 if debug:
257 raise
258 error(lineno, 'error defining "%s": %s.' % (name, exc))
259 for k in vars.keys():
260 if k not in ('header_output', 'decoder_output',
261 'exec_output', 'decode_block'):
262 del vars[k]
263 return GenCode(parser, **vars)
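
# For reference (editor's sketch, not in the original file), this class backs
# DSL blocks of the following shape in an ISA description; the template names
# used here (BasicDeclare etc.) are hypothetical:
#
#   def format BasicOp(code, *opt_flags) {{
#       iop = InstObjParams(name, Name, 'BasicOperation', code, opt_flags)
#       header_output = BasicDeclare.subst(iop)
#       decoder_output = BasicConstructor.subst(iop)
#       decode_block = BasicDecode.subst(iop)
#       exec_output = BasicExecute.subst(iop)
#   }};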
264
265 # Special null format to catch an implicit-format instruction
266 # definition outside of any format block.
267 class NoFormat(object):
268 def __init__(self):
269 self.defaultInst = ''
270
271 def defineInst(self, parser, name, args, lineno):
272 error(lineno,
273 'instruction definition "%s" with no active format!' % name)
274
275 ###############
276 # GenCode class
277 #
278 # The GenCode class encapsulates generated code destined for various
279 # output files. The header_output and decoder_output attributes are
280 # strings containing code destined for decoder.hh and decoder.cc
281 # respectively. The decode_block attribute contains code to be
282 # incorporated in the decode function itself (that will also end up in
283 # decoder.cc). The exec_output attribute is the string of code for the
284 # exec.cc file. The has_decode_default attribute is used in the decode block
285 # to allow explicit default clauses to override default default clauses.
286
287 class GenCode(object):
288 # Constructor.
289 def __init__(self, parser,
290 header_output = '', decoder_output = '', exec_output = '',
291 decode_block = '', has_decode_default = False):
292 self.parser = parser
293 self.header_output = header_output
294 self.decoder_output = decoder_output
295 self.exec_output = exec_output
296 self.decode_block = decode_block
297 self.has_decode_default = has_decode_default
298
299 # Write these code chunks out to the filesystem. They will be properly
300 # interwoven by write_top_level_files().
301 def emit(self):
302 if self.header_output:
303 self.parser.get_file('header').write(self.header_output)
304 if self.decoder_output:
305 self.parser.get_file('decoder').write(self.decoder_output)
306 if self.exec_output:
307 self.parser.get_file('exec').write(self.exec_output)
308 if self.decode_block:
309 self.parser.get_file('decode_block').write(self.decode_block)
310
311 # Override '+' operator: generate a new GenCode object that
312 # concatenates all the individual strings in the operands.
313 def __add__(self, other):
314 return GenCode(self.parser,
315 self.header_output + other.header_output,
316 self.decoder_output + other.decoder_output,
317 self.exec_output + other.exec_output,
318 self.decode_block + other.decode_block,
319 self.has_decode_default or other.has_decode_default)
320
321 # Prepend a string (typically a comment) to all the strings.
322 def prepend_all(self, pre):
323 self.header_output = pre + self.header_output
324 self.decoder_output = pre + self.decoder_output
325 self.decode_block = pre + self.decode_block
326 self.exec_output = pre + self.exec_output
327
328 # Wrap the decode block in a pair of strings (e.g., 'case foo:'
329 # and 'break;'). Used to build the big nested switch statement.
330 def wrap_decode_block(self, pre, post = ''):
331 self.decode_block = pre + indent(self.decode_block) + post
332
333 #####################################################################
334 #
335 # Bitfield Operator Support
336 #
337 #####################################################################
338
339 bitOp1ArgRE = re.compile(r'<\s*(\w+)\s*>')
340
341 bitOpWordRE = re.compile(r'(?<![\w\.])([\w\.]+)<\s*(\w+)\s*:\s*(\w+)\s*>')
342 bitOpExprRE = re.compile(r'\)<\s*(\w+)\s*:\s*(\w+)\s*>')
343
344 def substBitOps(code):
345 # first convert single-bit selectors to two-index form
346 # i.e., <n> --> <n:n>
347 code = bitOp1ArgRE.sub(r'<\1:\1>', code)
348 # simple case: selector applied to ID (name)
349 # i.e., foo<a:b> --> bits(foo, a, b)
350 code = bitOpWordRE.sub(r'bits(\1, \2, \3)', code)
351 # if selector is applied to expression (ending in ')'),
352 # we need to search backward for matching '('
353 match = bitOpExprRE.search(code)
354 while match:
355 exprEnd = match.start()
356 here = exprEnd - 1
357 nestLevel = 1
358 while nestLevel > 0:
359 if code[here] == '(':
360 nestLevel -= 1
361 elif code[here] == ')':
362 nestLevel += 1
363 here -= 1
364 if here < 0:
365 sys.exit("Didn't find '('!")
366 exprStart = here+1
367 newExpr = r'bits(%s, %s, %s)' % (code[exprStart:exprEnd+1],
368 match.group(1), match.group(2))
369 code = code[:exprStart] + newExpr + code[match.end():]
370 match = bitOpExprRE.search(code)
371 return code
372
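# Worked example (editor's note, not in the original file); the operand names
# are hypothetical:
#
#   substBitOps('Rd = Rm<7:0> | (Rn + 4)<3>;')
#   # -> 'Rd = bits(Rm, 7, 0) | bits((Rn + 4), 3, 3);'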
373
374 #####################################################################
375 #
376 # Code Parser
377 #
378 # The remaining code is the support for automatically extracting
379 # instruction characteristics from pseudocode.
380 #
381 #####################################################################
382
383 # Force the argument to be a list. Useful for flags, where a caller
384 # can specify a singleton flag or a list of flags. Also useful for
385 # converting tuples to lists so they can be modified.
386 def makeList(arg):
387 if isinstance(arg, list):
388 return arg
389 elif isinstance(arg, tuple):
390 return list(arg)
391 elif not arg:
392 return []
393 else:
394 return [ arg ]
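
# Behavior sketch (editor's note, not in the original file):
#
#   makeList('IsLoad')    -> ['IsLoad']
#   makeList(('a', 'b'))  -> ['a', 'b']
#   makeList(None)        -> []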
395
396 class Operand(object):
397 '''Base class for operand descriptors. An instance of this class
398 (or actually a class derived from this one) represents a specific
399 operand for a code block (e.g., "Rc.sq" as a dest). Intermediate
400 derived classes encapsulate the traits of a particular operand
401 type (e.g., "32-bit integer register").'''
402
403 def buildReadCode(self, func = None):
404 subst_dict = {"name": self.base_name,
405 "func": func,
406 "reg_idx": self.reg_spec,
407 "ctype": self.ctype}
408 if hasattr(self, 'src_reg_idx'):
409 subst_dict['op_idx'] = self.src_reg_idx
410 code = self.read_code % subst_dict
411 return '%s = %s;\n' % (self.base_name, code)
412
413 def buildWriteCode(self, func = None):
414 subst_dict = {"name": self.base_name,
415 "func": func,
416 "reg_idx": self.reg_spec,
417 "ctype": self.ctype,
418 "final_val": self.base_name}
419 if hasattr(self, 'dest_reg_idx'):
420 subst_dict['op_idx'] = self.dest_reg_idx
421 code = self.write_code % subst_dict
422 return '''
423 {
424 %s final_val = %s;
425 %s;
426 if (traceData) { traceData->setData(final_val); }
427 }''' % (self.dflt_ctype, self.base_name, code)
428
429 def __init__(self, parser, full_name, ext, is_src, is_dest):
430 self.full_name = full_name
431 self.ext = ext
432 self.is_src = is_src
433 self.is_dest = is_dest
434 # The 'effective extension' (eff_ext) is either the actual
435 # extension, if one was explicitly provided, or the default.
436 if ext:
437 self.eff_ext = ext
438 elif hasattr(self, 'dflt_ext'):
439 self.eff_ext = self.dflt_ext
440
441 if hasattr(self, 'eff_ext'):
442 self.ctype = parser.operandTypeMap[self.eff_ext]
443
444 # Finalize additional fields (primarily code fields). This step
445 # is done separately since some of these fields may depend on the
446 # register index enumeration that hasn't been performed yet at the
447 # time of __init__(). The register index enumeration is affected
448 # by predicated register reads/writes. Hence, we forward the flags
449 # that indicate whether or not predication is in use.
450 def finalize(self, predRead, predWrite):
451 self.flags = self.getFlags()
452 self.constructor = self.makeConstructor(predRead, predWrite)
453 self.op_decl = self.makeDecl()
454
455 if self.is_src:
456 self.op_rd = self.makeRead(predRead)
457 self.op_src_decl = self.makeDecl()
458 else:
459 self.op_rd = ''
460 self.op_src_decl = ''
461
462 if self.is_dest:
463 self.op_wb = self.makeWrite(predWrite)
464 self.op_dest_decl = self.makeDecl()
465 else:
466 self.op_wb = ''
467 self.op_dest_decl = ''
468
469 def isMem(self):
470 return 0
471
472 def isReg(self):
473 return 0
474
475 def isFloatReg(self):
476 return 0
477
478 def isIntReg(self):
479 return 0
480
481 def isCCReg(self):
482 return 0
483
484 def isControlReg(self):
485 return 0
486
487 def isVecReg(self):
488 return 0
489
490 def isVecElem(self):
491 return 0
492
493 def isVecPredReg(self):
494 return 0
495
496 def isPCState(self):
497 return 0
498
499 def isPCPart(self):
500 return self.isPCState() and self.reg_spec
501
502 def hasReadPred(self):
503 return self.read_predicate != None
504
505 def hasWritePred(self):
506 return self.write_predicate != None
507
508 def getFlags(self):
509 # note the empty slice '[:]' gives us a copy of self.flags[0]
510 # instead of a reference to it
511 my_flags = self.flags[0][:]
512 if self.is_src:
513 my_flags += self.flags[1]
514 if self.is_dest:
515 my_flags += self.flags[2]
516 return my_flags
517
518 def makeDecl(self):
519 # Note that initializations in the declarations are solely
520 # to avoid 'uninitialized variable' errors from the compiler.
521 return self.ctype + ' ' + self.base_name + ' = 0;\n';
522
523
524 src_reg_constructor = '\n\t_srcRegIdx[_numSrcRegs++] = RegId(%s, %s);'
525 dst_reg_constructor = '\n\t_destRegIdx[_numDestRegs++] = RegId(%s, %s);'
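
# Illustrative expansion (editor's note, not in the original file); 'RS1' stands
# in for an operand's reg_spec:
#
#   src_reg_constructor % ('IntRegClass', 'RS1')
#   # -> '\n\t_srcRegIdx[_numSrcRegs++] = RegId(IntRegClass, RS1);'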
526
527
528 class IntRegOperand(Operand):
529 reg_class = 'IntRegClass'
530
531 def isReg(self):
532 return 1
533
534 def isIntReg(self):
535 return 1
536
537 def makeConstructor(self, predRead, predWrite):
538 c_src = ''
539 c_dest = ''
540
541 if self.is_src:
542 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
543 if self.hasReadPred():
544 c_src = '\n\tif (%s) {%s\n\t}' % \
545 (self.read_predicate, c_src)
546
547 if self.is_dest:
548 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
549 c_dest += '\n\t_numIntDestRegs++;'
550 if self.hasWritePred():
551 c_dest = '\n\tif (%s) {%s\n\t}' % \
552 (self.write_predicate, c_dest)
553
554 return c_src + c_dest
555
556 def makeRead(self, predRead):
557 if (self.ctype == 'float' or self.ctype == 'double'):
558 error('Attempt to read integer register as FP')
559 if self.read_code != None:
560 return self.buildReadCode('readIntRegOperand')
561
562 int_reg_val = ''
563 if predRead:
564 int_reg_val = 'xc->readIntRegOperand(this, _sourceIndex++)'
565 if self.hasReadPred():
566 int_reg_val = '(%s) ? %s : 0' % \
567 (self.read_predicate, int_reg_val)
568 else:
569 int_reg_val = 'xc->readIntRegOperand(this, %d)' % self.src_reg_idx
570
571 return '%s = %s;\n' % (self.base_name, int_reg_val)
572
573 def makeWrite(self, predWrite):
574 if (self.ctype == 'float' or self.ctype == 'double'):
575 error('Attempt to write integer register as FP')
576 if self.write_code != None:
577 return self.buildWriteCode('setIntRegOperand')
578
579 if predWrite:
580 wp = 'true'
581 if self.hasWritePred():
582 wp = self.write_predicate
583
584 wcond = 'if (%s)' % (wp)
585 windex = '_destIndex++'
586 else:
587 wcond = ''
588 windex = '%d' % self.dest_reg_idx
589
590 wb = '''
591 %s
592 {
593 %s final_val = %s;
594 xc->setIntRegOperand(this, %s, final_val);\n
595 if (traceData) { traceData->setData(final_val); }
596 }''' % (wcond, self.ctype, self.base_name, windex)
597
598 return wb
599
600 class FloatRegOperand(Operand):
601 reg_class = 'FloatRegClass'
602
603 def isReg(self):
604 return 1
605
606 def isFloatReg(self):
607 return 1
608
609 def makeConstructor(self, predRead, predWrite):
610 c_src = ''
611 c_dest = ''
612
613 if self.is_src:
614 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
615
616 if self.is_dest:
617 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
618 c_dest += '\n\t_numFPDestRegs++;'
619
620 return c_src + c_dest
621
622 def makeRead(self, predRead):
623 if self.read_code != None:
624 return self.buildReadCode('readFloatRegOperandBits')
625
626 if predRead:
627 rindex = '_sourceIndex++'
628 else:
629 rindex = '%d' % self.src_reg_idx
630
631 code = 'xc->readFloatRegOperandBits(this, %s)' % rindex
632 if self.ctype == 'float':
633 code = 'bitsToFloat32(%s)' % code
634 elif self.ctype == 'double':
635 code = 'bitsToFloat64(%s)' % code
636 return '%s = %s;\n' % (self.base_name, code)
637
638 def makeWrite(self, predWrite):
639 if self.write_code != None:
640 return self.buildWriteCode('setFloatRegOperandBits')
641
642 if predWrite:
643 wp = '_destIndex++'
644 else:
645 wp = '%d' % self.dest_reg_idx
646
647 val = 'final_val'
648 if self.ctype == 'float':
649 val = 'floatToBits32(%s)' % val
650 elif self.ctype == 'double':
651 val = 'floatToBits64(%s)' % val
652
653 wp = 'xc->setFloatRegOperandBits(this, %s, %s);' % (wp, val)
654
655 wb = '''
656 {
657 %s final_val = %s;
658 %s\n
659 if (traceData) { traceData->setData(final_val); }
660 }''' % (self.ctype, self.base_name, wp)
661 return wb
662
663 class VecRegOperand(Operand):
664 reg_class = 'VecRegClass'
665
666 def __init__(self, parser, full_name, ext, is_src, is_dest):
667 Operand.__init__(self, parser, full_name, ext, is_src, is_dest)
668 self.elemExt = None
669 self.parser = parser
670
671 def isReg(self):
672 return 1
673
674 def isVecReg(self):
675 return 1
676
677 def makeDeclElem(self, elem_op):
678 (elem_name, elem_ext) = elem_op
679 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
680 if elem_ext:
681 ext = elem_ext
682 else:
683 ext = dflt_elem_ext
684 ctype = self.parser.operandTypeMap[ext]
685 return '\n\t%s %s = 0;' % (ctype, elem_name)
686
687 def makeDecl(self):
688 if not self.is_dest and self.is_src:
689 c_decl = '\t/* Vars for %s*/' % (self.base_name)
690 if hasattr(self, 'active_elems'):
691 if self.active_elems:
692 for elem in self.active_elems:
693 c_decl += self.makeDeclElem(elem)
694 return c_decl + '\t/* End vars for %s */\n' % (self.base_name)
695 else:
696 return ''
697
698 def makeConstructor(self, predRead, predWrite):
699 c_src = ''
700 c_dest = ''
701
702 numAccessNeeded = 1
703
704 if self.is_src:
705 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
706
707 if self.is_dest:
708 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
709 c_dest += '\n\t_numVecDestRegs++;'
710
711 return c_src + c_dest
712
713 # Read destination register to write
714 def makeReadWElem(self, elem_op):
715 (elem_name, elem_ext) = elem_op
716 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
717 if elem_ext:
718 ext = elem_ext
719 else:
720 ext = dflt_elem_ext
721 ctype = self.parser.operandTypeMap[ext]
722 c_read = '\t\t%s& %s = %s[%s];\n' % \
723 (ctype, elem_name, self.base_name, elem_spec)
724 return c_read
725
726 def makeReadW(self, predWrite):
727 func = 'getWritableVecRegOperand'
728 if self.read_code != None:
729 return self.buildReadCode(func)
730
731 if predWrite:
732 rindex = '_destIndex++'
733 else:
734 rindex = '%d' % self.dest_reg_idx
735
736 c_readw = '\t\t%s& tmp_d%s = xc->%s(this, %s);\n'\
737 % ('TheISA::VecRegContainer', rindex, func, rindex)
738 if self.elemExt:
739 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (self.base_name,
740 rindex, self.parser.operandTypeMap[self.elemExt])
741 if self.ext:
742 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (self.base_name,
743 rindex, self.parser.operandTypeMap[self.ext])
744 if hasattr(self, 'active_elems'):
745 if self.active_elems:
746 for elem in self.active_elems:
747 c_readw += self.makeReadWElem(elem)
748 return c_readw
749
750 # Normal source operand read
751 def makeReadElem(self, elem_op, name):
752 (elem_name, elem_ext) = elem_op
753 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
754
755 if elem_ext:
756 ext = elem_ext
757 else:
758 ext = dflt_elem_ext
759 ctype = self.parser.operandTypeMap[ext]
760 c_read = '\t\t%s = %s[%s];\n' % \
761 (elem_name, name, elem_spec)
762 return c_read
763
764 def makeRead(self, predRead):
765 func = 'readVecRegOperand'
766 if self.read_code != None:
767 return self.buildReadCode(func)
768
769 if predRead:
770 rindex = '_sourceIndex++'
771 else:
772 rindex = '%d' % self.src_reg_idx
773
774 name = self.base_name
775 if self.is_dest and self.is_src:
776 name += '_merger'
777
778 c_read = '\t\t%s& tmp_s%s = xc->%s(this, %s);\n' \
779 % ('const TheISA::VecRegContainer', rindex, func, rindex)
780 # If the parser has detected that elements are being accessed, create
781 # the appropriate view
782 if self.elemExt:
783 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % \
784 (name, rindex, self.parser.operandTypeMap[self.elemExt])
785 if self.ext:
786 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % \
787 (name, rindex, self.parser.operandTypeMap[self.ext])
788 if hasattr(self, 'active_elems'):
789 if self.active_elems:
790 for elem in self.active_elems:
791 c_read += self.makeReadElem(elem, name)
792 return c_read
793
794 def makeWrite(self, predWrite):
795 func = 'setVecRegOperand'
796 if self.write_code != None:
797 return self.buildWriteCode(func)
798
799 wb = '''
800 if (traceData) {
801 traceData->setData(tmp_d%d);
802 }
803 ''' % self.dest_reg_idx
804 return wb
805
806 def finalize(self, predRead, predWrite):
807 super(VecRegOperand, self).finalize(predRead, predWrite)
808 if self.is_dest:
809 self.op_rd = self.makeReadW(predWrite) + self.op_rd
810
811 class VecElemOperand(Operand):
812 reg_class = 'VecElemClass'
813
814 def isReg(self):
815 return 1
816
817 def isVecElem(self):
818 return 1
819
820 def makeDecl(self):
821 if self.is_dest and not self.is_src:
822 return '\n\t%s %s;' % (self.ctype, self.base_name)
823 else:
824 return ''
825
826 def makeConstructor(self, predRead, predWrite):
827 c_src = ''
828 c_dest = ''
829
830 numAccessNeeded = 1
831
832 if self.is_src:
833 c_src = ('\n\t_srcRegIdx[_numSrcRegs++] = RegId(%s, %s, %s);' %
834 (self.reg_class, self.reg_spec, self.elem_spec))
835
836 if self.is_dest:
837 c_dest = ('\n\t_destRegIdx[_numDestRegs++] = RegId(%s, %s, %s);' %
838 (self.reg_class, self.reg_spec, self.elem_spec))
839 c_dest += '\n\t_numVecElemDestRegs++;'
840 return c_src + c_dest
841
842 def makeRead(self, predRead):
843 c_read = 'xc->readVecElemOperand(this, %d)' % self.src_reg_idx
844
845 if self.ctype == 'float':
846 c_read = 'bitsToFloat32(%s)' % c_read
847 elif self.ctype == 'double':
848 c_read = 'bitsToFloat64(%s)' % c_read
849
850 return '\n\t%s %s = %s;\n' % (self.ctype, self.base_name, c_read)
851
852 def makeWrite(self, predWrite):
853 if self.ctype == 'float':
854 c_write = 'floatToBits32(%s)' % self.base_name
855 elif self.ctype == 'double':
856 c_write = 'floatToBits64(%s)' % self.base_name
857 else:
858 c_write = self.base_name
859
860 c_write = ('\n\txc->setVecElemOperand(this, %d, %s);' %
861 (self.dest_reg_idx, c_write))
862
863 return c_write
864
865 class VecPredRegOperand(Operand):
866 reg_class = 'VecPredRegClass'
867
868 def __init__(self, parser, full_name, ext, is_src, is_dest):
869 Operand.__init__(self, parser, full_name, ext, is_src, is_dest)
870 self.parser = parser
871
872 def isReg(self):
873 return 1
874
875 def isVecPredReg(self):
876 return 1
877
878 def makeDecl(self):
879 return ''
880
881 def makeConstructor(self, predRead, predWrite):
882 c_src = ''
883 c_dest = ''
884
885 if self.is_src:
886 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
887
888 if self.is_dest:
889 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
890 c_dest += '\n\t_numVecPredDestRegs++;'
891
892 return c_src + c_dest
893
894 def makeRead(self, predRead):
895 func = 'readVecPredRegOperand'
896 if self.read_code != None:
897 return self.buildReadCode(func)
898
899 if predRead:
900 rindex = '_sourceIndex++'
901 else:
902 rindex = '%d' % self.src_reg_idx
903
904 c_read = '\t\t%s& tmp_s%s = xc->%s(this, %s);\n' % (
905 'const TheISA::VecPredRegContainer', rindex, func, rindex)
906 if self.ext:
907 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % (
908 self.base_name, rindex,
909 self.parser.operandTypeMap[self.ext])
910 return c_read
911
912 def makeReadW(self, predWrite):
913 func = 'getWritableVecPredRegOperand'
914 if self.read_code != None:
915 return self.buildReadCode(func)
916
917 if predWrite:
918 rindex = '_destIndex++'
919 else:
920 rindex = '%d' % self.dest_reg_idx
921
922 c_readw = '\t\t%s& tmp_d%s = xc->%s(this, %s);\n' % (
923 'TheISA::VecPredRegContainer', rindex, func, rindex)
924 if self.ext:
925 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (
926 self.base_name, rindex,
927 self.parser.operandTypeMap[self.ext])
928 return c_readw
929
930 def makeWrite(self, predWrite):
931 func = 'setVecPredRegOperand'
932 if self.write_code != None:
933 return self.buildWriteCode(func)
934
935 wb = '''
936 if (traceData) {
937 traceData->setData(tmp_d%d);
938 }
939 ''' % self.dest_reg_idx
940 return wb
941
942 def finalize(self, predRead, predWrite):
943 super(VecPredRegOperand, self).finalize(predRead, predWrite)
944 if self.is_dest:
945 self.op_rd = self.makeReadW(predWrite) + self.op_rd
946
947 class CCRegOperand(Operand):
948 reg_class = 'CCRegClass'
949
950 def isReg(self):
951 return 1
952
953 def isCCReg(self):
954 return 1
955
956 def makeConstructor(self, predRead, predWrite):
957 c_src = ''
958 c_dest = ''
959
960 if self.is_src:
961 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
962 if self.hasReadPred():
963 c_src = '\n\tif (%s) {%s\n\t}' % \
964 (self.read_predicate, c_src)
965
966 if self.is_dest:
967 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
968 c_dest += '\n\t_numCCDestRegs++;'
969 if self.hasWritePred():
970 c_dest = '\n\tif (%s) {%s\n\t}' % \
971 (self.write_predicate, c_dest)
972
973 return c_src + c_dest
974
975 def makeRead(self, predRead):
976 if (self.ctype == 'float' or self.ctype == 'double'):
977 error('Attempt to read condition-code register as FP')
978 if self.read_code != None:
979 return self.buildReadCode('readCCRegOperand')
980
981 int_reg_val = ''
982 if predRead:
983 int_reg_val = 'xc->readCCRegOperand(this, _sourceIndex++)'
984 if self.hasReadPred():
985 int_reg_val = '(%s) ? %s : 0' % \
986 (self.read_predicate, int_reg_val)
987 else:
988 int_reg_val = 'xc->readCCRegOperand(this, %d)' % self.src_reg_idx
989
990 return '%s = %s;\n' % (self.base_name, int_reg_val)
991
992 def makeWrite(self, predWrite):
993 if (self.ctype == 'float' or self.ctype == 'double'):
994 error('Attempt to write condition-code register as FP')
995 if self.write_code != None:
996 return self.buildWriteCode('setCCRegOperand')
997
998 if predWrite:
999 wp = 'true'
1000 if self.hasWritePred():
1001 wp = self.write_predicate
1002
1003 wcond = 'if (%s)' % (wp)
1004 windex = '_destIndex++'
1005 else:
1006 wcond = ''
1007 windex = '%d' % self.dest_reg_idx
1008
1009 wb = '''
1010 %s
1011 {
1012 %s final_val = %s;
1013 xc->setCCRegOperand(this, %s, final_val);\n
1014 if (traceData) { traceData->setData(final_val); }
1015 }''' % (wcond, self.ctype, self.base_name, windex)
1016
1017 return wb
1018
1019 class ControlRegOperand(Operand):
1020 reg_class = 'MiscRegClass'
1021
1022 def isReg(self):
1023 return 1
1024
1025 def isControlReg(self):
1026 return 1
1027
1028 def makeConstructor(self, predRead, predWrite):
1029 c_src = ''
1030 c_dest = ''
1031
1032 if self.is_src:
1033 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
1034
1035 if self.is_dest:
1036 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
1037
1038 return c_src + c_dest
1039
1040 def makeRead(self, predRead):
1041 bit_select = 0
1042 if (self.ctype == 'float' or self.ctype == 'double'):
1043 error('Attempt to read control register as FP')
1044 if self.read_code != None:
1045 return self.buildReadCode('readMiscRegOperand')
1046
1047 if predRead:
1048 rindex = '_sourceIndex++'
1049 else:
1050 rindex = '%d' % self.src_reg_idx
1051
1052 return '%s = xc->readMiscRegOperand(this, %s);\n' % \
1053 (self.base_name, rindex)
1054
1055 def makeWrite(self, predWrite):
1056 if (self.ctype == 'float' or self.ctype == 'double'):
1057 error('Attempt to write control register as FP')
1058 if self.write_code != None:
1059 return self.buildWriteCode('setMiscRegOperand')
1060
1061 if predWrite:
1062 windex = '_destIndex++'
1063 else:
1064 windex = '%d' % self.dest_reg_idx
1065
1066 wb = 'xc->setMiscRegOperand(this, %s, %s);\n' % \
1067 (windex, self.base_name)
1068 wb += 'if (traceData) { traceData->setData(%s); }' % \
1069 self.base_name
1070
1071 return wb
1072
1073 class MemOperand(Operand):
1074 def isMem(self):
1075 return 1
1076
1077 def makeConstructor(self, predRead, predWrite):
1078 return ''
1079
1080 def makeDecl(self):
1081 # Declare memory data variable.
1082 return '%s %s;\n' % (self.ctype, self.base_name)
1083
1084 def makeRead(self, predRead):
1085 if self.read_code != None:
1086 return self.buildReadCode()
1087 return ''
1088
1089 def makeWrite(self, predWrite):
1090 if self.write_code != None:
1091 return self.buildWriteCode()
1092 return ''
1093
1094 class PCStateOperand(Operand):
1095 def makeConstructor(self, predRead, predWrite):
1096 return ''
1097
1098 def makeRead(self, predRead):
1099 if self.reg_spec:
1100 # A component of the PC state.
1101 return '%s = __parserAutoPCState.%s();\n' % \
1102 (self.base_name, self.reg_spec)
1103 else:
1104 # The whole PC state itself.
1105 return '%s = xc->pcState();\n' % self.base_name
1106
1107 def makeWrite(self, predWrite):
1108 if self.reg_spec:
1109 # A component of the PC state.
1110 return '__parserAutoPCState.%s(%s);\n' % \
1111 (self.reg_spec, self.base_name)
1112 else:
1113 # The whole PC state itself.
1114 return 'xc->pcState(%s);\n' % self.base_name
1115
1116 def makeDecl(self):
1117 ctype = 'TheISA::PCState'
1118 if self.isPCPart():
1119 ctype = self.ctype
1120 # Note that initializations in the declarations are solely
1121 # to avoid 'uninitialized variable' errors from the compiler.
1122 return '%s %s = 0;\n' % (ctype, self.base_name)
1123
1124 def isPCState(self):
1125 return 1
1126
1127 class OperandList(object):
1128 '''Find all the operands in the given code block. Returns an operand
1129 descriptor list (instance of class OperandList).'''
1130 def __init__(self, parser, code):
1131 self.items = []
1132 self.bases = {}
1133 # delete strings and comments so we don't match on operands inside
1134 for regEx in (stringRE, commentRE):
1135 code = regEx.sub('', code)
1136 # search for operands
1137 next_pos = 0
1138 while 1:
1139 match = parser.operandsRE.search(code, next_pos)
1140 if not match:
1141 # no more matches: we're done
1142 break
1143 op = match.groups()
1144 # regexp groups are operand full name, base, and extension
1145 (op_full, op_base, op_ext) = op
1146 # If it is an elem operand, define or update the corresponding
1147 # vector operand
1148 isElem = False
1149 if op_base in parser.elemToVector:
1150 isElem = True
1151 elem_op = (op_base, op_ext)
1152 op_base = parser.elemToVector[op_base]
1153 op_ext = '' # use the default one
1154 # if the token following the operand is an assignment, this is
1155 # a destination (LHS), else it's a source (RHS)
1156 is_dest = (assignRE.match(code, match.end()) != None)
1157 is_src = not is_dest
1158
1159 # see if we've already seen this one
1160 op_desc = self.find_base(op_base)
1161 if op_desc:
1162 if op_ext and op_ext != '' and op_desc.ext != op_ext:
1163 error ('Inconsistent extensions for operand %s: %s - %s' \
1164 % (op_base, op_desc.ext, op_ext))
1165 op_desc.is_src = op_desc.is_src or is_src
1166 op_desc.is_dest = op_desc.is_dest or is_dest
1167 if isElem:
1168 (elem_base, elem_ext) = elem_op
1169 found = False
1170 for ae in op_desc.active_elems:
1171 (ae_base, ae_ext) = ae
1172 if ae_base == elem_base:
1173 if ae_ext != elem_ext:
1174 error('Inconsistent extensions for elem'
1175 ' operand %s' % elem_base)
1176 else:
1177 found = True
1178 if not found:
1179 op_desc.active_elems.append(elem_op)
1180 else:
1181 # new operand: create new descriptor
1182 op_desc = parser.operandNameMap[op_base](parser,
1183 op_full, op_ext, is_src, is_dest)
1184 # if operand is a vector elem, add the corresponding vector
1185 # operand if not already done
1186 if isElem:
1187 op_desc.elemExt = elem_op[1]
1188 op_desc.active_elems = [elem_op]
1189 self.append(op_desc)
1190 # start next search after end of current match
1191 next_pos = match.end()
1192 self.sort()
1193 # enumerate source & dest register operands... used in building
1194 # constructor later
1195 self.numSrcRegs = 0
1196 self.numDestRegs = 0
1197 self.numFPDestRegs = 0
1198 self.numIntDestRegs = 0
1199 self.numVecDestRegs = 0
1200 self.numVecPredDestRegs = 0
1201 self.numCCDestRegs = 0
1202 self.numMiscDestRegs = 0
1203 self.memOperand = None
1204
1205 # Flags to keep track if one or more operands are to be read/written
1206 # conditionally.
1207 self.predRead = False
1208 self.predWrite = False
1209
1210 for op_desc in self.items:
1211 if op_desc.isReg():
1212 if op_desc.is_src:
1213 op_desc.src_reg_idx = self.numSrcRegs
1214 self.numSrcRegs += 1
1215 if op_desc.is_dest:
1216 op_desc.dest_reg_idx = self.numDestRegs
1217 self.numDestRegs += 1
1218 if op_desc.isFloatReg():
1219 self.numFPDestRegs += 1
1220 elif op_desc.isIntReg():
1221 self.numIntDestRegs += 1
1222 elif op_desc.isVecReg():
1223 self.numVecDestRegs += 1
1224 elif op_desc.isVecPredReg():
1225 self.numVecPredDestRegs += 1
1226 elif op_desc.isCCReg():
1227 self.numCCDestRegs += 1
1228 elif op_desc.isControlReg():
1229 self.numMiscDestRegs += 1
1230 elif op_desc.isMem():
1231 if self.memOperand:
1232 error("Code block has more than one memory operand.")
1233 self.memOperand = op_desc
1234
1235 # Check if this operand has read/write predication. If true, then
1236 # the microop will dynamically index source/dest registers.
1237 self.predRead = self.predRead or op_desc.hasReadPred()
1238 self.predWrite = self.predWrite or op_desc.hasWritePred()
1239
1240 if parser.maxInstSrcRegs < self.numSrcRegs:
1241 parser.maxInstSrcRegs = self.numSrcRegs
1242 if parser.maxInstDestRegs < self.numDestRegs:
1243 parser.maxInstDestRegs = self.numDestRegs
1244 if parser.maxMiscDestRegs < self.numMiscDestRegs:
1245 parser.maxMiscDestRegs = self.numMiscDestRegs
1246
1247 # now make a final pass to finalize op_desc fields that may depend
1248 # on the register enumeration
1249 for op_desc in self.items:
1250 op_desc.finalize(self.predRead, self.predWrite)
1251
1252 def __len__(self):
1253 return len(self.items)
1254
1255 def __getitem__(self, index):
1256 return self.items[index]
1257
1258 def append(self, op_desc):
1259 self.items.append(op_desc)
1260 self.bases[op_desc.base_name] = op_desc
1261
1262 def find_base(self, base_name):
1263 # like self.bases[base_name], but returns None if not found
1264 # (rather than raising exception)
1265 return self.bases.get(base_name)
1266
1267 # internal helper function for concat[Some]Attr{Strings|Lists}
1268 def __internalConcatAttrs(self, attr_name, filter, result):
1269 for op_desc in self.items:
1270 if filter(op_desc):
1271 result += getattr(op_desc, attr_name)
1272 return result
1273
1274 # return a single string that is the concatenation of the (string)
1275 # values of the specified attribute for all operands
1276 def concatAttrStrings(self, attr_name):
1277 return self.__internalConcatAttrs(attr_name, lambda x: 1, '')
1278
1279 # like concatAttrStrings, but only include the values for the operands
1280 # for which the provided filter function returns true
1281 def concatSomeAttrStrings(self, filter, attr_name):
1282 return self.__internalConcatAttrs(attr_name, filter, '')
1283
1284 # return a single list that is the concatenation of the (list)
1285 # values of the specified attribute for all operands
1286 def concatAttrLists(self, attr_name):
1287 return self.__internalConcatAttrs(attr_name, lambda x: 1, [])
1288
1289 # like concatAttrLists, but only include the values for the operands
1290 # for which the provided filter function returns true
1291 def concatSomeAttrLists(self, filter, attr_name):
1292 return self.__internalConcatAttrs(attr_name, filter, [])
1293
1294 def sort(self):
1295 self.items.sort(lambda a, b: a.sort_pri - b.sort_pri)
1296
1297 class SubOperandList(OperandList):
1298 '''Find all the operands in the given code block. Returns an operand
1299 descriptor list (instance of class OperandList).'''
1300 def __init__(self, parser, code, master_list):
1301 self.items = []
1302 self.bases = {}
1303 # delete strings and comments so we don't match on operands inside
1304 for regEx in (stringRE, commentRE):
1305 code = regEx.sub('', code)
1306 # search for operands
1307 next_pos = 0
1308 while 1:
1309 match = parser.operandsRE.search(code, next_pos)
1310 if not match:
1311 # no more matches: we're done
1312 break
1313 op = match.groups()
1314 # regexp groups are operand full name, base, and extension
1315 (op_full, op_base, op_ext) = op
1316 # If it is an elem operand, define or update the corresponding
1317 # vector operand
1318 if op_base in parser.elemToVector:
1319 elem_op = op_base
1320 op_base = parser.elemToVector[elem_op]
1321 # find this op in the master list
1322 op_desc = master_list.find_base(op_base)
1323 if not op_desc:
1324 error('Found operand %s which is not in the master list!'
1325 % op_base)
1326 else:
1327 # See if we've already found this operand
1328 op_desc = self.find_base(op_base)
1329 if not op_desc:
1330 # if not, add a reference to it to this sub list
1331 self.append(master_list.bases[op_base])
1332
1333 # start next search after end of current match
1334 next_pos = match.end()
1335 self.sort()
1336 self.memOperand = None
1337 # Whether the whole PC needs to be read so parts of it can be accessed
1338 self.readPC = False
1339 # Whether the whole PC needs to be written after parts of it were
1340 # changed
1341 self.setPC = False
1342 # Whether this instruction manipulates the whole PC or parts of it.
1343 # Mixing the two is a bad idea and flagged as an error.
1344 self.pcPart = None
1345
1346 # Flags to keep track if one or more operands are to be read/written
1347 # conditionally.
1348 self.predRead = False
1349 self.predWrite = False
1350
1351 for op_desc in self.items:
1352 if op_desc.isPCPart():
1353 self.readPC = True
1354 if op_desc.is_dest:
1355 self.setPC = True
1356
1357 if op_desc.isPCState():
1358 if self.pcPart is not None:
1359 if self.pcPart and not op_desc.isPCPart() or \
1360 not self.pcPart and op_desc.isPCPart():
1361 error("Mixed whole and partial PC state operands.")
1362 self.pcPart = op_desc.isPCPart()
1363
1364 if op_desc.isMem():
1365 if self.memOperand:
1366 error("Code block has more than one memory operand.")
1367 self.memOperand = op_desc
1368
1369 # Check if this operand has read/write predication. If true, then
1370 # the microop will dynamically index source/dest registers.
1371 self.predRead = self.predRead or op_desc.hasReadPred()
1372 self.predWrite = self.predWrite or op_desc.hasWritePred()
1373
1374 # Regular expression object to match C++ strings
1375 stringRE = re.compile(r'"([^"\\]|\\.)*"')
1376
1377 # Regular expression object to match C++ comments
1378 # (used in findOperands())
1379 commentRE = re.compile(r'(^)?[^\S\n]*/(?:\*(.*?)\*/[^\S\n]*|/[^\n]*)($)?',
1380 re.DOTALL | re.MULTILINE)
1381
1382 # Regular expression object to match assignment statements (used in
1383 # findOperands()). If the code immediately following the first
1384 # appearance of the operand matches this regex, then the operand
1385 # appears to be on the LHS of an assignment, and is thus a
1386 # destination. Basically we're looking for an '=' that's not '=='.
1387 # The heinous tangle before that handles the case where the operand
1388 # has an array subscript.
1389 assignRE = re.compile(r'(\[[^\]]+\])?\s*=(?!=)', re.MULTILINE)
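
# Illustrative matches (editor's note, not in the original file):
#
#   assignRE.match(' = Rs1 + Rs2;')   # match -> operand is a destination
#   assignRE.match(' == 0)')          # None  -> '==' comparison, still a source
#   assignRE.match('[3] = 1')         # match -> subscripted destination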
1390
1391 def makeFlagConstructor(flag_list):
1392 if len(flag_list) == 0:
1393 return ''
1394 # filter out repeated flags
1395 flag_list.sort()
1396 i = 1
1397 while i < len(flag_list):
1398 if flag_list[i] == flag_list[i-1]:
1399 del flag_list[i]
1400 else:
1401 i += 1
1402 pre = '\n\tflags['
1403 post = '] = true;'
1404 code = pre + string.join(flag_list, post + pre) + post
1405 return code
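
# Example output (editor's note, not in the original file); duplicates are
# dropped after the sort:
#
#   makeFlagConstructor(['IsLoad', 'IsMicroop', 'IsLoad'])
#   # -> '\n\tflags[IsLoad] = true;\n\tflags[IsMicroop] = true;'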
1406
1407 # Assume all instruction flags are of the form 'IsFoo'
1408 instFlagRE = re.compile(r'Is.*')
1409
1410 # OpClass constants end in 'Op' except No_OpClass
1411 opClassRE = re.compile(r'.*Op|No_OpClass')
1412
1413 class InstObjParams(object):
1414 def __init__(self, parser, mnem, class_name, base_class = '',
1415 snippets = {}, opt_args = []):
1416 self.mnemonic = mnem
1417 self.class_name = class_name
1418 self.base_class = base_class
1419 if not isinstance(snippets, dict):
1420 snippets = {'code' : snippets}
1421 compositeCode = ' '.join(map(str, snippets.values()))
1422 self.snippets = snippets
1423
1424 self.operands = OperandList(parser, compositeCode)
1425
1426 # The header of the constructor declares the variables to be used
1427 # in the body of the constructor.
1428 header = ''
1429 header += '\n\t_numSrcRegs = 0;'
1430 header += '\n\t_numDestRegs = 0;'
1431 header += '\n\t_numFPDestRegs = 0;'
1432 header += '\n\t_numVecDestRegs = 0;'
1433 header += '\n\t_numVecElemDestRegs = 0;'
1434 header += '\n\t_numVecPredDestRegs = 0;'
1435 header += '\n\t_numIntDestRegs = 0;'
1436 header += '\n\t_numCCDestRegs = 0;'
1437
1438 self.constructor = header + \
1439 self.operands.concatAttrStrings('constructor')
1440
1441 self.flags = self.operands.concatAttrLists('flags')
1442
1443 self.op_class = None
1444
1445 # Optional arguments are assumed to be either StaticInst flags
1446 # or an OpClass value. To avoid having to import a complete
1447 # list of these values to match against, we do it ad-hoc
1448 # with regexps.
1449 for oa in opt_args:
1450 if instFlagRE.match(oa):
1451 self.flags.append(oa)
1452 elif opClassRE.match(oa):
1453 self.op_class = oa
1454 else:
1455 error('InstObjParams: optional arg "%s" not recognized '
1456 'as StaticInst::Flag or OpClass.' % oa)
1457
1458 # Make a basic guess on the operand class if not set.
1459 # These are good enough for most cases.
1460 if not self.op_class:
1461 if 'IsStore' in self.flags:
1462 # The order matters here: 'IsFloating' and 'IsInteger' are
1463 # usually set in FP instructions because of the base
1464 # register
1465 if 'IsFloating' in self.flags:
1466 self.op_class = 'FloatMemWriteOp'
1467 else:
1468 self.op_class = 'MemWriteOp'
1469 elif 'IsLoad' in self.flags or 'IsPrefetch' in self.flags:
1470 # The order matters here: 'IsFloating' and 'IsInteger' are
1471 # usually set in FP instructions because of the base
1472 # register
1473 if 'IsFloating' in self.flags:
1474 self.op_class = 'FloatMemReadOp'
1475 else:
1476 self.op_class = 'MemReadOp'
1477 elif 'IsFloating' in self.flags:
1478 self.op_class = 'FloatAddOp'
1479 elif 'IsVector' in self.flags:
1480 self.op_class = 'SimdAddOp'
1481 else:
1482 self.op_class = 'IntAluOp'
1483
1484 # add flag initialization to the constructor here to include
1485 # any flags added via opt_args
1486 self.constructor += makeFlagConstructor(self.flags)
1487
1488 # if 'IsFloating' is set, add call to the FP enable check
1489 # function (which should be provided by isa_desc via a declare)
1490 # if 'IsVector' is set, add call to the Vector enable check
1491 # function (which should be provided by isa_desc via a declare)
1492 if 'IsFloating' in self.flags:
1493 self.fp_enable_check = 'fault = checkFpEnableFault(xc);'
1494 elif 'IsVector' in self.flags:
1495 self.fp_enable_check = 'fault = checkVecEnableFault(xc);'
1496 else:
1497 self.fp_enable_check = ''
1498
1499 ##############
1500 # Stack: a simple stack object. Used for both formats (formatStack)
1501 # and default cases (defaultStack). Simply wraps a list to give more
1502 # stack-like syntax and enable initialization with an argument list
1503 # (as opposed to an argument that's a list).
1504
1505 class Stack(list):
1506 def __init__(self, *items):
1507 list.__init__(self, items)
1508
1509 def push(self, item):
1510 self.append(item);
1511
1512 def top(self):
1513 return self[-1]
1514
1515 # Format a file include stack backtrace as a string
1516 def backtrace(filename_stack):
1517 fmt = "In file included from %s:"
1518 return "\n".join([fmt % f for f in filename_stack])
1519
1520
1521 #######################
1522 #
1523 # LineTracker: track filenames along with line numbers in PLY lineno fields.
1524 # PLY explicitly doesn't do anything with 'lineno' except propagate
1525 # it. This class lets us tie filenames with the line numbers with a
1526 # minimum of disruption to existing increment code.
1527 #
1528
1529 class LineTracker(object):
1530 def __init__(self, filename, lineno=1):
1531 self.filename = filename
1532 self.lineno = lineno
1533
1534 # Overload '+=' for increments. We need to create a new object on
1535 # each update else every token ends up referencing the same
1536 # constantly incrementing instance.
1537 def __iadd__(self, incr):
1538 return LineTracker(self.filename, self.lineno + incr)
1539
1540 def __str__(self):
1541 return "%s:%d" % (self.filename, self.lineno)
1542
1543 # In case there are places where someone really expects a number
1544 def __int__(self):
1545 return self.lineno
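
# Usage sketch (editor's note, not in the original file):
#
#   lt = LineTracker('decoder.isa')
#   lt += 1      # returns a new tracker; tokens holding the old one keep lineno 1
#   str(lt)      # -> 'decoder.isa:2'
#   int(lt)      # -> 2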
1546
1547
1548 #######################
1549 #
1550 # ISA Parser
1551 # parses ISA DSL and emits C++ headers and source
1552 #
1553
1554 class ISAParser(Grammar):
1555 def __init__(self, output_dir):
1556 super(ISAParser, self).__init__()
1557 self.output_dir = output_dir
1558
1559 self.filename = None # for output file watermarking/scaremongering
1560
1561 # variable to hold templates
1562 self.templateMap = {}
1563
1564 # This dictionary maps format name strings to Format objects.
1565 self.formatMap = {}
1566
1567 # Track open files and, if applicable, how many chunks each has been
1568 # split into so far.
1569 self.files = {}
1570 self.splits = {}
1571
1572 # isa_name / namespace identifier from namespace declaration.
1573 # before the namespace declaration, None.
1574 self.isa_name = None
1575 self.namespace = None
1576
1577 # The format stack.
1578 self.formatStack = Stack(NoFormat())
1579
1580 # The default case stack.
1581 self.defaultStack = Stack(None)
1582
1583 # Stack that tracks current file and line number. Each
1584 # element is a tuple (filename, lineno) that records the
1585 # *current* filename and the line number in the *previous*
1586 # file where it was included.
1587 self.fileNameStack = Stack()
1588
1589 symbols = ('makeList', 're', 'string')
1590 self.exportContext = dict([(s, eval(s)) for s in symbols])
1591
1592 self.maxInstSrcRegs = 0
1593 self.maxInstDestRegs = 0
1594 self.maxMiscDestRegs = 0
1595
1596 def __getitem__(self, i): # Allow object (self) to be
1597 return getattr(self, i) # passed to %-substitutions
1598
1599 # Change the file suffix of a base filename:
1600 # (e.g.) decoder.cc -> decoder-g.cc.inc for 'global' outputs
1601 def suffixize(self, s, sec):
1602 extn = re.compile('(\.[^\.]+)$') # isolate extension
1603 if self.namespace:
1604 return extn.sub(r'-ns\1.inc', s) # insert some text on either side
1605 else:
1606 return extn.sub(r'-g\1.inc', s)
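
# Illustrative results (editor's note, not in the original file):
#
#   self.suffixize('decoder.cc', 'decoder')
#   # -> 'decoder-g.cc.inc'   before the namespace declaration is parsed
#   # -> 'decoder-ns.cc.inc'  once self.namespace has been set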
1607
1608 # Get the file object for emitting code into the specified section
1609 # (header, decoder, exec, decode_block).
1610 def get_file(self, section):
1611 if section == 'decode_block':
1612 filename = 'decode-method.cc.inc'
1613 else:
1614 if section == 'header':
1615 file = 'decoder.hh'
1616 else:
1617 file = '%s.cc' % section
1618 filename = self.suffixize(file, section)
1619 try:
1620 return self.files[filename]
1621 except KeyError: pass
1622
1623 f = self.open(filename)
1624 self.files[filename] = f
1625
1626 # The splittable files are the ones with many independent
1627 # per-instruction functions - the decoder's instruction constructors
1628 # and the instruction execution (execute()) methods. These both have
1629 # the suffix -ns.cc.inc, meaning they are within the namespace part
1630 # of the ISA, contain object-emitting C++ source, and are included
1631 # into other top-level files. These are the files that need special
1632 # #define's to allow parts of them to be compiled separately. Rather
1633 # than splitting the emissions into separate files, the monolithic
1634 # output of the ISA parser is maintained, but the value (or lack
1635 # thereof) of the __SPLIT definition during C preprocessing will
1636 # select the different chunks. If no 'split' directives are used,
1637 # the cpp emissions have no effect.
1638 if re.search('-ns.cc.inc$', filename):
1639 print('#if !defined(__SPLIT) || (__SPLIT == 1)', file=f)
1640 self.splits[f] = 1
1641 # ensure requisite #include's
1642 elif filename == 'decoder-g.hh.inc':
1643 print('#include "base/bitfield.hh"', file=f)
1644
1645 return f
1646
1647 # Weave together the parts of the different output sections by
1648 # #include'ing them into some very short top-level .cc/.hh files.
1649 # These small files make it much clearer how this tool works, since
1650 # you directly see the chunks emitted as files that are #include'd.
1651 def write_top_level_files(self):
1652 # decoder header - everything depends on this
1653 file = 'decoder.hh'
1654 with self.open(file) as f:
1655 f.write('#ifndef __ARCH_%(isa)s_GENERATED_DECODER_HH__\n'
1656 '#define __ARCH_%(isa)s_GENERATED_DECODER_HH__\n\n' %
1657 {'isa': self.isa_name.upper()})
1658 fn = 'decoder-g.hh.inc'
1659 assert(fn in self.files)
1660 f.write('#include "%s"\n' % fn)
1661
1662 fn = 'decoder-ns.hh.inc'
1663 assert(fn in self.files)
1664 f.write('namespace %s {\n#include "%s"\n}\n'
1665 % (self.namespace, fn))
1666 f.write('\n#endif // __ARCH_%s_GENERATED_DECODER_HH__\n' %
1667 self.isa_name.upper())
1668
1669 # decoder method - cannot be split
1670 file = 'decoder.cc'
1671 with self.open(file) as f:
1672 fn = 'base/compiler.hh'
1673 f.write('#include "%s"\n' % fn)
1674
1675 fn = 'decoder-g.cc.inc'
1676 assert(fn in self.files)
1677 f.write('#include "%s"\n' % fn)
1678
1679 fn = 'decoder.hh'
1680 f.write('#include "%s"\n' % fn)
1681
1682 fn = 'decode-method.cc.inc'
1683 # is guaranteed to have been written for parse to complete
1684 f.write('#include "%s"\n' % fn)
1685
1686 extn = re.compile('(\.[^\.]+)$')
1687
1688 # instruction constructors
1689 splits = self.splits[self.get_file('decoder')]
1690 file_ = 'inst-constrs.cc'
1691 for i in range(1, splits+1):
1692 if splits > 1:
1693 file = extn.sub(r'-%d\1' % i, file_)
1694 else:
1695 file = file_
1696 with self.open(file) as f:
1697 fn = 'decoder-g.cc.inc'
1698 assert(fn in self.files)
1699 f.write('#include "%s"\n' % fn)
1700
1701 fn = 'decoder.hh'
1702 f.write('#include "%s"\n' % fn)
1703
1704 fn = 'decoder-ns.cc.inc'
1705 assert(fn in self.files)
1706 print('namespace %s {' % self.namespace, file=f)
1707 if splits > 1:
1708 print('#define __SPLIT %u' % i, file=f)
1709 print('#include "%s"' % fn, file=f)
1710 print('}', file=f)
1711
1712 # instruction execution
1713 splits = self.splits[self.get_file('exec')]
1714 for i in range(1, splits+1):
1715 file = 'generic_cpu_exec.cc'
1716 if splits > 1:
1717 file = extn.sub(r'_%d\1' % i, file)
1718 with self.open(file) as f:
1719 fn = 'exec-g.cc.inc'
1720 assert(fn in self.files)
1721 f.write('#include "%s"\n' % fn)
1722 f.write('#include "cpu/exec_context.hh"\n')
1723 f.write('#include "decoder.hh"\n')
1724
1725 fn = 'exec-ns.cc.inc'
1726 assert(fn in self.files)
1727 print('namespace %s {' % self.namespace, file=f)
1728 if splits > 1:
1729 print('#define __SPLIT %u' % i, file=f)
1730 print('#include "%s"' % fn, file=f)
1731 print('}', file=f)
1732
1733 # max_inst_regs.hh
1734 self.update('max_inst_regs.hh',
1735 '''namespace %(namespace)s {
1736 const int MaxInstSrcRegs = %(maxInstSrcRegs)d;
1737 const int MaxInstDestRegs = %(maxInstDestRegs)d;
1738 const int MaxMiscDestRegs = %(maxMiscDestRegs)d;\n}\n''' % self)
1739
1740 scaremonger_template = '''// DO NOT EDIT
1741 // This file was automatically generated from an ISA description:
1742 // %(filename)s
1743
1744 '''
1745
1746 #####################################################################
1747 #
1748 # Lexer
1749 #
1750 # The PLY lexer module takes two things as input:
1751 # - A list of token names (the string list 'tokens')
1752 # - A regular expression describing a match for each token. The
1753 # regexp for token FOO can be provided in two ways:
1754 # - as a string variable named t_FOO
1755 # - as the doc string for a function named t_FOO. In this case,
1756 # the function is also executed, allowing an action to be
1757 # associated with each token match.
1758 #
1759 #####################################################################
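# For illustration only (not part of this class), the two styles look
# roughly like this in a free-standing PLY lexer module:
#
#   t_PLUS = r'\+'                # string form: regexp only, no action
#
#   def t_NUMBER(t):
#       r'\d+'                    # the doc string is the regexp
#       t.value = int(t.value)    # the body is the action
#       return t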
1760
1761 # Reserved words. These are listed separately as they are matched
1762 # using the same regexp as generic IDs, but distinguished in the
1763 # t_ID() function. The PLY documentation suggests this approach.
1764 reserved = (
1765 'BITFIELD', 'DECODE', 'DECODER', 'DEFAULT', 'DEF', 'EXEC', 'FORMAT',
1766 'HEADER', 'LET', 'NAMESPACE', 'OPERAND_TYPES', 'OPERANDS',
1767 'OUTPUT', 'SIGNED', 'SPLIT', 'TEMPLATE'
1768 )
1769
1770 # List of tokens. The lex module requires this.
1771 tokens = reserved + (
1772 # identifier
1773 'ID',
1774
1775 # integer literal
1776 'INTLIT',
1777
1778 # string literal
1779 'STRLIT',
1780
1781 # code literal
1782 'CODELIT',
1783
1784 # ( ) [ ] { } < > , ; . : :: *
1785 'LPAREN', 'RPAREN',
1786 'LBRACKET', 'RBRACKET',
1787 'LBRACE', 'RBRACE',
1788 'LESS', 'GREATER', 'EQUALS',
1789 'COMMA', 'SEMI', 'DOT', 'COLON', 'DBLCOLON',
1790 'ASTERISK',
1791
1792 # C preprocessor directives
1793 'CPPDIRECTIVE'
1794
1795 # The following are matched but never returned. They are commented
1796 # out to suppress a PLY warning.
1797 # newfile directive
1798 # 'NEWFILE',
1799
1800 # endfile directive
1801 # 'ENDFILE'
1802 )
1803
1804 # Regular expressions for token matching
1805 t_LPAREN = r'\('
1806 t_RPAREN = r'\)'
1807 t_LBRACKET = r'\['
1808 t_RBRACKET = r'\]'
1809 t_LBRACE = r'\{'
1810 t_RBRACE = r'\}'
1811 t_LESS = r'\<'
1812 t_GREATER = r'\>'
1813 t_EQUALS = r'='
1814 t_COMMA = r','
1815 t_SEMI = r';'
1816 t_DOT = r'\.'
1817 t_COLON = r':'
1818 t_DBLCOLON = r'::'
1819 t_ASTERISK = r'\*'
1820
1821 # Identifiers and reserved words
1822 reserved_map = { }
1823 for r in reserved:
1824 reserved_map[r.lower()] = r
1825
1826 def t_ID(self, t):
1827 r'[A-Za-z_]\w*'
1828 t.type = self.reserved_map.get(t.value, 'ID')
1829 return t
1830
1831 # Integer literal
1832 def t_INTLIT(self, t):
1833 r'-?(0x[\da-fA-F]+)|\d+'
1834 try:
1835 t.value = int(t.value,0)
1836 except ValueError:
1837 error(t.lexer.lineno, 'Integer value "%s" too large' % t.value)
1838 t.value = 0
1839 return t
1840
1841 # String literal. Note that these use only single quotes, and
1842 # can span multiple lines.
1843 def t_STRLIT(self, t):
1844 r"(?m)'([^'])+'"
1845 # strip off quotes
1846 t.value = t.value[1:-1]
1847 t.lexer.lineno += t.value.count('\n')
1848 return t
1849
1850
1851 # "Code literal"... like a string literal, but delimiters are
1852 # '{{' and '}}' so they get formatted nicely under emacs c-mode
1853 def t_CODELIT(self, t):
1854 r"(?m)\{\{([^\}]|}(?!\}))+\}\}"
1855 # strip off {{ & }}
1856 t.value = t.value[2:-2]
1857 t.lexer.lineno += t.value.count('\n')
1858 return t
1859
1860 def t_CPPDIRECTIVE(self, t):
1861 r'^\#[^\#].*\n'
1862 t.lexer.lineno += t.value.count('\n')
1863 return t
1864
1865 def t_NEWFILE(self, t):
1866 r'^\#\#newfile\s+"[^"]*"\n'
1867 self.fileNameStack.push(t.lexer.lineno)
1868 t.lexer.lineno = LineTracker(t.value[11:-2])
1869
1870 def t_ENDFILE(self, t):
1871 r'^\#\#endfile\n'
1872 t.lexer.lineno = self.fileNameStack.pop()
1873
1874 #
1875 # The functions t_NEWLINE, t_ignore, and t_error are
1876 # special for the lex module.
1877 #
1878
1879 # Newlines
1880 def t_NEWLINE(self, t):
1881 r'\n+'
1882 t.lexer.lineno += t.value.count('\n')
1883
1884 # Comments
1885 def t_comment(self, t):
1886 r'//.*'
1887
1888 # Completely ignored characters
1889 t_ignore = ' \t\x0c'
1890
1891 # Error handler
1892 def t_error(self, t):
1893 error(t.lexer.lineno, "illegal character '%s'" % t.value[0])
1894 t.skip(1)
1895
1896 #####################################################################
1897 #
1898 # Parser
1899 #
1900 # Every function whose name starts with 'p_' defines a grammar
1901 # rule. The rule is encoded in the function's doc string, while
1902 # the function body provides the action taken when the rule is
1903 # matched. The argument to each function is a list of the values
1904 # of the rule's symbols: t[0] for the LHS, and t[1..n] for the
1905 # symbols on the RHS. For tokens, the value is copied from the
1906 # t.value attribute provided by the lexer. For non-terminals, the
1907 # value is assigned by the producing rule; i.e., the job of the
1908 # grammar rule function is to set the value for the non-terminal
1909 # on the LHS (by assigning to t[0]).
1910 #####################################################################
1911
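# For illustration only (not part of this class), a free-standing PLY
# grammar rule has this shape:
#
#   def p_expr_plus(t):
#       'expr : expr PLUS term'
#       t[0] = t[1] + t[3]    # build the LHS value from the RHS values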
1912 # The LHS of the first grammar rule is used as the start symbol
1913 # (in this case, 'specification'). Note that this rule enforces
1914 # that there will be exactly one namespace declaration, with 0 or
1915 # more global defs/decls before and after it. The defs & decls
1916 # before the namespace decl will be outside the namespace; those
1917 # after will be inside. The decoder function is always inside the
1918 # namespace.
1919 def p_specification(self, t):
1920 'specification : opt_defs_and_outputs top_level_decode_block'
1921
1922 for f in self.splits.iterkeys():
1923 f.write('\n#endif\n')
1924
1925 for f in self.files.itervalues(): # close ALL the files;
1926 f.close() # not doing so can cause compilation to fail
1927
1928 self.write_top_level_files()
1929
1930 t[0] = True
1931
1932 # 'opt_defs_and_outputs' is a possibly empty sequence of def and/or
1933 # output statements. Its productions do the hard work of eventually
1934 # instantiating GenCode objects, which are generally emitted (written
1935 # to disk) as soon as possible, except for the decode_block, which has
1936 # to be accumulated into one large function of nested switch/case blocks.
1937 def p_opt_defs_and_outputs_0(self, t):
1938 'opt_defs_and_outputs : empty'
1939
1940 def p_opt_defs_and_outputs_1(self, t):
1941 'opt_defs_and_outputs : defs_and_outputs'
1942
1943 def p_defs_and_outputs_0(self, t):
1944 'defs_and_outputs : def_or_output'
1945
1946 def p_defs_and_outputs_1(self, t):
1947 'defs_and_outputs : defs_and_outputs def_or_output'
1948
1949 # The list of possible definition/output statements.
1950 # They are all processed as they are seen.
1951 def p_def_or_output(self, t):
1952 '''def_or_output : name_decl
1953 | def_format
1954 | def_bitfield
1955 | def_bitfield_struct
1956 | def_template
1957 | def_operand_types
1958 | def_operands
1959 | output
1960 | global_let
1961 | split'''
1962
1963 # Utility function used by both invocations of splitting - explicit
1964 # 'split' keyword and split() function inside "let {{ }};" blocks.
1965 def split(self, sec, write=False):
1966 assert sec != 'header', "header cannot be split"
1967
1968 f = self.get_file(sec)
1969 self.splits[f] += 1
1970 s = '\n#endif\n#if __SPLIT == %u\n' % self.splits[f]
1971 if write:
1972 f.write(s)
1973 else:
1974 return s
1975
1976 # split output file to reduce compilation time
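# For illustration, an ISA description can request a split explicitly
# with e.g. "split decoder;" or "split exec;" (the header section
# cannot be split; see split() above).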
1977 def p_split(self, t):
1978 'split : SPLIT output_type SEMI'
1979 assert self.isa_name, "'split' not allowed before namespace decl"
1980
1981 self.split(t[2], True)
1982
1983 def p_output_type(self, t):
1984 '''output_type : DECODER
1985 | HEADER
1986 | EXEC'''
1987 t[0] = t[1]
1988
1989 # ISA name declaration looks like "namespace <foo>;"
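# e.g. "namespace Foo;" sets isa_name to 'Foo' and the generated C++
# namespace to 'FooInst'.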
1990 def p_name_decl(self, t):
1991 'name_decl : NAMESPACE ID SEMI'
1992 assert self.isa_name is None, "Only 1 namespace decl permitted"
1993 self.isa_name = t[2]
1994 self.namespace = t[2] + 'Inst'
1995
1996 # Output blocks 'output <foo> {{...}}' (C++ code blocks) are copied
1997 # directly to the appropriate output section.
1998
1999 # Massage output block by substituting in template definitions and
2000 # bit operators. We handle '%'s embedded in the string that don't
2001 # indicate template substitutions by doubling them first so that the
2002 # format operation will reduce them back to single '%'s.
2003 def process_output(self, s):
2004 s = self.protectNonSubstPercents(s)
2005 return substBitOps(s % self.templateMap)
2006
2007 def p_output(self, t):
2008 'output : OUTPUT output_type CODELIT SEMI'
2009 kwargs = { t[2]+'_output' : self.process_output(t[3]) }
2010 GenCode(self, **kwargs).emit()
2011
2012 # global let blocks 'let {{...}}' (Python code blocks) are
2013 # executed directly when seen. Note that these execute in a
2014 # special variable context 'exportContext' to prevent the code
2015 # from polluting this script's namespace.
2016 def p_global_let(self, t):
2017 'global_let : LET CODELIT SEMI'
2018 def _split(sec):
2019 return self.split(sec)
2020 self.updateExportContext()
2021 self.exportContext["header_output"] = ''
2022 self.exportContext["decoder_output"] = ''
2023 self.exportContext["exec_output"] = ''
2024 self.exportContext["decode_block"] = ''
2025 self.exportContext["split"] = _split
2026 split_setup = '''
2027 def wrap(func):
2028 def split(sec):
2029 globals()[sec + '_output'] += func(sec)
2030 return split
2031 split = wrap(split)
2032 del wrap
2033 '''
2034 # This tricky setup (immediately above) allows us to just write
2035 # (e.g.) "split('exec')" in the Python code and the split #ifdef's
2036 # will automatically be added to the exec_output variable. The inner
2037 # Python execution environment doesn't know about the split points,
2038 # so we carefully inject and wrap a closure that can retrieve the
2039 # next split's #define from the parser and add it to the current
2040 # emission-in-progress.
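# For illustration, a (hypothetical) let block in an ISA description
# could use it like this:
#
#   let {{
#       exec_output += 'void helper();\n'
#       split('exec')    # appends the next __SPLIT guard to exec_output
#   }};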
2041 try:
2042 exec split_setup+fixPythonIndentation(t[2]) in self.exportContext
2043 except Exception, exc:
2044 traceback.print_exc(file=sys.stdout)
2045 if debug:
2046 raise
2047 error(t.lineno(1), 'In global let block: %s' % exc)
2048 GenCode(self,
2049 header_output=self.exportContext["header_output"],
2050 decoder_output=self.exportContext["decoder_output"],
2051 exec_output=self.exportContext["exec_output"],
2052 decode_block=self.exportContext["decode_block"]).emit()
2053
2054 # Define the mapping from operand type extensions to C++ types and
2055 # bit widths (stored in operandTypeMap).
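# For illustration (hypothetical extensions and C++ types):
#
#   def operand_types {{
#       'ub' : 'uint8_t',
#       'sw' : 'int32_t',
#       'ud' : 'uint64_t'
#   }};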
2056 def p_def_operand_types(self, t):
2057 'def_operand_types : DEF OPERAND_TYPES CODELIT SEMI'
2058 try:
2059 self.operandTypeMap = eval('{' + t[3] + '}')
2060 except Exception, exc:
2061 if debug:
2062 raise
2063 error(t.lineno(1),
2064 'In def operand_types: %s' % exc)
2065
2066 # Define the mapping from operand names to operand classes and
2067 # other traits. Stored in operandNameMap.
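# For illustration, each entry maps an operand name to a tuple of
# (base_cls_name, dflt_ext, reg_spec, flags, sort_pri, ...); a
# hypothetical entry:
#
#   def operands {{
#       'Rd': ('IntReg', 'ud', 'RD', 'IsInteger', 1)
#   }};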
2068 def p_def_operands(self, t):
2069 'def_operands : DEF OPERANDS CODELIT SEMI'
2070 if not hasattr(self, 'operandTypeMap'):
2071 error(t.lineno(1),
2072 'error: operand types must be defined before operands')
2073 try:
2074 user_dict = eval('{' + t[3] + '}', self.exportContext)
2075 except Exception, exc:
2076 if debug:
2077 raise
2078 error(t.lineno(1), 'In def operands: %s' % exc)
2079 self.buildOperandNameMap(user_dict, t.lexer.lineno)
2080
2081 # A bitfield definition looks like:
2082 # 'def [signed] bitfield <ID> [<first>:<last>]'
2083 # This generates a preprocessor macro in the output file.
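# e.g. "def bitfield OPCODE <31:26>;" emits roughly:
#   #undef OPCODE
#   #define OPCODE bits(machInst, 31, 26)
# and "def signed bitfield IMM <15:0>;" would instead use
# sext<16>(bits(machInst, 15, 0)).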
2084 def p_def_bitfield_0(self, t):
2085 'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT COLON INTLIT GREATER SEMI'
2086 expr = 'bits(machInst, %2d, %2d)' % (t[6], t[8])
2087 if (t[2] == 'signed'):
2088 expr = 'sext<%d>(%s)' % (t[6] - t[8] + 1, expr)
2089 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2090 GenCode(self, header_output=hash_define).emit()
2091
2092 # alternate form for single bit: 'def [signed] bitfield <ID> [<bit>]'
2093 def p_def_bitfield_1(self, t):
2094 'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT GREATER SEMI'
2095 expr = 'bits(machInst, %2d, %2d)' % (t[6], t[6])
2096 if (t[2] == 'signed'):
2097 expr = 'sext<%d>(%s)' % (1, expr)
2098 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2099 GenCode(self, header_output=hash_define).emit()
2100
2101 # alternate form for structure member: 'def bitfield <ID> <ID>'
2102 def p_def_bitfield_struct(self, t):
2103 'def_bitfield_struct : DEF opt_signed BITFIELD ID id_with_dot SEMI'
2104 if (t[2] != ''):
2105 error(t.lineno(1),
2106 'error: structure bitfields are always unsigned.')
2107 expr = 'machInst.%s' % t[5]
2108 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2109 GenCode(self, header_output=hash_define).emit()
2110
2111 def p_id_with_dot_0(self, t):
2112 'id_with_dot : ID'
2113 t[0] = t[1]
2114
2115 def p_id_with_dot_1(self, t):
2116 'id_with_dot : ID DOT id_with_dot'
2117 t[0] = t[1] + t[2] + t[3]
2118
2119 def p_opt_signed_0(self, t):
2120 'opt_signed : SIGNED'
2121 t[0] = t[1]
2122
2123 def p_opt_signed_1(self, t):
2124 'opt_signed : empty'
2125 t[0] = ''
2126
2127 def p_def_template(self, t):
2128 'def_template : DEF TEMPLATE ID CODELIT SEMI'
2129 if t[3] in self.templateMap:
2130 print("warning: template %s already defined" % t[3])
2131 self.templateMap[t[3]] = Template(self, t[4])
2132
2133 # An instruction format definition looks like
2134 # "def format <fmt>(<params>) {{...}};"
2135 def p_def_format(self, t):
2136 'def_format : DEF FORMAT ID LPAREN param_list RPAREN CODELIT SEMI'
2137 (id, params, code) = (t[3], t[5], t[7])
2138 self.defFormat(id, params, code, t.lexer.lineno)
2139
2140 # The formal parameter list for an instruction format is a
2141 # possibly empty list of comma-separated parameters. Positional
2142 # (standard, non-keyword) parameters must come first, followed by
2143 # keyword parameters, followed by a '*foo' parameter that gets
2144 # excess positional arguments (as in Python). Each of these three
2145 # parameter categories is optional.
2146 #
2147 # Note that we do not support the '**foo' parameter for collecting
2148 # otherwise undefined keyword args. Otherwise the parameter list
2149 # is (I believe) identical to what is supported in Python.
2150 #
2151 # The param list is built up as a flat list of parameter specifier
2152 # strings: bare names for positional params, 'name = default' strings
2153 # for keyword params, and a '*name' string for the excess-args param.
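# e.g. the parameter list "code, dest='Rd', *flags" becomes
# ['code', "dest = 'Rd'", '*flags'].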
2154 def p_param_list_0(self, t):
2155 'param_list : positional_param_list COMMA nonpositional_param_list'
2156 t[0] = t[1] + t[3]
2157
2158 def p_param_list_1(self, t):
2159 '''param_list : positional_param_list
2160 | nonpositional_param_list'''
2161 t[0] = t[1]
2162
2163 def p_positional_param_list_0(self, t):
2164 'positional_param_list : empty'
2165 t[0] = []
2166
2167 def p_positional_param_list_1(self, t):
2168 'positional_param_list : ID'
2169 t[0] = [t[1]]
2170
2171 def p_positional_param_list_2(self, t):
2172 'positional_param_list : positional_param_list COMMA ID'
2173 t[0] = t[1] + [t[3]]
2174
2175 def p_nonpositional_param_list_0(self, t):
2176 'nonpositional_param_list : keyword_param_list COMMA excess_args_param'
2177 t[0] = t[1] + t[3]
2178
2179 def p_nonpositional_param_list_1(self, t):
2180 '''nonpositional_param_list : keyword_param_list
2181 | excess_args_param'''
2182 t[0] = t[1]
2183
2184 def p_keyword_param_list_0(self, t):
2185 'keyword_param_list : keyword_param'
2186 t[0] = [t[1]]
2187
2188 def p_keyword_param_list_1(self, t):
2189 'keyword_param_list : keyword_param_list COMMA keyword_param'
2190 t[0] = t[1] + [t[3]]
2191
2192 def p_keyword_param(self, t):
2193 'keyword_param : ID EQUALS expr'
2194 t[0] = t[1] + ' = ' + t[3].__repr__()
2195
2196 def p_excess_args_param(self, t):
2197 'excess_args_param : ASTERISK ID'
2198 # Just concatenate them: '*ID'. Wrap in list to be consistent
2199 # with positional_param_list and keyword_param_list.
2200 t[0] = [t[1] + t[2]]
2201
2202 # End of format definition-related rules.
2203 ##############
2204
2205 #
2206 # A decode block looks like:
2207 # decode <field1> [, <field2>]* [default <inst>] { ... }
2208 #
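# For illustration (hypothetical field names and formats):
#
#   decode OPCODE default Unknown::unknown() {
#       0x0: IntOp::add({{ Rd = Rs1 + Rs2; }});
#       0x1: decode FUNC { ... }
#   }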
2209 def p_top_level_decode_block(self, t):
2210 'top_level_decode_block : decode_block'
2211 codeObj = t[1]
2212 codeObj.wrap_decode_block('''
2213 StaticInstPtr
2214 %(isa_name)s::Decoder::decodeInst(%(isa_name)s::ExtMachInst machInst)
2215 {
2216 using namespace %(namespace)s;
2217 ''' % self, '}')
2218
2219 codeObj.emit()
2220
2221 def p_decode_block(self, t):
2222 'decode_block : DECODE ID opt_default LBRACE decode_stmt_list RBRACE'
2223 default_defaults = self.defaultStack.pop()
2224 codeObj = t[5]
2225 # use the "default defaults" only if there was no explicit
2226 # default statement in decode_stmt_list
2227 if not codeObj.has_decode_default:
2228 codeObj += default_defaults
2229 codeObj.wrap_decode_block('switch (%s) {\n' % t[2], '}\n')
2230 t[0] = codeObj
2231
2232 # The opt_default statement serves only to push the "default
2233 # defaults" onto defaultStack. This value will be used by nested
2234 # decode blocks, and used and popped off when the current
2235 # decode_block is processed (in p_decode_block() above).
2236 def p_opt_default_0(self, t):
2237 'opt_default : empty'
2238 # no default specified: reuse the one currently at the top of
2239 # the stack
2240 self.defaultStack.push(self.defaultStack.top())
2241 # no meaningful value returned
2242 t[0] = None
2243
2244 def p_opt_default_1(self, t):
2245 'opt_default : DEFAULT inst'
2246 # push the new default
2247 codeObj = t[2]
2248 codeObj.wrap_decode_block('\ndefault:\n', 'break;\n')
2249 self.defaultStack.push(codeObj)
2250 # no meaningful value returned
2251 t[0] = None
2252
2253 def p_decode_stmt_list_0(self, t):
2254 'decode_stmt_list : decode_stmt'
2255 t[0] = t[1]
2256
2257 def p_decode_stmt_list_1(self, t):
2258 'decode_stmt_list : decode_stmt decode_stmt_list'
2259 if (t[1].has_decode_default and t[2].has_decode_default):
2260 error(t.lineno(1), 'Two default cases in decode block')
2261 t[0] = t[1] + t[2]
2262
2263 #
2264 # Decode statement rules
2265 #
2266 # There are four types of statements allowed in a decode block:
2267 # 1. Format blocks 'format <foo> { ... }'
2268 # 2. Nested decode blocks
2269 # 3. Instruction definitions.
2270 # 4. C preprocessor directives.
2271
2272
2273 # Preprocessor directives found in a decode statement list are
2274 # passed through to the output, replicated to all of the output
2275 # code streams. This works well for ifdefs, so we can ifdef out
2276 # both the declarations and the decode cases generated by an
2277 # instruction definition. Handling them as part of the grammar
2278 # makes it easy to keep them in the right place with respect to
2279 # the code generated by the other statements.
2280 def p_decode_stmt_cpp(self, t):
2281 'decode_stmt : CPPDIRECTIVE'
2282 t[0] = GenCode(self, t[1], t[1], t[1], t[1])
2283
2284 # A format block 'format <foo> { ... }' sets the default
2285 # instruction format used to handle instruction definitions inside
2286 # the block. This format can be overridden by using an explicit
2287 # format on the instruction definition or with a nested format
2288 # block.
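# For illustration (hypothetical formats):
#
#   format IntOp {
#       0x20: add({{ Rd = Rs1 + Rs2; }});          # uses IntOp
#       0x21: FPOp::fadd({{ Fd = Fs1 + Fs2; }});   # explicit override
#   }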
2289 def p_decode_stmt_format(self, t):
2290 'decode_stmt : FORMAT push_format_id LBRACE decode_stmt_list RBRACE'
2291 # The format will be pushed on the stack when 'push_format_id'
2292 # is processed (see below). Once the parser has recognized
2293 # the full production (through the right brace), we're done
2294 # with the format, so now we can pop it.
2295 self.formatStack.pop()
2296 t[0] = t[4]
2297
2298 # This rule exists so we can set the current format (& push the
2299 # stack) when we recognize the format name part of the format
2300 # block.
2301 def p_push_format_id(self, t):
2302 'push_format_id : ID'
2303 try:
2304 self.formatStack.push(self.formatMap[t[1]])
2305 t[0] = ('', '// format %s' % t[1])
2306 except KeyError:
2307 error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])
2308
2309 # Nested decode block: if the value of the current field matches
2310 # the specified constant(s), do a nested decode on some other field.
2311 def p_decode_stmt_decode(self, t):
2312 'decode_stmt : case_list COLON decode_block'
2313 case_list = t[1]
2314 codeObj = t[3]
2315 # just wrap the decoding code from the block as a case in the
2316 # outer switch statement.
2317 codeObj.wrap_decode_block('\n%s\n' % ''.join(case_list),
2318 'M5_UNREACHABLE;\n')
2319 codeObj.has_decode_default = (case_list == ['default:'])
2320 t[0] = codeObj
2321
2322 # Instruction definition (finally!).
2323 def p_decode_stmt_inst(self, t):
2324 'decode_stmt : case_list COLON inst SEMI'
2325 case_list = t[1]
2326 codeObj = t[3]
2327 codeObj.wrap_decode_block('\n%s' % ''.join(case_list), 'break;\n')
2328 codeObj.has_decode_default = (case_list == ['default:'])
2329 t[0] = codeObj
2330
2331 # The constant list for a decode case label must be non-empty, and must
2332 # either be the keyword 'default', or made up of one or more
2333 # comma-separated integer literals or strings which evaluate to
2334 # constants when compiled as C++.
2335 def p_case_list_0(self, t):
2336 'case_list : DEFAULT'
2337 t[0] = ['default:']
2338
2339 def prep_int_lit_case_label(self, lit):
2340 if lit >= 2**32:
2341 return 'case ULL(%#x): ' % lit
2342 else:
2343 return 'case %#x: ' % lit
2344
2345 def prep_str_lit_case_label(self, lit):
2346 return 'case %s: ' % lit
2347
2348 def p_case_list_1(self, t):
2349 'case_list : INTLIT'
2350 t[0] = [self.prep_int_lit_case_label(t[1])]
2351
2352 def p_case_list_2(self, t):
2353 'case_list : STRLIT'
2354 t[0] = [self.prep_str_lit_case_label(t[1])]
2355
2356 def p_case_list_3(self, t):
2357 'case_list : case_list COMMA INTLIT'
2358 t[0] = t[1]
2359 t[0].append(self.prep_int_lit_case_label(t[3]))
2360
2361 def p_case_list_4(self, t):
2362 'case_list : case_list COMMA STRLIT'
2363 t[0] = t[1]
2364 t[0].append(self.prep_str_lit_case_label(t[3]))
2365
2366 # Define an instruction using the current instruction format
2367 # (specified by an enclosing format block).
2368 # "<mnemonic>(<args>)"
2369 def p_inst_0(self, t):
2370 'inst : ID LPAREN arg_list RPAREN'
2371 # Pass the ID and arg list to the current format class to deal with.
2372 currentFormat = self.formatStack.top()
2373 codeObj = currentFormat.defineInst(self, t[1], t[3], t.lexer.lineno)
2374 args = ','.join(map(str, t[3]))
2375 args = re.sub('(?m)^', '//', args)
2376 args = re.sub('^//', '', args)
2377 comment = '\n// %s::%s(%s)\n' % (currentFormat.id, t[1], args)
2378 codeObj.prepend_all(comment)
2379 t[0] = codeObj
2380
2381 # Define an instruction using an explicitly specified format:
2382 # "<fmt>::<mnemonic>(<args>)"
2383 def p_inst_1(self, t):
2384 'inst : ID DBLCOLON ID LPAREN arg_list RPAREN'
2385 try:
2386 format = self.formatMap[t[1]]
2387 except KeyError:
2388 error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])
2389
2390 codeObj = format.defineInst(self, t[3], t[5], t.lexer.lineno)
2391 comment = '\n// %s::%s(%s)\n' % (t[1], t[3], t[5])
2392 codeObj.prepend_all(comment)
2393 t[0] = codeObj
2394
2395 # The arg list generates a tuple, where the first element is a
2396 # list of the positional args and the second element is a dict
2397 # containing the keyword args.
2398 def p_arg_list_0(self, t):
2399 'arg_list : positional_arg_list COMMA keyword_arg_list'
2400 t[0] = ( t[1], t[3] )
2401
2402 def p_arg_list_1(self, t):
2403 'arg_list : positional_arg_list'
2404 t[0] = ( t[1], {} )
2405
2406 def p_arg_list_2(self, t):
2407 'arg_list : keyword_arg_list'
2408 t[0] = ( [], t[1] )
2409
2410 def p_positional_arg_list_0(self, t):
2411 'positional_arg_list : empty'
2412 t[0] = []
2413
2414 def p_positional_arg_list_1(self, t):
2415 'positional_arg_list : expr'
2416 t[0] = [t[1]]
2417
2418 def p_positional_arg_list_2(self, t):
2419 'positional_arg_list : positional_arg_list COMMA expr'
2420 t[0] = t[1] + [t[3]]
2421
2422 def p_keyword_arg_list_0(self, t):
2423 'keyword_arg_list : keyword_arg'
2424 t[0] = t[1]
2425
2426 def p_keyword_arg_list_1(self, t):
2427 'keyword_arg_list : keyword_arg_list COMMA keyword_arg'
2428 t[0] = t[1]
2429 t[0].update(t[3])
2430
2431 def p_keyword_arg(self, t):
2432 'keyword_arg : ID EQUALS expr'
2433 t[0] = { t[1] : t[3] }
2434
2435 #
2436 # Basic expressions. These constitute the argument values of
2437 # "function calls" (i.e. instruction definitions in the decode
2438 # block) and default values for formal parameters of format
2439 # functions.
2440 #
2441 # Right now, these are either strings, integers, or (recursively)
2442 # lists of exprs (using Python square-bracket list syntax). Note
2443 # that bare identifiers are treated as string constants here (since
2444 # there isn't really a variable namespace to refer to).
2445 #
2446 def p_expr_0(self, t):
2447 '''expr : ID
2448 | INTLIT
2449 | STRLIT
2450 | CODELIT'''
2451 t[0] = t[1]
2452
2453 def p_expr_1(self, t):
2454 '''expr : LBRACKET list_expr RBRACKET'''
2455 t[0] = t[2]
2456
2457 def p_list_expr_0(self, t):
2458 'list_expr : expr'
2459 t[0] = [t[1]]
2460
2461 def p_list_expr_1(self, t):
2462 'list_expr : list_expr COMMA expr'
2463 t[0] = t[1] + [t[3]]
2464
2465 def p_list_expr_2(self, t):
2466 'list_expr : empty'
2467 t[0] = []
2468
2469 #
2470 # Empty production... use in other rules for readability.
2471 #
2472 def p_empty(self, t):
2473 'empty :'
2474 pass
2475
2476 # Parse error handler. Note that the argument here is the
2477 # offending *token*, not a grammar symbol (hence the need to use
2478 # t.value)
2479 def p_error(self, t):
2480 if t:
2481 error(t.lexer.lineno, "syntax error at '%s'" % t.value)
2482 else:
2483 error("unknown syntax error")
2484
2485 # END OF GRAMMAR RULES
2486
2487 def updateExportContext(self):
2488
2489 # create a continuation that allows us to grab the current parser
2490 def wrapInstObjParams(*args):
2491 return InstObjParams(self, *args)
2492 self.exportContext['InstObjParams'] = wrapInstObjParams
2493 self.exportContext.update(self.templateMap)
2494
2495 def defFormat(self, id, params, code, lineno):
2496 '''Define a new format'''
2497
2498 # make sure we haven't already defined this one
2499 if id in self.formatMap:
2500 error(lineno, 'format %s redefined.' % id)
2501
2502 # create new object and store in global map
2503 self.formatMap[id] = Format(id, params, code)
2504
2505 def protectNonSubstPercents(self, s):
2506 '''Protect any non-dict-substitution '%'s in a format string
2507 (i.e. those not followed by '(')'''
2508
2509 return re.sub(r'%(?!\()', '%%', s)
2510
2511 def buildOperandNameMap(self, user_dict, lineno):
2512 operand_name = {}
2513 for op_name, val in user_dict.iteritems():
2514
2515 # Check if extra attributes have been specified.
2516 if len(val) > 9:
2517 error(lineno, 'error: too many attributes for operand "%s"' %
2518 op_name)
2519
2520 # Pad val with None in case optional args are missing
2521 val += (None, None, None, None)
2522 base_cls_name, dflt_ext, reg_spec, flags, sort_pri, \
2523 read_code, write_code, read_predicate, write_predicate = val[:9]
2524
2525 # Canonical flag structure is a triple of lists, where each list
2526 # indicates the set of flags implied by this operand always, when
2527 # used as a source, and when used as a dest, respectively.
2528 # For simplicity this can be initialized using a variety of fairly
2529 # obvious shortcuts; we convert these to canonical form here.
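# e.g. (flag names for illustration):
#   'IsInteger'                        -> (['IsInteger'], [], [])
#   ['IsInteger', 'IsCC']              -> (['IsInteger', 'IsCC'], [], [])
#   ('IsInteger', 'IsLoad', 'IsStore') -> (['IsInteger'], ['IsLoad'],
#                                          ['IsStore'])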
2530 if not flags:
2531 # no flags specified (e.g., 'None')
2532 flags = ( [], [], [] )
2533 elif isinstance(flags, str):
2534 # a single flag: assumed to be unconditional
2535 flags = ( [ flags ], [], [] )
2536 elif isinstance(flags, list):
2537 # a list of flags: also assumed to be unconditional
2538 flags = ( flags, [], [] )
2539 elif isinstance(flags, tuple):
2540 # it's a tuple: it should be a triple,
2541 # but each item could be a single string or a list
2542 (uncond_flags, src_flags, dest_flags) = flags
2543 flags = (makeList(uncond_flags),
2544 makeList(src_flags), makeList(dest_flags))
2545
2546 # Accumulate attributes of new operand class in tmp_dict
2547 tmp_dict = {}
2548 attrList = ['reg_spec', 'flags', 'sort_pri',
2549 'read_code', 'write_code',
2550 'read_predicate', 'write_predicate']
2551 if dflt_ext:
2552 dflt_ctype = self.operandTypeMap[dflt_ext]
2553 attrList.extend(['dflt_ctype', 'dflt_ext'])
2554 # reg_spec is either just a string or a dictionary
2555 # (for elems of vector)
2556 if isinstance(reg_spec, tuple):
2557 (reg_spec, elem_spec) = reg_spec
2558 if isinstance(elem_spec, str):
2559 attrList.append('elem_spec')
2560 else:
2561 assert(isinstance(elem_spec, dict))
2562 elems = elem_spec
2563 attrList.append('elems')
2564 for attr in attrList:
2565 tmp_dict[attr] = eval(attr)
2566 tmp_dict['base_name'] = op_name
2567
2568 # New class name will be e.g. "IntReg_Ra"
2569 cls_name = base_cls_name + '_' + op_name
2570 # Evaluate string arg to get class object. Note that the
2571 # actual base class for "IntReg" is "IntRegOperand", i.e. we
2572 # have to append "Operand".
2573 try:
2574 base_cls = eval(base_cls_name + 'Operand')
2575 except NameError:
2576 error(lineno,
2577 'error: unknown operand base class "%s"' % base_cls_name)
2578 # The following statement creates a new class called
2579 # <cls_name> as a subclass of <base_cls> with the attributes
2580 # in tmp_dict, just as if we evaluated a class declaration.
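# i.e. roughly equivalent to writing, for a hypothetical 'Ra' operand:
#
#   class IntReg_Ra(IntRegOperand):
#       base_name = 'Ra'
#       reg_spec = 'RA'
#       # ... one attribute per entry in tmp_dict ...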
2581 operand_name[op_name] = type(cls_name, (base_cls,), tmp_dict)
2582
2583 self.operandNameMap = operand_name
2584
2585 # Define operand variables.
2586 operands = user_dict.keys()
2587 # Add the elems defined in the vector operands and
2588 # build a map elem -> vector (used in OperandList)
2589 elem_to_vec = {}
2590 for op in user_dict.keys():
2591 if hasattr(self.operandNameMap[op], 'elems'):
2592 for elem in self.operandNameMap[op].elems.keys():
2593 operands.append(elem)
2594 elem_to_vec[elem] = op
2595 self.elemToVector = elem_to_vec
2596 extensions = self.operandTypeMap.keys()
2597
2598 operandsREString = r'''
2599 (?<!\w) # neg. lookbehind assertion: prevent partial matches
2600 ((%s)(?:_(%s))?) # match: operand with optional '_' then suffix
2601 (?!\w) # neg. lookahead assertion: prevent partial matches
2602 ''' % (string.join(operands, '|'), string.join(extensions, '|'))
2603
2604 self.operandsRE = re.compile(operandsREString, re.MULTILINE|re.VERBOSE)
2605
2606 # Same as operandsREString, but the extension is mandatory, and only
2607 # two groups are returned (base and ext, not the full name as above).
2608 # Used when munging operand names into legal C++ identifiers.
2609 operandsWithExtREString = r'(?<!\w)(%s)_(%s)(?!\w)' \
2610 % (string.join(operands, '|'), string.join(extensions, '|'))
2611
2612 self.operandsWithExtRE = \
2613 re.compile(operandsWithExtREString, re.MULTILINE)
2614
2615 def substMungedOpNames(self, code):
2616 '''Munge operand names in code string to make legal C++
2617 variable names. This means getting rid of the type extension
2618 if any. (Will match the base_name attribute of the Operand object.)'''
2619 return self.operandsWithExtRE.sub(r'\1', code)
2620
2621 def mungeSnippet(self, s):
2622 '''Fix up code snippets for final substitution in templates.'''
2623 if isinstance(s, str):
2624 return self.substMungedOpNames(substBitOps(s))
2625 else:
2626 return s
2627
2628 def open(self, name, bare=False):
2629 '''Open the output file for writing and include scary warning.'''
2630 filename = os.path.join(self.output_dir, name)
2631 f = open(filename, 'w')
2632 if f:
2633 if not bare:
2634 f.write(ISAParser.scaremonger_template % self)
2635 return f
2636
2637 def update(self, file, contents):
2638 '''Update the output file only. Scons should handle the case when
2639 the new contents are unchanged using its built-in hash feature.'''
2640 f = self.open(file)
2641 f.write(contents)
2642 f.close()
2643
2644 # This regular expression matches '##include' directives
2645 includeRE = re.compile(r'^\s*##include\s+"(?P<filename>[^"]*)".*$',
2646 re.MULTILINE)
2647
2648 def replace_include(self, matchobj, dirname):
2649 """Function to replace a matched '##include' directive with the
2650 contents of the specified file (with nested ##includes
2651 replaced recursively). 'matchobj' is an re match object
2652 (from a match of includeRE) and 'dirname' is the directory
2653 relative to which the file path should be resolved."""
2654
2655 fname = matchobj.group('filename')
2656 full_fname = os.path.normpath(os.path.join(dirname, fname))
2657 contents = '##newfile "%s"\n%s\n##endfile\n' % \
2658 (full_fname, self.read_and_flatten(full_fname))
2659 return contents
2660
2661 def read_and_flatten(self, filename):
2662 """Read a file and recursively flatten nested '##include' files."""
2663
2664 current_dir = os.path.dirname(filename)
2665 try:
2666 contents = open(filename).read()
2667 except IOError:
2668 error('Error including file "%s"' % filename)
2669
2670 self.fileNameStack.push(LineTracker(filename))
2671
2672 # Find any includes and include them
2673 def replace(matchobj):
2674 return self.replace_include(matchobj, current_dir)
2675 contents = self.includeRE.sub(replace, contents)
2676
2677 self.fileNameStack.pop()
2678 return contents
2679
2680 AlreadyGenerated = {}
2681
2682 def _parse_isa_desc(self, isa_desc_file):
2683 '''Read in and parse the ISA description.'''
2684
2685 # The build system can end up running the ISA parser twice: once to
2686 # finalize the build dependencies, and then to actually generate
2687 # the files it expects (in src/arch/$ARCH/generated). This code
2688 # doesn't do anything different either time, however; the SCons
2689 # invocations just expect different things. Since this code runs
2690 # within SCons, we can just remember that we've already run and
2691 # not perform a completely unnecessary run, since the ISA parser's
2692 # effect is idempotent.
2693 if isa_desc_file in ISAParser.AlreadyGenerated:
2694 return
2695
2696 # grab the last three path components of isa_desc_file
2697 self.filename = '/'.join(isa_desc_file.split('/')[-3:])
2698
2699 # Read file and (recursively) all included files into a string.
2700 # PLY requires that the input be in a single string so we have to
2701 # do this up front.
2702 isa_desc = self.read_and_flatten(isa_desc_file)
2703
2704 # Initialize lineno tracker
2705 self.lex.lineno = LineTracker(isa_desc_file)
2706
2707 # Parse.
2708 self.parse_string(isa_desc)
2709
2710 ISAParser.AlreadyGenerated[isa_desc_file] = None
2711
2712 def parse_isa_desc(self, *args, **kwargs):
2713 try:
2714 self._parse_isa_desc(*args, **kwargs)
2715 except ISAParserError, e:
2716 print(backtrace(self.fileNameStack))
2717 print("At %s:" % e.lineno)
2718 print(e)
2719 sys.exit(1)
2720
2721 # Called as script: get args from command line.
2722 # Args are: <isa desc file> <output dir>
2723 if __name__ == '__main__':
2724 ISAParser(sys.argv[2]).parse_isa_desc(sys.argv[1])