misc: string.join has been removed in python3
[gem5.git] / src / arch / isa_parser.py
1 # Copyright (c) 2014, 2016, 2018-2019 ARM Limited
2 # All rights reserved
3 #
4 # The license below extends only to copyright in the software and shall
5 # not be construed as granting a license to any other intellectual
6 # property including but not limited to intellectual property relating
7 # to a hardware implementation of the functionality of the software
8 # licensed hereunder. You may use the software subject to the license
9 # terms below provided that you ensure that this notice is replicated
10 # unmodified and in its entirety in all distributions of the software,
11 # modified or unmodified, in source code or in binary form.
12 #
13 # Copyright (c) 2003-2005 The Regents of The University of Michigan
14 # Copyright (c) 2013,2015 Advanced Micro Devices, Inc.
15 # All rights reserved.
16 #
17 # Redistribution and use in source and binary forms, with or without
18 # modification, are permitted provided that the following conditions are
19 # met: redistributions of source code must retain the above copyright
20 # notice, this list of conditions and the following disclaimer;
21 # redistributions in binary form must reproduce the above copyright
22 # notice, this list of conditions and the following disclaimer in the
23 # documentation and/or other materials provided with the distribution;
24 # neither the name of the copyright holders nor the names of its
25 # contributors may be used to endorse or promote products derived from
26 # this software without specific prior written permission.
27 #
28 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
29 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
30 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
31 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
32 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
33 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
34 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
35 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
36 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
37 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
38 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
39
40 from __future__ import with_statement, print_function
41 import os
42 import sys
43 import re
44 import inspect, traceback
45 # get type names
46 from types import *
47
48 from m5.util.grammar import Grammar
49
50 debug=False
51
52 ###################
53 # Utility functions
54
55 #
56 # Indent every line in string 's' by two spaces
57 # (except preprocessor directives).
58 # Used to make nested code blocks look pretty.
59 #
60 def indent(s):
61 return re.sub(r'(?m)^(?!#)', ' ', s)
62
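# For example (illustrative):
#   indent('x = 1;\n#if FOO\ny = 2;')
# returns
#   '  x = 1;\n#if FOO\n  y = 2;'
# (the '#if' line is left alone because it looks like a preprocessor
# directive).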
63 #
64 # Munge a somewhat arbitrarily formatted piece of Python code
65 # (e.g. from a format 'let' block) into something whose indentation
66 # will get by the Python parser.
67 #
68 # The two keys here are that Python will give a syntax error if
69 # there's any whitespace at the beginning of the first line, and that
70 # all lines at the same lexical nesting level must have identical
71 # indentation. Unfortunately the way code literals work, an entire
72 # let block tends to have some initial indentation. Rather than
73 # trying to figure out what that is and strip it off, we prepend 'if
74 # 1:' to make the let code the nested block inside the if (and have
75 # the parser automatically deal with the indentation for us).
76 #
77 # We don't want to do this if (1) the code block is empty or (2) the
78 # first line of the block doesn't have any whitespace at the front.
79
80 def fixPythonIndentation(s):
81 # get rid of blank lines first
82 s = re.sub(r'(?m)^\s*\n', '', s);
83 if (s != '' and re.match(r'[ \t]', s[0])):
84 s = 'if 1:\n' + s
85 return s
86
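# For example (illustrative): a code literal captured from a 'let' block as
#
#   "    x = 1\n    y = 2\n"
#
# comes back as
#
#   "if 1:\n    x = 1\n    y = 2\n"
#
# which the Python parser accepts even though the original lines are
# indented.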
87 class ISAParserError(Exception):
88 """Exception class for parser errors"""
89 def __init__(self, first, second=None):
90 if second is None:
91 self.lineno = 0
92 self.string = first
93 else:
94 self.lineno = first
95 self.string = second
96
97 def __str__(self):
98 return self.string
99
100 def error(*args):
101 raise ISAParserError(*args)
102
103 ####################
104 # Template objects.
105 #
106 # Template objects are format strings that allow substitution from
107 # the attribute spaces of other objects (e.g. InstObjParams instances).
108
109 labelRE = re.compile(r'(?<!%)%\(([^\)]+)\)[sd]')
110
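# For example (illustrative; the label names below are simply attribute
# names that InstObjParams happens to define): in a template body such as
#
#   %(class_name)s::%(class_name)s(ExtMachInst machInst) : %(base_class)s { %(constructor)s }
#
# labelRE picks out 'class_name', 'base_class' and 'constructor', and
# Template.subst() fills them in from the matching attributes of the
# InstObjParams (or dict) argument.  Percent signs that are not part of a
# %(...)s label are protected separately (see protectNonSubstPercents()).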
111 class Template(object):
112 def __init__(self, parser, t):
113 self.parser = parser
114 self.template = t
115
116 def subst(self, d):
117 myDict = None
118
119 # Protect non-Python-dict substitutions (e.g. if there's a printf
120 # in the templated C++ code)
121 template = self.parser.protectNonSubstPercents(self.template)
122
123 # Build a dict ('myDict') to use for the template substitution.
124 # Start with the template namespace. Make a copy since we're
125 # going to modify it.
126 myDict = self.parser.templateMap.copy()
127
128 if isinstance(d, InstObjParams):
129 # If we're dealing with an InstObjParams object, we need
130 # to be a little more sophisticated. The instruction-wide
131 # parameters are already formed, but the parameters which
132 # are only function wide still need to be generated.
133 compositeCode = ''
134
135 myDict.update(d.__dict__)
136 # The "operands" and "snippets" attributes of the InstObjParams
137 # objects are for internal use and not substitution.
138 del myDict['operands']
139 del myDict['snippets']
140
141 snippetLabels = [l for l in labelRE.findall(template)
142 if l in d.snippets]
143
144 snippets = dict([(s, self.parser.mungeSnippet(d.snippets[s]))
145 for s in snippetLabels])
146
147 myDict.update(snippets)
148
149 compositeCode = ' '.join(map(str, snippets.values()))
150
151 # Add in template itself in case it references any
152 # operands explicitly (like Mem)
153 compositeCode += ' ' + template
154
155 operands = SubOperandList(self.parser, compositeCode, d.operands)
156
157 myDict['op_decl'] = operands.concatAttrStrings('op_decl')
158 if operands.readPC or operands.setPC:
159 myDict['op_decl'] += 'TheISA::PCState __parserAutoPCState;\n'
160
161 # In case there are predicated register reads and writes, declare
162 # the variables for register indices. It is assumed that
163 # all the operands in the OperandList are also in the
164 # SubOperandList and in the same order. Otherwise, it is
165 # expected that predication would not be used for the operands.
166 if operands.predRead:
167 myDict['op_decl'] += 'uint8_t _sourceIndex = 0;\n'
168 if operands.predWrite:
169 myDict['op_decl'] += 'uint8_t M5_VAR_USED _destIndex = 0;\n'
170
171 is_src = lambda op: op.is_src
172 is_dest = lambda op: op.is_dest
173
174 myDict['op_src_decl'] = \
175 operands.concatSomeAttrStrings(is_src, 'op_src_decl')
176 myDict['op_dest_decl'] = \
177 operands.concatSomeAttrStrings(is_dest, 'op_dest_decl')
178 if operands.readPC:
179 myDict['op_src_decl'] += \
180 'TheISA::PCState __parserAutoPCState;\n'
181 if operands.setPC:
182 myDict['op_dest_decl'] += \
183 'TheISA::PCState __parserAutoPCState;\n'
184
185 myDict['op_rd'] = operands.concatAttrStrings('op_rd')
186 if operands.readPC:
187 myDict['op_rd'] = '__parserAutoPCState = xc->pcState();\n' + \
188 myDict['op_rd']
189
190 # Compose the op_wb string. If we're going to write back the
191 # PC state because we changed some of its elements, we'll need to
192 # do that as early as possible. That allows later uncoordinated
193 # modifications to the PC to layer appropriately.
194 reordered = list(operands.items)
195 reordered.reverse()
196 op_wb_str = ''
197 pcWbStr = 'xc->pcState(__parserAutoPCState);\n'
198 for op_desc in reordered:
199 if op_desc.isPCPart() and op_desc.is_dest:
200 op_wb_str = op_desc.op_wb + pcWbStr + op_wb_str
201 pcWbStr = ''
202 else:
203 op_wb_str = op_desc.op_wb + op_wb_str
204 myDict['op_wb'] = op_wb_str
205
206 elif isinstance(d, dict):
207 # if the argument is a dictionary, we just use it.
208 myDict.update(d)
209 elif hasattr(d, '__dict__'):
210 # if the argument is an object, we use its attribute map.
211 myDict.update(d.__dict__)
212 else:
213 raise TypeError, "Template.subst() arg must be or have dictionary"
214 return template % myDict
215
216 # Convert to string.
217 def __str__(self):
218 return self.template
219
220 ################
221 # Format object.
222 #
223 # A format object encapsulates an instruction format. It must provide
224 # a defineInst() method that generates the code for an instruction
225 # definition.
226
227 class Format(object):
228 def __init__(self, id, params, code):
229 self.id = id
230 self.params = params
231 label = 'def format ' + id
232 self.user_code = compile(fixPythonIndentation(code), label, 'exec')
233 param_list = ", ".join(params)
234 f = '''def defInst(_code, _context, %s):
235 my_locals = vars().copy()
236 exec _code in _context, my_locals
237 return my_locals\n''' % param_list
238 c = compile(f, label + ' wrapper', 'exec')
239 exec c
240 self.func = defInst
241
242 def defineInst(self, parser, name, args, lineno):
243 parser.updateExportContext()
244 context = parser.exportContext.copy()
245 if len(name):
246 Name = name[0].upper()
247 if len(name) > 1:
248 Name += name[1:]
249 context.update({ 'name' : name, 'Name' : Name })
250 try:
251 vars = self.func(self.user_code, context, *args[0], **args[1])
252 except Exception, exc:
253 if debug:
254 raise
255 error(lineno, 'error defining "%s": %s.' % (name, exc))
256 for k in vars.keys():
257 if k not in ('header_output', 'decoder_output',
258 'exec_output', 'decode_block'):
259 del vars[k]
260 return GenCode(parser, **vars)
261
262 # Special null format to catch an implicit-format instruction
263 # definition outside of any format block.
264 class NoFormat(object):
265 def __init__(self):
266 self.defaultInst = ''
267
268 def defineInst(self, parser, name, args, lineno):
269 error(lineno,
270 'instruction definition "%s" with no active format!' % name)
271
272 ###############
273 # GenCode class
274 #
275 # The GenCode class encapsulates generated code destined for various
276 # output files. The header_output and decoder_output attributes are
277 # strings containing code destined for decoder.hh and decoder.cc
278 # respectively. The decode_block attribute contains code to be
279 # incorporated in the decode function itself (that will also end up in
280 # decoder.cc). The exec_output attribute is the string of code for the
281 # exec.cc file. The has_decode_default attribute is used in the decode block
282 # to allow explicit default clauses to override default default clauses.
283
284 class GenCode(object):
285 # Constructor.
286 def __init__(self, parser,
287 header_output = '', decoder_output = '', exec_output = '',
288 decode_block = '', has_decode_default = False):
289 self.parser = parser
290 self.header_output = header_output
291 self.decoder_output = decoder_output
292 self.exec_output = exec_output
293 self.decode_block = decode_block
294 self.has_decode_default = has_decode_default
295
296 # Write these code chunks out to the filesystem. They will be properly
297 # interwoven by write_top_level_files().
298 def emit(self):
299 if self.header_output:
300 self.parser.get_file('header').write(self.header_output)
301 if self.decoder_output:
302 self.parser.get_file('decoder').write(self.decoder_output)
303 if self.exec_output:
304 self.parser.get_file('exec').write(self.exec_output)
305 if self.decode_block:
306 self.parser.get_file('decode_block').write(self.decode_block)
307
308 # Override '+' operator: generate a new GenCode object that
309 # concatenates all the individual strings in the operands.
310 def __add__(self, other):
311 return GenCode(self.parser,
312 self.header_output + other.header_output,
313 self.decoder_output + other.decoder_output,
314 self.exec_output + other.exec_output,
315 self.decode_block + other.decode_block,
316 self.has_decode_default or other.has_decode_default)
317
318 # Prepend a string (typically a comment) to all the strings.
319 def prepend_all(self, pre):
320 self.header_output = pre + self.header_output
321 self.decoder_output = pre + self.decoder_output
322 self.decode_block = pre + self.decode_block
323 self.exec_output = pre + self.exec_output
324
325 # Wrap the decode block in a pair of strings (e.g., 'case foo:'
326 # and 'break;'). Used to build the big nested switch statement.
327 def wrap_decode_block(self, pre, post = ''):
328 self.decode_block = pre + indent(self.decode_block) + post
329
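# For example (illustrative; 'parser', 'decl', 'decode' and 'exec_code'
# stand in for an ISAParser instance and generated C++ strings):
#
#   code = GenCode(parser, header_output=decl, decode_block=decode)
#   code += GenCode(parser, exec_output=exec_code)
#   code.wrap_decode_block('case 0x1:\n', 'break;\n')
#
# After this, code.decode_block is the original decode snippet, indented by
# indent() and bracketed by the 'case' label and the trailing 'break;'.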
330 #####################################################################
331 #
332 # Bitfield Operator Support
333 #
334 #####################################################################
335
336 bitOp1ArgRE = re.compile(r'<\s*(\w+)\s*:\s*>')
337
338 bitOpWordRE = re.compile(r'(?<![\w\.])([\w\.]+)<\s*(\w+)\s*:\s*(\w+)\s*>')
339 bitOpExprRE = re.compile(r'\)<\s*(\w+)\s*:\s*(\w+)\s*>')
340
341 def substBitOps(code):
342 # first convert single-bit selectors to two-index form
343 # i.e., <n:> --> <n:n>
344 code = bitOp1ArgRE.sub(r'<\1:\1>', code)
345 # simple case: selector applied to ID (name)
346 # i.e., foo<a:b> --> bits(foo, a, b)
347 code = bitOpWordRE.sub(r'bits(\1, \2, \3)', code)
348 # if selector is applied to expression (ending in ')'),
349 # we need to search backward for matching '('
350 match = bitOpExprRE.search(code)
351 while match:
352 exprEnd = match.start()
353 here = exprEnd - 1
354 nestLevel = 1
355 while nestLevel > 0:
356 if code[here] == '(':
357 nestLevel -= 1
358 elif code[here] == ')':
359 nestLevel += 1
360 here -= 1
361 if here < 0:
362 sys.exit("Didn't find '('!")
363 exprStart = here+1
364 newExpr = r'bits(%s, %s, %s)' % (code[exprStart:exprEnd+1],
365 match.group(1), match.group(2))
366 code = code[:exprStart] + newExpr + code[match.end():]
367 match = bitOpExprRE.search(code)
368 return code
369
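# For example (illustrative; note that the single-bit form carries a
# trailing colon, as required by bitOp1ArgRE):
#
#   substBitOps('machInst<3:>')          -> 'bits(machInst, 3, 3)'
#   substBitOps('Ra<15:12>')             -> 'bits(Ra, 15, 12)'
#   substBitOps('x = (Rb + Rc)<7:0>;')   -> 'x = bits((Rb + Rc), 7, 0);'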
370
371 #####################################################################
372 #
373 # Code Parser
374 #
375 # The remaining code is the support for automatically extracting
376 # instruction characteristics from pseudocode.
377 #
378 #####################################################################
379
380 # Force the argument to be a list. Useful for flags, where a caller
381 # can specify a singleton flag or a list of flags. Also useful for
382 # converting tuples to lists so they can be modified.
383 def makeList(arg):
384 if isinstance(arg, list):
385 return arg
386 elif isinstance(arg, tuple):
387 return list(arg)
388 elif not arg:
389 return []
390 else:
391 return [ arg ]
392
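# For example (illustrative):
#   makeList('IsInteger')   -> ['IsInteger']
#   makeList(('a', 'b'))    -> ['a', 'b']
#   makeList(None)          -> []
#   makeList(['x', 'y'])    -> ['x', 'y']   (returned as-is)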
393 class Operand(object):
394 '''Base class for operand descriptors. An instance of this class
395 (or actually a class derived from this one) represents a specific
396 operand for a code block (e.g., "Rc.sq" as a dest). Intermediate
397 derived classes encapsulate the traits of a particular operand
398 type (e.g., "32-bit integer register").'''
399
400 def buildReadCode(self, func = None):
401 subst_dict = {"name": self.base_name,
402 "func": func,
403 "reg_idx": self.reg_spec,
404 "ctype": self.ctype}
405 if hasattr(self, 'src_reg_idx'):
406 subst_dict['op_idx'] = self.src_reg_idx
407 code = self.read_code % subst_dict
408 return '%s = %s;\n' % (self.base_name, code)
409
410 def buildWriteCode(self, func = None):
411 subst_dict = {"name": self.base_name,
412 "func": func,
413 "reg_idx": self.reg_spec,
414 "ctype": self.ctype,
415 "final_val": self.base_name}
416 if hasattr(self, 'dest_reg_idx'):
417 subst_dict['op_idx'] = self.dest_reg_idx
418 code = self.write_code % subst_dict
419 return '''
420 {
421 %s final_val = %s;
422 %s;
423 if (traceData) { traceData->setData(final_val); }
424 }''' % (self.dflt_ctype, self.base_name, code)
425
426 def __init__(self, parser, full_name, ext, is_src, is_dest):
427 self.full_name = full_name
428 self.ext = ext
429 self.is_src = is_src
430 self.is_dest = is_dest
431 # The 'effective extension' (eff_ext) is either the actual
432 # extension, if one was explicitly provided, or the default.
433 if ext:
434 self.eff_ext = ext
435 elif hasattr(self, 'dflt_ext'):
436 self.eff_ext = self.dflt_ext
437
438 if hasattr(self, 'eff_ext'):
439 self.ctype = parser.operandTypeMap[self.eff_ext]
440
441 # Finalize additional fields (primarily code fields). This step
442 # is done separately since some of these fields may depend on the
443 # register index enumeration that hasn't been performed yet at the
444 # time of __init__(). The register index enumeration is affected
445 # by predicated register reads/writes. Hence, we forward the flags
446 # that indicate whether or not predication is in use.
447 def finalize(self, predRead, predWrite):
448 self.flags = self.getFlags()
449 self.constructor = self.makeConstructor(predRead, predWrite)
450 self.op_decl = self.makeDecl()
451
452 if self.is_src:
453 self.op_rd = self.makeRead(predRead)
454 self.op_src_decl = self.makeDecl()
455 else:
456 self.op_rd = ''
457 self.op_src_decl = ''
458
459 if self.is_dest:
460 self.op_wb = self.makeWrite(predWrite)
461 self.op_dest_decl = self.makeDecl()
462 else:
463 self.op_wb = ''
464 self.op_dest_decl = ''
465
466 def isMem(self):
467 return 0
468
469 def isReg(self):
470 return 0
471
472 def isFloatReg(self):
473 return 0
474
475 def isIntReg(self):
476 return 0
477
478 def isCCReg(self):
479 return 0
480
481 def isControlReg(self):
482 return 0
483
484 def isVecReg(self):
485 return 0
486
487 def isVecElem(self):
488 return 0
489
490 def isVecPredReg(self):
491 return 0
492
493 def isPCState(self):
494 return 0
495
496 def isPCPart(self):
497 return self.isPCState() and self.reg_spec
498
499 def hasReadPred(self):
500 return self.read_predicate != None
501
502 def hasWritePred(self):
503 return self.write_predicate != None
504
505 def getFlags(self):
506 # note the empty slice '[:]' gives us a copy of self.flags[0]
507 # instead of a reference to it
508 my_flags = self.flags[0][:]
509 if self.is_src:
510 my_flags += self.flags[1]
511 if self.is_dest:
512 my_flags += self.flags[2]
513 return my_flags
514
515 def makeDecl(self):
516 # Note that initializations in the declarations are solely
517 # to avoid 'uninitialized variable' errors from the compiler.
518 return self.ctype + ' ' + self.base_name + ' = 0;\n';
519
520
521 src_reg_constructor = '\n\t_srcRegIdx[_numSrcRegs++] = RegId(%s, %s);'
522 dst_reg_constructor = '\n\t_destRegIdx[_numDestRegs++] = RegId(%s, %s);'
523
524
525 class IntRegOperand(Operand):
526 reg_class = 'IntRegClass'
527
528 def isReg(self):
529 return 1
530
531 def isIntReg(self):
532 return 1
533
534 def makeConstructor(self, predRead, predWrite):
535 c_src = ''
536 c_dest = ''
537
538 if self.is_src:
539 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
540 if self.hasReadPred():
541 c_src = '\n\tif (%s) {%s\n\t}' % \
542 (self.read_predicate, c_src)
543
544 if self.is_dest:
545 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
546 c_dest += '\n\t_numIntDestRegs++;'
547 if self.hasWritePred():
548 c_dest = '\n\tif (%s) {%s\n\t}' % \
549 (self.write_predicate, c_dest)
550
551 return c_src + c_dest
552
553 def makeRead(self, predRead):
554 if (self.ctype == 'float' or self.ctype == 'double'):
555 error('Attempt to read integer register as FP')
556 if self.read_code != None:
557 return self.buildReadCode('readIntRegOperand')
558
559 int_reg_val = ''
560 if predRead:
561 int_reg_val = 'xc->readIntRegOperand(this, _sourceIndex++)'
562 if self.hasReadPred():
563 int_reg_val = '(%s) ? %s : 0' % \
564 (self.read_predicate, int_reg_val)
565 else:
566 int_reg_val = 'xc->readIntRegOperand(this, %d)' % self.src_reg_idx
567
568 return '%s = %s;\n' % (self.base_name, int_reg_val)
569
570 def makeWrite(self, predWrite):
571 if (self.ctype == 'float' or self.ctype == 'double'):
572 error('Attempt to write integer register as FP')
573 if self.write_code != None:
574 return self.buildWriteCode('setIntRegOperand')
575
576 if predWrite:
577 wp = 'true'
578 if self.hasWritePred():
579 wp = self.write_predicate
580
581 wcond = 'if (%s)' % (wp)
582 windex = '_destIndex++'
583 else:
584 wcond = ''
585 windex = '%d' % self.dest_reg_idx
586
587 wb = '''
588 %s
589 {
590 %s final_val = %s;
591 xc->setIntRegOperand(this, %s, final_val);\n
592 if (traceData) { traceData->setData(final_val); }
593 }''' % (wcond, self.ctype, self.base_name, windex)
594
595 return wb
596
597 class FloatRegOperand(Operand):
598 reg_class = 'FloatRegClass'
599
600 def isReg(self):
601 return 1
602
603 def isFloatReg(self):
604 return 1
605
606 def makeConstructor(self, predRead, predWrite):
607 c_src = ''
608 c_dest = ''
609
610 if self.is_src:
611 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
612
613 if self.is_dest:
614 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
615 c_dest += '\n\t_numFPDestRegs++;'
616
617 return c_src + c_dest
618
619 def makeRead(self, predRead):
620 if self.read_code != None:
621 return self.buildReadCode('readFloatRegOperandBits')
622
623 if predRead:
624 rindex = '_sourceIndex++'
625 else:
626 rindex = '%d' % self.src_reg_idx
627
628 code = 'xc->readFloatRegOperandBits(this, %s)' % rindex
629 if self.ctype == 'float':
630 code = 'bitsToFloat32(%s)' % code
631 elif self.ctype == 'double':
632 code = 'bitsToFloat64(%s)' % code
633 return '%s = %s;\n' % (self.base_name, code)
634
635 def makeWrite(self, predWrite):
636 if self.write_code != None:
637 return self.buildWriteCode('setFloatRegOperandBits')
638
639 if predWrite:
640 wp = '_destIndex++'
641 else:
642 wp = '%d' % self.dest_reg_idx
643
644 val = 'final_val'
645 if self.ctype == 'float':
646 val = 'floatToBits32(%s)' % val
647 elif self.ctype == 'double':
648 val = 'floatToBits64(%s)' % val
649
650 wp = 'xc->setFloatRegOperandBits(this, %s, %s);' % (wp, val)
651
652 wb = '''
653 {
654 %s final_val = %s;
655 %s\n
656 if (traceData) { traceData->setData(final_val); }
657 }''' % (self.ctype, self.base_name, wp)
658 return wb
659
660 class VecRegOperand(Operand):
661 reg_class = 'VecRegClass'
662
663 def __init__(self, parser, full_name, ext, is_src, is_dest):
664 Operand.__init__(self, parser, full_name, ext, is_src, is_dest)
665 self.elemExt = None
666 self.parser = parser
667
668 def isReg(self):
669 return 1
670
671 def isVecReg(self):
672 return 1
673
674 def makeDeclElem(self, elem_op):
675 (elem_name, elem_ext) = elem_op
676 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
677 if elem_ext:
678 ext = elem_ext
679 else:
680 ext = dflt_elem_ext
681 ctype = self.parser.operandTypeMap[ext]
682 return '\n\t%s %s = 0;' % (ctype, elem_name)
683
684 def makeDecl(self):
685 if not self.is_dest and self.is_src:
686 c_decl = '\t/* Vars for %s*/' % (self.base_name)
687 if hasattr(self, 'active_elems'):
688 if self.active_elems:
689 for elem in self.active_elems:
690 c_decl += self.makeDeclElem(elem)
691 return c_decl + '\t/* End vars for %s */\n' % (self.base_name)
692 else:
693 return ''
694
695 def makeConstructor(self, predRead, predWrite):
696 c_src = ''
697 c_dest = ''
698
699 numAccessNeeded = 1
700
701 if self.is_src:
702 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
703
704 if self.is_dest:
705 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
706 c_dest += '\n\t_numVecDestRegs++;'
707
708 return c_src + c_dest
709
710 # Read destination register to write
711 def makeReadWElem(self, elem_op):
712 (elem_name, elem_ext) = elem_op
713 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
714 if elem_ext:
715 ext = elem_ext
716 else:
717 ext = dflt_elem_ext
718 ctype = self.parser.operandTypeMap[ext]
719 c_read = '\t\t%s& %s = %s[%s];\n' % \
720 (ctype, elem_name, self.base_name, elem_spec)
721 return c_read
722
723 def makeReadW(self, predWrite):
724 func = 'getWritableVecRegOperand'
725 if self.read_code != None:
726 return self.buildReadCode(func)
727
728 if predWrite:
729 rindex = '_destIndex++'
730 else:
731 rindex = '%d' % self.dest_reg_idx
732
733 c_readw = '\t\t%s& tmp_d%s = xc->%s(this, %s);\n'\
734 % ('TheISA::VecRegContainer', rindex, func, rindex)
735 if self.elemExt:
736 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (self.base_name,
737 rindex, self.parser.operandTypeMap[self.elemExt])
738 if self.ext:
739 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (self.base_name,
740 rindex, self.parser.operandTypeMap[self.ext])
741 if hasattr(self, 'active_elems'):
742 if self.active_elems:
743 for elem in self.active_elems:
744 c_readw += self.makeReadWElem(elem)
745 return c_readw
746
747 # Normal source operand read
748 def makeReadElem(self, elem_op, name):
749 (elem_name, elem_ext) = elem_op
750 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
751
752 if elem_ext:
753 ext = elem_ext
754 else:
755 ext = dflt_elem_ext
756 ctype = self.parser.operandTypeMap[ext]
757 c_read = '\t\t%s = %s[%s];\n' % \
758 (elem_name, name, elem_spec)
759 return c_read
760
761 def makeRead(self, predRead):
762 func = 'readVecRegOperand'
763 if self.read_code != None:
764 return self.buildReadCode(func)
765
766 if predRead:
767 rindex = '_sourceIndex++'
768 else:
769 rindex = '%d' % self.src_reg_idx
770
771 name = self.base_name
772 if self.is_dest and self.is_src:
773 name += '_merger'
774
775 c_read = '\t\t%s& tmp_s%s = xc->%s(this, %s);\n' \
776 % ('const TheISA::VecRegContainer', rindex, func, rindex)
777 # If the parser has detected that elements are being accessed, create
778 # the appropriate view
779 if self.elemExt:
780 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % \
781 (name, rindex, self.parser.operandTypeMap[self.elemExt])
782 if self.ext:
783 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % \
784 (name, rindex, self.parser.operandTypeMap[self.ext])
785 if hasattr(self, 'active_elems'):
786 if self.active_elems:
787 for elem in self.active_elems:
788 c_read += self.makeReadElem(elem, name)
789 return c_read
790
791 def makeWrite(self, predWrite):
792 func = 'setVecRegOperand'
793 if self.write_code != None:
794 return self.buildWriteCode(func)
795
796 wb = '''
797 if (traceData) {
798 traceData->setData(tmp_d%d);
799 }
800 ''' % self.dest_reg_idx
801 return wb
802
803 def finalize(self, predRead, predWrite):
804 super(VecRegOperand, self).finalize(predRead, predWrite)
805 if self.is_dest:
806 self.op_rd = self.makeReadW(predWrite) + self.op_rd
807
808 class VecElemOperand(Operand):
809 reg_class = 'VecElemClass'
810
811 def isReg(self):
812 return 1
813
814 def isVecElem(self):
815 return 1
816
817 def makeDecl(self):
818 if self.is_dest and not self.is_src:
819 return '\n\t%s %s;' % (self.ctype, self.base_name)
820 else:
821 return ''
822
823 def makeConstructor(self, predRead, predWrite):
824 c_src = ''
825 c_dest = ''
826
827 numAccessNeeded = 1
828
829 if self.is_src:
830 c_src = ('\n\t_srcRegIdx[_numSrcRegs++] = RegId(%s, %s, %s);' %
831 (self.reg_class, self.reg_spec, self.elem_spec))
832
833 if self.is_dest:
834 c_dest = ('\n\t_destRegIdx[_numDestRegs++] = RegId(%s, %s, %s);' %
835 (self.reg_class, self.reg_spec, self.elem_spec))
836 c_dest += '\n\t_numVecElemDestRegs++;'
837 return c_src + c_dest
838
839 def makeRead(self, predRead):
840 c_read = 'xc->readVecElemOperand(this, %d)' % self.src_reg_idx
841
842 if self.ctype == 'float':
843 c_read = 'bitsToFloat32(%s)' % c_read
844 elif self.ctype == 'double':
845 c_read = 'bitsToFloat64(%s)' % c_read
846
847 return '\n\t%s %s = %s;\n' % (self.ctype, self.base_name, c_read)
848
849 def makeWrite(self, predWrite):
850 if self.ctype == 'float':
851 c_write = 'floatToBits32(%s)' % self.base_name
852 elif self.ctype == 'double':
853 c_write = 'floatToBits64(%s)' % self.base_name
854 else:
855 c_write = self.base_name
856
857 c_write = ('\n\txc->setVecElemOperand(this, %d, %s);' %
858 (self.dest_reg_idx, c_write))
859
860 return c_write
861
862 class VecPredRegOperand(Operand):
863 reg_class = 'VecPredRegClass'
864
865 def __init__(self, parser, full_name, ext, is_src, is_dest):
866 Operand.__init__(self, parser, full_name, ext, is_src, is_dest)
867 self.parser = parser
868
869 def isReg(self):
870 return 1
871
872 def isVecPredReg(self):
873 return 1
874
875 def makeDecl(self):
876 return ''
877
878 def makeConstructor(self, predRead, predWrite):
879 c_src = ''
880 c_dest = ''
881
882 if self.is_src:
883 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
884
885 if self.is_dest:
886 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
887 c_dest += '\n\t_numVecPredDestRegs++;'
888
889 return c_src + c_dest
890
891 def makeRead(self, predRead):
892 func = 'readVecPredRegOperand'
893 if self.read_code != None:
894 return self.buildReadCode(func)
895
896 if predRead:
897 rindex = '_sourceIndex++'
898 else:
899 rindex = '%d' % self.src_reg_idx
900
901 c_read = '\t\t%s& tmp_s%s = xc->%s(this, %s);\n' % (
902 'const TheISA::VecPredRegContainer', rindex, func, rindex)
903 if self.ext:
904 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % (
905 self.base_name, rindex,
906 self.parser.operandTypeMap[self.ext])
907 return c_read
908
909 def makeReadW(self, predWrite):
910 func = 'getWritableVecPredRegOperand'
911 if self.read_code != None:
912 return self.buildReadCode(func)
913
914 if predWrite:
915 rindex = '_destIndex++'
916 else:
917 rindex = '%d' % self.dest_reg_idx
918
919 c_readw = '\t\t%s& tmp_d%s = xc->%s(this, %s);\n' % (
920 'TheISA::VecPredRegContainer', rindex, func, rindex)
921 if self.ext:
922 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (
923 self.base_name, rindex,
924 self.parser.operandTypeMap[self.ext])
925 return c_readw
926
927 def makeWrite(self, predWrite):
928 func = 'setVecPredRegOperand'
929 if self.write_code != None:
930 return self.buildWriteCode(func)
931
932 wb = '''
933 if (traceData) {
934 traceData->setData(tmp_d%d);
935 }
936 ''' % self.dest_reg_idx
937 return wb
938
939 def finalize(self, predRead, predWrite):
940 super(VecPredRegOperand, self).finalize(predRead, predWrite)
941 if self.is_dest:
942 self.op_rd = self.makeReadW(predWrite) + self.op_rd
943
944 class CCRegOperand(Operand):
945 reg_class = 'CCRegClass'
946
947 def isReg(self):
948 return 1
949
950 def isCCReg(self):
951 return 1
952
953 def makeConstructor(self, predRead, predWrite):
954 c_src = ''
955 c_dest = ''
956
957 if self.is_src:
958 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
959 if self.hasReadPred():
960 c_src = '\n\tif (%s) {%s\n\t}' % \
961 (self.read_predicate, c_src)
962
963 if self.is_dest:
964 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
965 c_dest += '\n\t_numCCDestRegs++;'
966 if self.hasWritePred():
967 c_dest = '\n\tif (%s) {%s\n\t}' % \
968 (self.write_predicate, c_dest)
969
970 return c_src + c_dest
971
972 def makeRead(self, predRead):
973 if (self.ctype == 'float' or self.ctype == 'double'):
974 error('Attempt to read condition-code register as FP')
975 if self.read_code != None:
976 return self.buildReadCode('readCCRegOperand')
977
978 int_reg_val = ''
979 if predRead:
980 int_reg_val = 'xc->readCCRegOperand(this, _sourceIndex++)'
981 if self.hasReadPred():
982 int_reg_val = '(%s) ? %s : 0' % \
983 (self.read_predicate, int_reg_val)
984 else:
985 int_reg_val = 'xc->readCCRegOperand(this, %d)' % self.src_reg_idx
986
987 return '%s = %s;\n' % (self.base_name, int_reg_val)
988
989 def makeWrite(self, predWrite):
990 if (self.ctype == 'float' or self.ctype == 'double'):
991 error('Attempt to write condition-code register as FP')
992 if self.write_code != None:
993 return self.buildWriteCode('setCCRegOperand')
994
995 if predWrite:
996 wp = 'true'
997 if self.hasWritePred():
998 wp = self.write_predicate
999
1000 wcond = 'if (%s)' % (wp)
1001 windex = '_destIndex++'
1002 else:
1003 wcond = ''
1004 windex = '%d' % self.dest_reg_idx
1005
1006 wb = '''
1007 %s
1008 {
1009 %s final_val = %s;
1010 xc->setCCRegOperand(this, %s, final_val);\n
1011 if (traceData) { traceData->setData(final_val); }
1012 }''' % (wcond, self.ctype, self.base_name, windex)
1013
1014 return wb
1015
1016 class ControlRegOperand(Operand):
1017 reg_class = 'MiscRegClass'
1018
1019 def isReg(self):
1020 return 1
1021
1022 def isControlReg(self):
1023 return 1
1024
1025 def makeConstructor(self, predRead, predWrite):
1026 c_src = ''
1027 c_dest = ''
1028
1029 if self.is_src:
1030 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
1031
1032 if self.is_dest:
1033 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
1034
1035 return c_src + c_dest
1036
1037 def makeRead(self, predRead):
1038 bit_select = 0
1039 if (self.ctype == 'float' or self.ctype == 'double'):
1040 error('Attempt to read control register as FP')
1041 if self.read_code != None:
1042 return self.buildReadCode('readMiscRegOperand')
1043
1044 if predRead:
1045 rindex = '_sourceIndex++'
1046 else:
1047 rindex = '%d' % self.src_reg_idx
1048
1049 return '%s = xc->readMiscRegOperand(this, %s);\n' % \
1050 (self.base_name, rindex)
1051
1052 def makeWrite(self, predWrite):
1053 if (self.ctype == 'float' or self.ctype == 'double'):
1054 error('Attempt to write control register as FP')
1055 if self.write_code != None:
1056 return self.buildWriteCode('setMiscRegOperand')
1057
1058 if predWrite:
1059 windex = '_destIndex++'
1060 else:
1061 windex = '%d' % self.dest_reg_idx
1062
1063 wb = 'xc->setMiscRegOperand(this, %s, %s);\n' % \
1064 (windex, self.base_name)
1065 wb += 'if (traceData) { traceData->setData(%s); }' % \
1066 self.base_name
1067
1068 return wb
1069
1070 class MemOperand(Operand):
1071 def isMem(self):
1072 return 1
1073
1074 def makeConstructor(self, predRead, predWrite):
1075 return ''
1076
1077 def makeDecl(self):
1078 # Declare memory data variable.
1079 return '%s %s;\n' % (self.ctype, self.base_name)
1080
1081 def makeRead(self, predRead):
1082 if self.read_code != None:
1083 return self.buildReadCode()
1084 return ''
1085
1086 def makeWrite(self, predWrite):
1087 if self.write_code != None:
1088 return self.buildWriteCode()
1089 return ''
1090
1091 class PCStateOperand(Operand):
1092 def makeConstructor(self, predRead, predWrite):
1093 return ''
1094
1095 def makeRead(self, predRead):
1096 if self.reg_spec:
1097 # A component of the PC state.
1098 return '%s = __parserAutoPCState.%s();\n' % \
1099 (self.base_name, self.reg_spec)
1100 else:
1101 # The whole PC state itself.
1102 return '%s = xc->pcState();\n' % self.base_name
1103
1104 def makeWrite(self, predWrite):
1105 if self.reg_spec:
1106 # A component of the PC state.
1107 return '__parserAutoPCState.%s(%s);\n' % \
1108 (self.reg_spec, self.base_name)
1109 else:
1110 # The whole PC state itself.
1111 return 'xc->pcState(%s);\n' % self.base_name
1112
1113 def makeDecl(self):
1114 ctype = 'TheISA::PCState'
1115 if self.isPCPart():
1116 ctype = self.ctype
1117 # Note that initializations in the declarations are solely
1118 # to avoid 'uninitialized variable' errors from the compiler.
1119 return '%s %s = 0;\n' % (ctype, self.base_name)
1120
1121 def isPCState(self):
1122 return 1
1123
1124 class OperandList(object):
1125 '''Find all the operands in the given code block. Returns an operand
1126 descriptor list (instance of class OperandList).'''
1127 def __init__(self, parser, code):
1128 self.items = []
1129 self.bases = {}
1130 # delete strings and comments so we don't match on operands inside
1131 for regEx in (stringRE, commentRE):
1132 code = regEx.sub('', code)
1133 # search for operands
1134 next_pos = 0
1135 while 1:
1136 match = parser.operandsRE.search(code, next_pos)
1137 if not match:
1138 # no more matches: we're done
1139 break
1140 op = match.groups()
1141 # regexp groups are operand full name, base, and extension
1142 (op_full, op_base, op_ext) = op
1143 # If this is an elem operand, define or update the corresponding
1144 # vector operand
1145 isElem = False
1146 if op_base in parser.elemToVector:
1147 isElem = True
1148 elem_op = (op_base, op_ext)
1149 op_base = parser.elemToVector[op_base]
1150 op_ext = '' # use the default one
1151 # if the token following the operand is an assignment, this is
1152 # a destination (LHS), else it's a source (RHS)
1153 is_dest = (assignRE.match(code, match.end()) != None)
1154 is_src = not is_dest
1155
1156 # see if we've already seen this one
1157 op_desc = self.find_base(op_base)
1158 if op_desc:
1159 if op_ext and op_ext != '' and op_desc.ext != op_ext:
1160 error ('Inconsistent extensions for operand %s: %s - %s' \
1161 % (op_base, op_desc.ext, op_ext))
1162 op_desc.is_src = op_desc.is_src or is_src
1163 op_desc.is_dest = op_desc.is_dest or is_dest
1164 if isElem:
1165 (elem_base, elem_ext) = elem_op
1166 found = False
1167 for ae in op_desc.active_elems:
1168 (ae_base, ae_ext) = ae
1169 if ae_base == elem_base:
1170 if ae_ext != elem_ext:
1171 error('Inconsistent extensions for elem'
1172 ' operand %s' % elem_base)
1173 else:
1174 found = True
1175 if not found:
1176 op_desc.active_elems.append(elem_op)
1177 else:
1178 # new operand: create new descriptor
1179 op_desc = parser.operandNameMap[op_base](parser,
1180 op_full, op_ext, is_src, is_dest)
1181 # if operand is a vector elem, add the corresponding vector
1182 # operand if not already done
1183 if isElem:
1184 op_desc.elemExt = elem_op[1]
1185 op_desc.active_elems = [elem_op]
1186 self.append(op_desc)
1187 # start next search after end of current match
1188 next_pos = match.end()
1189 self.sort()
1190 # enumerate source & dest register operands... used in building
1191 # constructor later
1192 self.numSrcRegs = 0
1193 self.numDestRegs = 0
1194 self.numFPDestRegs = 0
1195 self.numIntDestRegs = 0
1196 self.numVecDestRegs = 0
1197 self.numVecPredDestRegs = 0
1198 self.numCCDestRegs = 0
1199 self.numMiscDestRegs = 0
1200 self.memOperand = None
1201
1202 # Flags to keep track if one or more operands are to be read/written
1203 # conditionally.
1204 self.predRead = False
1205 self.predWrite = False
1206
1207 for op_desc in self.items:
1208 if op_desc.isReg():
1209 if op_desc.is_src:
1210 op_desc.src_reg_idx = self.numSrcRegs
1211 self.numSrcRegs += 1
1212 if op_desc.is_dest:
1213 op_desc.dest_reg_idx = self.numDestRegs
1214 self.numDestRegs += 1
1215 if op_desc.isFloatReg():
1216 self.numFPDestRegs += 1
1217 elif op_desc.isIntReg():
1218 self.numIntDestRegs += 1
1219 elif op_desc.isVecReg():
1220 self.numVecDestRegs += 1
1221 elif op_desc.isVecPredReg():
1222 self.numVecPredDestRegs += 1
1223 elif op_desc.isCCReg():
1224 self.numCCDestRegs += 1
1225 elif op_desc.isControlReg():
1226 self.numMiscDestRegs += 1
1227 elif op_desc.isMem():
1228 if self.memOperand:
1229 error("Code block has more than one memory operand.")
1230 self.memOperand = op_desc
1231
1232 # Check if this operand has read/write predication. If true, then
1233 # the microop will dynamically index source/dest registers.
1234 self.predRead = self.predRead or op_desc.hasReadPred()
1235 self.predWrite = self.predWrite or op_desc.hasWritePred()
1236
1237 if parser.maxInstSrcRegs < self.numSrcRegs:
1238 parser.maxInstSrcRegs = self.numSrcRegs
1239 if parser.maxInstDestRegs < self.numDestRegs:
1240 parser.maxInstDestRegs = self.numDestRegs
1241 if parser.maxMiscDestRegs < self.numMiscDestRegs:
1242 parser.maxMiscDestRegs = self.numMiscDestRegs
1243
1244 # now make a final pass to finalize op_desc fields that may depend
1245 # on the register enumeration
1246 for op_desc in self.items:
1247 op_desc.finalize(self.predRead, self.predWrite)
1248
1249 def __len__(self):
1250 return len(self.items)
1251
1252 def __getitem__(self, index):
1253 return self.items[index]
1254
1255 def append(self, op_desc):
1256 self.items.append(op_desc)
1257 self.bases[op_desc.base_name] = op_desc
1258
1259 def find_base(self, base_name):
1260 # like self.bases[base_name], but returns None if not found
1261 # (rather than raising exception)
1262 return self.bases.get(base_name)
1263
1264 # internal helper function for concat[Some]Attr{Strings|Lists}
1265 def __internalConcatAttrs(self, attr_name, filter, result):
1266 for op_desc in self.items:
1267 if filter(op_desc):
1268 result += getattr(op_desc, attr_name)
1269 return result
1270
1271 # return a single string that is the concatenation of the (string)
1272 # values of the specified attribute for all operands
1273 def concatAttrStrings(self, attr_name):
1274 return self.__internalConcatAttrs(attr_name, lambda x: 1, '')
1275
1276 # like concatAttrStrings, but only include the values for the operands
1277 # for which the provided filter function returns true
1278 def concatSomeAttrStrings(self, filter, attr_name):
1279 return self.__internalConcatAttrs(attr_name, filter, '')
1280
1281 # return a single list that is the concatenation of the (list)
1282 # values of the specified attribute for all operands
1283 def concatAttrLists(self, attr_name):
1284 return self.__internalConcatAttrs(attr_name, lambda x: 1, [])
1285
1286 # like concatAttrLists, but only include the values for the operands
1287 # for which the provided filter function returns true
1288 def concatSomeAttrLists(self, filter, attr_name):
1289 return self.__internalConcatAttrs(attr_name, filter, [])
1290
1291 def sort(self):
1292 self.items.sort(lambda a, b: a.sort_pri - b.sort_pri)
1293
1294 class SubOperandList(OperandList):
1295 '''Find all the operands in the given code block. Returns an operand
1296 descriptor list (instance of class OperandList).'''
1297 def __init__(self, parser, code, master_list):
1298 self.items = []
1299 self.bases = {}
1300 # delete strings and comments so we don't match on operands inside
1301 for regEx in (stringRE, commentRE):
1302 code = regEx.sub('', code)
1303 # search for operands
1304 next_pos = 0
1305 while 1:
1306 match = parser.operandsRE.search(code, next_pos)
1307 if not match:
1308 # no more matches: we're done
1309 break
1310 op = match.groups()
1311 # regexp groups are operand full name, base, and extension
1312 (op_full, op_base, op_ext) = op
1313 # If this is an elem operand, define or update the corresponding
1314 # vector operand
1315 if op_base in parser.elemToVector:
1316 elem_op = op_base
1317 op_base = parser.elemToVector[elem_op]
1318 # find this op in the master list
1319 op_desc = master_list.find_base(op_base)
1320 if not op_desc:
1321 error('Found operand %s which is not in the master list!'
1322 % op_base)
1323 else:
1324 # See if we've already found this operand
1325 op_desc = self.find_base(op_base)
1326 if not op_desc:
1327 # if not, add a reference to it to this sub list
1328 self.append(master_list.bases[op_base])
1329
1330 # start next search after end of current match
1331 next_pos = match.end()
1332 self.sort()
1333 self.memOperand = None
1334 # Whether the whole PC needs to be read so parts of it can be accessed
1335 self.readPC = False
1336 # Whether the whole PC needs to be written after parts of it were
1337 # changed
1338 self.setPC = False
1339 # Whether this instruction manipulates the whole PC or parts of it.
1340 # Mixing the two is a bad idea and flagged as an error.
1341 self.pcPart = None
1342
1343 # Flags to keep track if one or more operands are to be read/written
1344 # conditionally.
1345 self.predRead = False
1346 self.predWrite = False
1347
1348 for op_desc in self.items:
1349 if op_desc.isPCPart():
1350 self.readPC = True
1351 if op_desc.is_dest:
1352 self.setPC = True
1353
1354 if op_desc.isPCState():
1355 if self.pcPart is not None:
1356 if self.pcPart and not op_desc.isPCPart() or \
1357 not self.pcPart and op_desc.isPCPart():
1358 error("Mixed whole and partial PC state operands.")
1359 self.pcPart = op_desc.isPCPart()
1360
1361 if op_desc.isMem():
1362 if self.memOperand:
1363 error("Code block has more than one memory operand.")
1364 self.memOperand = op_desc
1365
1366 # Check if this operand has read/write predication. If true, then
1367 # the microop will dynamically index source/dest registers.
1368 self.predRead = self.predRead or op_desc.hasReadPred()
1369 self.predWrite = self.predWrite or op_desc.hasWritePred()
1370
1371 # Regular expression object to match C++ strings
1372 stringRE = re.compile(r'"([^"\\]|\\.)*"')
1373
1374 # Regular expression object to match C++ comments
1375 # (used in findOperands())
1376 commentRE = re.compile(r'(^)?[^\S\n]*/(?:\*(.*?)\*/[^\S\n]*|/[^\n]*)($)?',
1377 re.DOTALL | re.MULTILINE)
1378
1379 # Regular expression object to match assignment statements (used in
1380 # findOperands()). If the code immediately following the first
1381 # appearance of the operand matches this regex, then the operand
1382 # appears to be on the LHS of an assignment, and is thus a
1383 # destination. Basically we're looking for an '=' that's not '=='.
1384 # The heinous tangle before that handles the case where the operand
1385 # has an array subscript.
1386 assignRE = re.compile(r'(\[[^\]]+\])?\s*=(?!=)', re.MULTILINE)
1387
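# For example (illustrative operand names): in the snippet
#
#   Rd = Ra + Rb;
#
# the text following 'Rd' matches assignRE (an '=' that is not '=='), so Rd
# is treated as a destination while Ra and Rb are sources.  A subscript on
# the left-hand side, as in 'Fd[elem] = Ra;', is absorbed by the optional
# '[...]' group so the operand is still recognized as a destination.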
1388 def makeFlagConstructor(flag_list):
1389 if len(flag_list) == 0:
1390 return ''
1391 # filter out repeated flags
1392 flag_list.sort()
1393 i = 1
1394 while i < len(flag_list):
1395 if flag_list[i] == flag_list[i-1]:
1396 del flag_list[i]
1397 else:
1398 i += 1
1399 pre = '\n\tflags['
1400 post = '] = true;'
1401 code = pre + (post + pre).join(flag_list) + post
1402 return code
1403
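# For example (illustrative):
#   makeFlagConstructor(['IsInteger', 'IsMicroop', 'IsInteger'])
# returns (after sorting and dropping the duplicate)
#   '\n\tflags[IsInteger] = true;\n\tflags[IsMicroop] = true;'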
1404 # Assume all instruction flags are of the form 'IsFoo'
1405 instFlagRE = re.compile(r'Is.*')
1406
1407 # OpClass constants end in 'Op' except No_OpClass
1408 opClassRE = re.compile(r'.*Op|No_OpClass')
1409
1410 class InstObjParams(object):
1411 def __init__(self, parser, mnem, class_name, base_class = '',
1412 snippets = {}, opt_args = []):
1413 self.mnemonic = mnem
1414 self.class_name = class_name
1415 self.base_class = base_class
1416 if not isinstance(snippets, dict):
1417 snippets = {'code' : snippets}
1418 compositeCode = ' '.join(map(str, snippets.values()))
1419 self.snippets = snippets
1420
1421 self.operands = OperandList(parser, compositeCode)
1422
1423 # The header of the constructor declares the variables to be used
1424 # in the body of the constructor.
1425 header = ''
1426 header += '\n\t_numSrcRegs = 0;'
1427 header += '\n\t_numDestRegs = 0;'
1428 header += '\n\t_numFPDestRegs = 0;'
1429 header += '\n\t_numVecDestRegs = 0;'
1430 header += '\n\t_numVecElemDestRegs = 0;'
1431 header += '\n\t_numVecPredDestRegs = 0;'
1432 header += '\n\t_numIntDestRegs = 0;'
1433 header += '\n\t_numCCDestRegs = 0;'
1434
1435 self.constructor = header + \
1436 self.operands.concatAttrStrings('constructor')
1437
1438 self.flags = self.operands.concatAttrLists('flags')
1439
1440 self.op_class = None
1441
1442 # Optional arguments are assumed to be either StaticInst flags
1443 # or an OpClass value. To avoid having to import a complete
1444 # list of these values to match against, we do it ad-hoc
1445 # with regexps.
1446 for oa in opt_args:
1447 if instFlagRE.match(oa):
1448 self.flags.append(oa)
1449 elif opClassRE.match(oa):
1450 self.op_class = oa
1451 else:
1452 error('InstObjParams: optional arg "%s" not recognized '
1453 'as StaticInst::Flag or OpClass.' % oa)
1454
1455 # Make a basic guess on the operand class if not set.
1456 # These are good enough for most cases.
1457 if not self.op_class:
1458 if 'IsStore' in self.flags:
1459 # The order matters here: 'IsFloating' and 'IsInteger' are
1460 # usually set in FP instructions because of the base
1461 # register
1462 if 'IsFloating' in self.flags:
1463 self.op_class = 'FloatMemWriteOp'
1464 else:
1465 self.op_class = 'MemWriteOp'
1466 elif 'IsLoad' in self.flags or 'IsPrefetch' in self.flags:
1467 # The order matters here: 'IsFloating' and 'IsInteger' are
1468 # usually set in FP instructions because of the base
1469 # register
1470 if 'IsFloating' in self.flags:
1471 self.op_class = 'FloatMemReadOp'
1472 else:
1473 self.op_class = 'MemReadOp'
1474 elif 'IsFloating' in self.flags:
1475 self.op_class = 'FloatAddOp'
1476 elif 'IsVector' in self.flags:
1477 self.op_class = 'SimdAddOp'
1478 else:
1479 self.op_class = 'IntAluOp'
1480
1481 # add flag initialization to constructor here to include
1482 # any flags added via opt_args
1483 self.constructor += makeFlagConstructor(self.flags)
1484
1485 # if 'IsFloating' is set, add call to the FP enable check
1486 # function (which should be provided by isa_desc via a declare)
1487 # if 'IsVector' is set, add call to the Vector enable check
1488 # function (which should be provided by isa_desc via a declare)
1489 if 'IsFloating' in self.flags:
1490 self.fp_enable_check = 'fault = checkFpEnableFault(xc);'
1491 elif 'IsVector' in self.flags:
1492 self.fp_enable_check = 'fault = checkVecEnableFault(xc);'
1493 else:
1494 self.fp_enable_check = ''
1495
1496 ##############
1497 # Stack: a simple stack object. Used for both formats (formatStack)
1498 # and default cases (defaultStack). Simply wraps a list to give more
1499 # stack-like syntax and enable initialization with an argument list
1500 # (as opposed to an argument that's a list).
1501
1502 class Stack(list):
1503 def __init__(self, *items):
1504 list.__init__(self, items)
1505
1506 def push(self, item):
1507 self.append(item);
1508
1509 def top(self):
1510 return self[-1]
1511
1512 # Format a file include stack backtrace as a string
1513 def backtrace(filename_stack):
1514 fmt = "In file included from %s:"
1515 return "\n".join([fmt % f for f in filename_stack])
1516
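# For example (illustrative file names):
#   backtrace(['decoder.isa', 'insts/mem.isa'])
# returns
#   'In file included from decoder.isa:\nIn file included from insts/mem.isa:'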
1517
1518 #######################
1519 #
1520 # LineTracker: track filenames along with line numbers in PLY lineno fields
1521 # PLY explicitly doesn't do anything with 'lineno' except propagate
1522 # it. This class lets us tie filenames with the line numbers with a
1523 # minimum of disruption to existing increment code.
1524 #
1525
1526 class LineTracker(object):
1527 def __init__(self, filename, lineno=1):
1528 self.filename = filename
1529 self.lineno = lineno
1530
1531 # Overload '+=' for increments. We need to create a new object on
1532 # each update else every token ends up referencing the same
1533 # constantly incrementing instance.
1534 def __iadd__(self, incr):
1535 return LineTracker(self.filename, self.lineno + incr)
1536
1537 def __str__(self):
1538 return "%s:%d" % (self.filename, self.lineno)
1539
1540 # In case there are places where someone really expects a number
1541 def __int__(self):
1542 return self.lineno
1543
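# For example (illustrative):
#   lt = LineTracker('decoder.isa')
#   lt += 1        # rebinds lt to a new LineTracker('decoder.isa', 2)
#   str(lt)        # -> 'decoder.isa:2'
#   int(lt)        # -> 2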
1544
1545 #######################
1546 #
1547 # ISA Parser
1548 # parses ISA DSL and emits C++ headers and source
1549 #
1550
1551 class ISAParser(Grammar):
1552 def __init__(self, output_dir):
1553 super(ISAParser, self).__init__()
1554 self.output_dir = output_dir
1555
1556 self.filename = None # for output file watermarking/scaremongering
1557
1558 # variable to hold templates
1559 self.templateMap = {}
1560
1561 # This dictionary maps format name strings to Format objects.
1562 self.formatMap = {}
1563
1564 # Track open files and, if applicable, how many chunks each has been
1565 # split into so far.
1566 self.files = {}
1567 self.splits = {}
1568
1569 # isa_name / namespace identifier from namespace declaration.
1570 # before the namespace declaration, None.
1571 self.isa_name = None
1572 self.namespace = None
1573
1574 # The format stack.
1575 self.formatStack = Stack(NoFormat())
1576
1577 # The default case stack.
1578 self.defaultStack = Stack(None)
1579
1580 # Stack that tracks current file and line number. Each
1581 # element is a tuple (filename, lineno) that records the
1582 # *current* filename and the line number in the *previous*
1583 # file where it was included.
1584 self.fileNameStack = Stack()
1585
1586 symbols = ('makeList', 're')
1587 self.exportContext = dict([(s, eval(s)) for s in symbols])
1588
1589 self.maxInstSrcRegs = 0
1590 self.maxInstDestRegs = 0
1591 self.maxMiscDestRegs = 0
1592
1593 def __getitem__(self, i): # Allow object (self) to be
1594 return getattr(self, i) # passed to %-substitutions
1595
1596 # Change the file suffix of a base filename:
1597 # (e.g.) decoder.cc -> decoder-g.cc.inc for 'global' outputs
1598 def suffixize(self, s, sec):
1599 extn = re.compile('(\.[^\.]+)$') # isolate extension
1600 if self.namespace:
1601 return extn.sub(r'-ns\1.inc', s) # insert some text on either side
1602 else:
1603 return extn.sub(r'-g\1.inc', s)
1604
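# For example (illustrative): once the namespace declaration has been
# parsed (self.namespace is set),
#   self.suffixize('decoder.cc', 'decoder')  -> 'decoder-ns.cc.inc'
# and before it (global scope)
#   self.suffixize('decoder.cc', 'decoder')  -> 'decoder-g.cc.inc'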
1605 # Get the file object for emitting code into the specified section
1606 # (header, decoder, exec, decode_block).
1607 def get_file(self, section):
1608 if section == 'decode_block':
1609 filename = 'decode-method.cc.inc'
1610 else:
1611 if section == 'header':
1612 file = 'decoder.hh'
1613 else:
1614 file = '%s.cc' % section
1615 filename = self.suffixize(file, section)
1616 try:
1617 return self.files[filename]
1618 except KeyError: pass
1619
1620 f = self.open(filename)
1621 self.files[filename] = f
1622
1623 # The splittable files are the ones with many independent
1624 # per-instruction functions - the decoder's instruction constructors
1625 # and the instruction execution (execute()) methods. These both have
1626 # the suffix -ns.cc.inc, meaning they are within the namespace part
1627 # of the ISA, contain object-emitting C++ source, and are included
1628 # into other top-level files. These are the files that need special
1629 # #define's to allow parts of them to be compiled separately. Rather
1630 # than splitting the emissions into separate files, the monolithic
1631 # output of the ISA parser is maintained, but the value (or lack
1632 # thereof) of the __SPLIT definition during C preprocessing will
1633 # select the different chunks. If no 'split' directives are used,
1634 # the cpp emissions have no effect.
1635 if re.search('-ns.cc.inc$', filename):
1636 print('#if !defined(__SPLIT) || (__SPLIT == 1)', file=f)
1637 self.splits[f] = 1
1638 # ensure requisite #include's
1639 elif filename == 'decoder-g.hh.inc':
1640 print('#include "base/bitfield.hh"', file=f)
1641
1642 return f
1643
1644 # Weave together the parts of the different output sections by
1645 # #include'ing them into some very short top-level .cc/.hh files.
1646 # These small files make it much clearer how this tool works, since
1647 # you directly see the chunks emitted as files that are #include'd.
1648 def write_top_level_files(self):
1649 # decoder header - everything depends on this
1650 file = 'decoder.hh'
1651 with self.open(file) as f:
1652 f.write('#ifndef __ARCH_%(isa)s_GENERATED_DECODER_HH__\n'
1653 '#define __ARCH_%(isa)s_GENERATED_DECODER_HH__\n\n' %
1654 {'isa': self.isa_name.upper()})
1655 fn = 'decoder-g.hh.inc'
1656 assert(fn in self.files)
1657 f.write('#include "%s"\n' % fn)
1658
1659 fn = 'decoder-ns.hh.inc'
1660 assert(fn in self.files)
1661 f.write('namespace %s {\n#include "%s"\n}\n'
1662 % (self.namespace, fn))
1663 f.write('\n#endif // __ARCH_%s_GENERATED_DECODER_HH__\n' %
1664 self.isa_name.upper())
1665
1666 # decoder method - cannot be split
1667 file = 'decoder.cc'
1668 with self.open(file) as f:
1669 fn = 'base/compiler.hh'
1670 f.write('#include "%s"\n' % fn)
1671
1672 fn = 'decoder-g.cc.inc'
1673 assert(fn in self.files)
1674 f.write('#include "%s"\n' % fn)
1675
1676 fn = 'decoder.hh'
1677 f.write('#include "%s"\n' % fn)
1678
1679 fn = 'decode-method.cc.inc'
1680 # is guaranteed to have been written for parse to complete
1681 f.write('#include "%s"\n' % fn)
1682
1683 extn = re.compile('(\.[^\.]+)$')
1684
1685 # instruction constructors
1686 splits = self.splits[self.get_file('decoder')]
1687 file_ = 'inst-constrs.cc'
1688 for i in range(1, splits+1):
1689 if splits > 1:
1690 file = extn.sub(r'-%d\1' % i, file_)
1691 else:
1692 file = file_
1693 with self.open(file) as f:
1694 fn = 'decoder-g.cc.inc'
1695 assert(fn in self.files)
1696 f.write('#include "%s"\n' % fn)
1697
1698 fn = 'decoder.hh'
1699 f.write('#include "%s"\n' % fn)
1700
1701 fn = 'decoder-ns.cc.inc'
1702 assert(fn in self.files)
1703 print('namespace %s {' % self.namespace, file=f)
1704 if splits > 1:
1705 print('#define __SPLIT %u' % i, file=f)
1706 print('#include "%s"' % fn, file=f)
1707 print('}', file=f)
1708
1709 # instruction execution
1710 splits = self.splits[self.get_file('exec')]
1711 for i in range(1, splits+1):
1712 file = 'generic_cpu_exec.cc'
1713 if splits > 1:
1714 file = extn.sub(r'_%d\1' % i, file)
1715 with self.open(file) as f:
1716 fn = 'exec-g.cc.inc'
1717 assert(fn in self.files)
1718 f.write('#include "%s"\n' % fn)
1719 f.write('#include "cpu/exec_context.hh"\n')
1720 f.write('#include "decoder.hh"\n')
1721
1722 fn = 'exec-ns.cc.inc'
1723 assert(fn in self.files)
1724 print('namespace %s {' % self.namespace, file=f)
1725 if splits > 1:
1726 print('#define __SPLIT %u' % i, file=f)
1727 print('#include "%s"' % fn, file=f)
1728 print('}', file=f)
1729
1730 # max_inst_regs.hh
1731 self.update('max_inst_regs.hh',
1732 '''namespace %(namespace)s {
1733 const int MaxInstSrcRegs = %(maxInstSrcRegs)d;
1734 const int MaxInstDestRegs = %(maxInstDestRegs)d;
1735 const int MaxMiscDestRegs = %(maxMiscDestRegs)d;\n}\n''' % self)
1736
1737 scaremonger_template ='''// DO NOT EDIT
1738 // This file was automatically generated from an ISA description:
1739 // %(filename)s
1740
1741     '''
1742
1743 #####################################################################
1744 #
1745 # Lexer
1746 #
1747 # The PLY lexer module takes two things as input:
1748 # - A list of token names (the string list 'tokens')
1749 # - A regular expression describing a match for each token. The
1750 # regexp for token FOO can be provided in two ways:
1751 # - as a string variable named t_FOO
1752 # - as the doc string for a function named t_FOO. In this case,
1753 # the function is also executed, allowing an action to be
1754 # associated with each token match.
1755 #
1756 #####################################################################
1757
1758 # Reserved words. These are listed separately as they are matched
1759 # using the same regexp as generic IDs, but distinguished in the
1760 # t_ID() function. The PLY documentation suggests this approach.
1761 reserved = (
1762 'BITFIELD', 'DECODE', 'DECODER', 'DEFAULT', 'DEF', 'EXEC', 'FORMAT',
1763 'HEADER', 'LET', 'NAMESPACE', 'OPERAND_TYPES', 'OPERANDS',
1764 'OUTPUT', 'SIGNED', 'SPLIT', 'TEMPLATE'
1765 )
1766
1767 # List of tokens. The lex module requires this.
1768 tokens = reserved + (
1769 # identifier
1770 'ID',
1771
1772 # integer literal
1773 'INTLIT',
1774
1775 # string literal
1776 'STRLIT',
1777
1778 # code literal
1779 'CODELIT',
1780
1781 # ( ) [ ] { } < > , ; . : :: *
1782 'LPAREN', 'RPAREN',
1783 'LBRACKET', 'RBRACKET',
1784 'LBRACE', 'RBRACE',
1785 'LESS', 'GREATER', 'EQUALS',
1786 'COMMA', 'SEMI', 'DOT', 'COLON', 'DBLCOLON',
1787 'ASTERISK',
1788
1789 # C preprocessor directives
1790 'CPPDIRECTIVE'
1791
1792     # The following are matched but never returned; they are commented
1793     # out to suppress a PLY warning.
1794 # newfile directive
1795 # 'NEWFILE',
1796
1797 # endfile directive
1798 # 'ENDFILE'
1799 )
1800
1801 # Regular expressions for token matching
1802 t_LPAREN = r'\('
1803 t_RPAREN = r'\)'
1804 t_LBRACKET = r'\['
1805 t_RBRACKET = r'\]'
1806 t_LBRACE = r'\{'
1807 t_RBRACE = r'\}'
1808 t_LESS = r'\<'
1809 t_GREATER = r'\>'
1810 t_EQUALS = r'='
1811 t_COMMA = r','
1812 t_SEMI = r';'
1813 t_DOT = r'\.'
1814 t_COLON = r':'
1815 t_DBLCOLON = r'::'
1816 t_ASTERISK = r'\*'
1817
1818 # Identifiers and reserved words
1819 reserved_map = { }
1820 for r in reserved:
1821 reserved_map[r.lower()] = r
1822
1823 def t_ID(self, t):
1824 r'[A-Za-z_]\w*'
1825 t.type = self.reserved_map.get(t.value, 'ID')
1826 return t
1827
1828 # Integer literal
1829 def t_INTLIT(self, t):
1830 r'-?(0x[\da-fA-F]+)|\d+'
1831 try:
1832 t.value = int(t.value,0)
1833 except ValueError:
1834 error(t.lexer.lineno, 'Integer value "%s" too large' % t.value)
1835 t.value = 0
1836 return t
1837
1838 # String literal. Note that these use only single quotes, and
1839 # can span multiple lines.
1840 def t_STRLIT(self, t):
1841 r"(?m)'([^'])+'"
1842 # strip off quotes
1843 t.value = t.value[1:-1]
1844 t.lexer.lineno += t.value.count('\n')
1845 return t
1846
1847
1848 # "Code literal"... like a string literal, but delimiters are
1849 # '{{' and '}}' so they get formatted nicely under emacs c-mode
1850 def t_CODELIT(self, t):
1851 r"(?m)\{\{([^\}]|}(?!\}))+\}\}"
1852 # strip off {{ & }}
1853 t.value = t.value[2:-2]
1854 t.lexer.lineno += t.value.count('\n')
1855 return t
1856
1857 def t_CPPDIRECTIVE(self, t):
1858 r'^\#[^\#].*\n'
1859 t.lexer.lineno += t.value.count('\n')
1860 return t
1861
1862 def t_NEWFILE(self, t):
1863 r'^\#\#newfile\s+"[^"]*"\n'
1864 self.fileNameStack.push(t.lexer.lineno)
1865 t.lexer.lineno = LineTracker(t.value[11:-2])
1866
1867 def t_ENDFILE(self, t):
1868 r'^\#\#endfile\n'
1869 t.lexer.lineno = self.fileNameStack.pop()
1870
1871 #
1872 # The functions t_NEWLINE, t_ignore, and t_error are
1873 # special for the lex module.
1874 #
1875
1876 # Newlines
1877 def t_NEWLINE(self, t):
1878 r'\n+'
1879 t.lexer.lineno += t.value.count('\n')
1880
1881 # Comments
1882 def t_comment(self, t):
1883 r'//.*'
1884
1885 # Completely ignored characters
1886 t_ignore = ' \t\x0c'
1887
1888 # Error handler
1889 def t_error(self, t):
1890 error(t.lexer.lineno, "illegal character '%s'" % t.value[0])
1891 t.skip(1)
1892
1893 #####################################################################
1894 #
1895 # Parser
1896 #
1897 # Every function whose name starts with 'p_' defines a grammar
1898 # rule. The rule is encoded in the function's doc string, while
1899 # the function body provides the action taken when the rule is
1900 # matched. The argument to each function is a list of the values
1901 # of the rule's symbols: t[0] for the LHS, and t[1..n] for the
1902 # symbols on the RHS. For tokens, the value is copied from the
1903 # t.value attribute provided by the lexer. For non-terminals, the
1904 # value is assigned by the producing rule; i.e., the job of the
1905 # grammar rule function is to set the value for the non-terminal
1906 # on the LHS (by assigning to t[0]).
1907 #####################################################################
1908
1909 # The LHS of the first grammar rule is used as the start symbol
1910 # (in this case, 'specification'). Note that this rule enforces
1911 # that there will be exactly one namespace declaration, with 0 or
1912 # more global defs/decls before and after it. The defs & decls
1913 # before the namespace decl will be outside the namespace; those
1914 # after will be inside. The decoder function is always inside the
1915 # namespace.
1916 def p_specification(self, t):
1917 'specification : opt_defs_and_outputs top_level_decode_block'
1918
1919 for f in self.splits.iterkeys():
1920 f.write('\n#endif\n')
1921
1922 for f in self.files.itervalues(): # close ALL the files;
1923 f.close() # not doing so can cause compilation to fail
1924
1925 self.write_top_level_files()
1926
1927 t[0] = True
1928
1929 # 'opt_defs_and_outputs' is a possibly empty sequence of def and/or
1930 # output statements. Its productions do the hard work of eventually
1931     # instantiating GenCode objects, which are generally emitted (written to disk)
1932 # as soon as possible, except for the decode_block, which has to be
1933 # accumulated into one large function of nested switch/case blocks.
1934 def p_opt_defs_and_outputs_0(self, t):
1935 'opt_defs_and_outputs : empty'
1936
1937 def p_opt_defs_and_outputs_1(self, t):
1938 'opt_defs_and_outputs : defs_and_outputs'
1939
1940 def p_defs_and_outputs_0(self, t):
1941 'defs_and_outputs : def_or_output'
1942
1943 def p_defs_and_outputs_1(self, t):
1944 'defs_and_outputs : defs_and_outputs def_or_output'
1945
1946 # The list of possible definition/output statements.
1947 # They are all processed as they are seen.
1948 def p_def_or_output(self, t):
1949 '''def_or_output : name_decl
1950 | def_format
1951 | def_bitfield
1952 | def_bitfield_struct
1953 | def_template
1954 | def_operand_types
1955 | def_operands
1956 | output
1957 | global_let
1958 | split'''
1959
1960     # Utility function used by both ways of requesting a split: the explicit
1961     # 'split' directive and the split() function inside "let {{ }};" blocks.
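    #
    # A hedged example (the section is chosen arbitrarily): an ISA
    # description can start a new compilation unit for the execution output
    # with the statement
    #
    #   split exec;
    #
    # which is handled by p_split() below and bumps self.splits for that
    # section's output file.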
1962 def split(self, sec, write=False):
1963         assert sec != 'header', "header cannot be split"
1964
1965 f = self.get_file(sec)
1966 self.splits[f] += 1
1967 s = '\n#endif\n#if __SPLIT == %u\n' % self.splits[f]
1968 if write:
1969 f.write(s)
1970 else:
1971 return s
1972
1973 # split output file to reduce compilation time
1974 def p_split(self, t):
1975 'split : SPLIT output_type SEMI'
1976         assert self.isa_name, "'split' not allowed before namespace decl"
1977
1978 self.split(t[2], True)
1979
1980 def p_output_type(self, t):
1981 '''output_type : DECODER
1982 | HEADER
1983 | EXEC'''
1984 t[0] = t[1]
1985
1986 # ISA name declaration looks like "namespace <foo>;"
1987 def p_name_decl(self, t):
1988 'name_decl : NAMESPACE ID SEMI'
1989         assert self.isa_name is None, "Only 1 namespace decl permitted"
1990 self.isa_name = t[2]
1991 self.namespace = t[2] + 'Inst'
1992
1993 # Output blocks 'output <foo> {{...}}' (C++ code blocks) are copied
1994 # directly to the appropriate output section.
1995
1996 # Massage output block by substituting in template definitions and
1997 # bit operators. We handle '%'s embedded in the string that don't
1998 # indicate template substitutions by doubling them first so that the
1999 # format operation will reduce them back to single '%'s.
2000 def process_output(self, s):
2001 s = self.protectNonSubstPercents(s)
2002 return substBitOps(s % self.templateMap)
2003
2004 def p_output(self, t):
2005 'output : OUTPUT output_type CODELIT SEMI'
2006 kwargs = { t[2]+'_output' : self.process_output(t[3]) }
2007 GenCode(self, **kwargs).emit()
2008
2009 # global let blocks 'let {{...}}' (Python code blocks) are
2010 # executed directly when seen. Note that these execute in a
2011 # special variable context 'exportContext' to prevent the code
2012 # from polluting this script's namespace.
2013 def p_global_let(self, t):
2014 'global_let : LET CODELIT SEMI'
2015 def _split(sec):
2016 return self.split(sec)
2017 self.updateExportContext()
2018 self.exportContext["header_output"] = ''
2019 self.exportContext["decoder_output"] = ''
2020 self.exportContext["exec_output"] = ''
2021 self.exportContext["decode_block"] = ''
2022 self.exportContext["split"] = _split
2023 split_setup = '''
2024 def wrap(func):
2025 def split(sec):
2026 globals()[sec + '_output'] += func(sec)
2027 return split
2028 split = wrap(split)
2029 del wrap
2030 '''
2031 # This tricky setup (immediately above) allows us to just write
2032 # (e.g.) "split('exec')" in the Python code and the split #ifdef's
2033 # will automatically be added to the exec_output variable. The inner
2034 # Python execution environment doesn't know about the split points,
2035 # so we carefully inject and wrap a closure that can retrieve the
2036 # next split's #define from the parser and add it to the current
2037 # emission-in-progress.
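        #
        # A hedged sketch of how this reads in an ISA description (the
        # template and variable names are hypothetical):
        #
        #   let {{
        #       exec_output = SomeTemplate.subst(iop)
        #       split('exec')
        #       exec_output += AnotherTemplate.subst(iop)
        #   }};
        #
        # The split('exec') call appends the '#endif / #if __SPLIT == n'
        # marker to exec_output between the two generated chunks.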
2038 try:
2039 exec split_setup+fixPythonIndentation(t[2]) in self.exportContext
2040 except Exception, exc:
2041 traceback.print_exc(file=sys.stdout)
2042 if debug:
2043 raise
2044 error(t.lineno(1), 'In global let block: %s' % exc)
2045 GenCode(self,
2046 header_output=self.exportContext["header_output"],
2047 decoder_output=self.exportContext["decoder_output"],
2048 exec_output=self.exportContext["exec_output"],
2049 decode_block=self.exportContext["decode_block"]).emit()
2050
2051 # Define the mapping from operand type extensions to C++ types and
2052 # bit widths (stored in operandTypeMap).
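    # A hedged example (the extensions and C++ types are illustrative, not
    # taken from any particular ISA):
    #
    #   def operand_types {{
    #       'sb' : 'int8_t',
    #       'uw' : 'uint32_t',
    #       'ud' : 'uint64_t'
    #   }};
    #
    # The code literal is simply eval'd inside '{...}' to build the
    # operandTypeMap dictionary.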
2053 def p_def_operand_types(self, t):
2054 'def_operand_types : DEF OPERAND_TYPES CODELIT SEMI'
2055 try:
2056 self.operandTypeMap = eval('{' + t[3] + '}')
2057 except Exception, exc:
2058 if debug:
2059 raise
2060 error(t.lineno(1),
2061 'In def operand_types: %s' % exc)
2062
2063 # Define the mapping from operand names to operand classes and
2064 # other traits. Stored in operandNameMap.
2065 def p_def_operands(self, t):
2066 'def_operands : DEF OPERANDS CODELIT SEMI'
2067 if not hasattr(self, 'operandTypeMap'):
2068 error(t.lineno(1),
2069 'error: operand types must be defined before operands')
2070 try:
2071 user_dict = eval('{' + t[3] + '}', self.exportContext)
2072 except Exception, exc:
2073 if debug:
2074 raise
2075 error(t.lineno(1), 'In def operands: %s' % exc)
2076 self.buildOperandNameMap(user_dict, t.lexer.lineno)
2077
2078 # A bitfield definition looks like:
2079 # 'def [signed] bitfield <ID> [<first>:<last>]'
2080 # This generates a preprocessor macro in the output file.
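    # A hedged example (field name and bit positions are illustrative):
    #
    #   def bitfield OPCODE <31:26>;
    #
    # emits the following to the header output:
    #
    #   #undef OPCODE
    #   #define OPCODE bits(machInst, 31, 26)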
2081 def p_def_bitfield_0(self, t):
2082 'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT COLON INTLIT GREATER SEMI'
2083 expr = 'bits(machInst, %2d, %2d)' % (t[6], t[8])
2084 if (t[2] == 'signed'):
2085 expr = 'sext<%d>(%s)' % (t[6] - t[8] + 1, expr)
2086 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2087 GenCode(self, header_output=hash_define).emit()
2088
2089 # alternate form for single bit: 'def [signed] bitfield <ID> [<bit>]'
2090 def p_def_bitfield_1(self, t):
2091 'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT GREATER SEMI'
2092 expr = 'bits(machInst, %2d, %2d)' % (t[6], t[6])
2093 if (t[2] == 'signed'):
2094 expr = 'sext<%d>(%s)' % (1, expr)
2095 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2096 GenCode(self, header_output=hash_define).emit()
2097
2098 # alternate form for structure member: 'def bitfield <ID> <ID>'
2099 def p_def_bitfield_struct(self, t):
2100 'def_bitfield_struct : DEF opt_signed BITFIELD ID id_with_dot SEMI'
2101 if (t[2] != ''):
2102 error(t.lineno(1),
2103 'error: structure bitfields are always unsigned.')
2104 expr = 'machInst.%s' % t[5]
2105 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2106 GenCode(self, header_output=hash_define).emit()
2107
2108 def p_id_with_dot_0(self, t):
2109 'id_with_dot : ID'
2110 t[0] = t[1]
2111
2112 def p_id_with_dot_1(self, t):
2113 'id_with_dot : ID DOT id_with_dot'
2114 t[0] = t[1] + t[2] + t[3]
2115
2116 def p_opt_signed_0(self, t):
2117 'opt_signed : SIGNED'
2118 t[0] = t[1]
2119
2120 def p_opt_signed_1(self, t):
2121 'opt_signed : empty'
2122 t[0] = ''
2123
2124 def p_def_template(self, t):
2125 'def_template : DEF TEMPLATE ID CODELIT SEMI'
2126 if t[3] in self.templateMap:
2127 print("warning: template %s already defined" % t[3])
2128 self.templateMap[t[3]] = Template(self, t[4])
2129
2130 # An instruction format definition looks like
2131 # "def format <fmt>(<params>) {{...}};"
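    # A hedged sketch (the format name, parameters, and body are
    # hypothetical):
    #
    #   def format BasicOp(code, *opt_flags) {{
    #       # Python code that builds header_output, decoder_output,
    #       # exec_output and decode_block for each instruction using
    #       # this format...
    #   }};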
2132 def p_def_format(self, t):
2133 'def_format : DEF FORMAT ID LPAREN param_list RPAREN CODELIT SEMI'
2134 (id, params, code) = (t[3], t[5], t[7])
2135 self.defFormat(id, params, code, t.lexer.lineno)
2136
2137 # The formal parameter list for an instruction format is a
2138 # possibly empty list of comma-separated parameters. Positional
2139 # (standard, non-keyword) parameters must come first, followed by
2140 # keyword parameters, followed by a '*foo' parameter that gets
2141 # excess positional arguments (as in Python). Each of these three
2142 # parameter categories is optional.
2143 #
2144 # Note that we do not support the '**foo' parameter for collecting
2145 # otherwise undefined keyword args. Otherwise the parameter list
2146 # is (I believe) identical to what is supported in Python.
2147 #
2148     # The param list is built up as a flat list of parameter strings:
2149     # positional params first, then keyword params rendered as
2150     # 'name = value' strings, and finally the '*foo' excess-args param,
2151     # if present.
2151 def p_param_list_0(self, t):
2152 'param_list : positional_param_list COMMA nonpositional_param_list'
2153 t[0] = t[1] + t[3]
2154
2155 def p_param_list_1(self, t):
2156 '''param_list : positional_param_list
2157 | nonpositional_param_list'''
2158 t[0] = t[1]
2159
2160 def p_positional_param_list_0(self, t):
2161 'positional_param_list : empty'
2162 t[0] = []
2163
2164 def p_positional_param_list_1(self, t):
2165 'positional_param_list : ID'
2166 t[0] = [t[1]]
2167
2168 def p_positional_param_list_2(self, t):
2169 'positional_param_list : positional_param_list COMMA ID'
2170 t[0] = t[1] + [t[3]]
2171
2172 def p_nonpositional_param_list_0(self, t):
2173 'nonpositional_param_list : keyword_param_list COMMA excess_args_param'
2174 t[0] = t[1] + t[3]
2175
2176 def p_nonpositional_param_list_1(self, t):
2177 '''nonpositional_param_list : keyword_param_list
2178 | excess_args_param'''
2179 t[0] = t[1]
2180
2181 def p_keyword_param_list_0(self, t):
2182 'keyword_param_list : keyword_param'
2183 t[0] = [t[1]]
2184
2185 def p_keyword_param_list_1(self, t):
2186 'keyword_param_list : keyword_param_list COMMA keyword_param'
2187 t[0] = t[1] + [t[3]]
2188
2189 def p_keyword_param(self, t):
2190 'keyword_param : ID EQUALS expr'
2191         t[0] = t[1] + ' = ' + repr(t[3])
2192
2193 def p_excess_args_param(self, t):
2194 'excess_args_param : ASTERISK ID'
2195 # Just concatenate them: '*ID'. Wrap in list to be consistent
2196 # with positional_param_list and keyword_param_list.
2197 t[0] = [t[1] + t[2]]
2198
2199 # End of format definition-related rules.
2200 ##############
2201
2202 #
2203 # A decode block looks like:
2204 # decode <field1> [, <field2>]* [default <inst>] { ... }
2205 #
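    # A hedged example (the bitfield, format, and mnemonic names are
    # hypothetical):
    #
    #   decode OPCODE default Unknown::unknown() {
    #       0x0: BasicOp::add({{ Rd = Ra + Rb; }});
    #       0x1: decode FUNC {
    #           0x0: BasicOp::sub({{ Rd = Ra - Rb; }});
    #       }
    #   }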
2206 def p_top_level_decode_block(self, t):
2207 'top_level_decode_block : decode_block'
2208 codeObj = t[1]
2209 codeObj.wrap_decode_block('''
2210 StaticInstPtr
2211 %(isa_name)s::Decoder::decodeInst(%(isa_name)s::ExtMachInst machInst)
2212 {
2213 using namespace %(namespace)s;
2214 ''' % self, '}')
2215
2216 codeObj.emit()
2217
2218 def p_decode_block(self, t):
2219 'decode_block : DECODE ID opt_default LBRACE decode_stmt_list RBRACE'
2220 default_defaults = self.defaultStack.pop()
2221 codeObj = t[5]
2222 # use the "default defaults" only if there was no explicit
2223 # default statement in decode_stmt_list
2224 if not codeObj.has_decode_default:
2225 codeObj += default_defaults
2226 codeObj.wrap_decode_block('switch (%s) {\n' % t[2], '}\n')
2227 t[0] = codeObj
2228
2229 # The opt_default statement serves only to push the "default
2230 # defaults" onto defaultStack. This value will be used by nested
2231 # decode blocks, and used and popped off when the current
2232 # decode_block is processed (in p_decode_block() above).
2233 def p_opt_default_0(self, t):
2234 'opt_default : empty'
2235 # no default specified: reuse the one currently at the top of
2236 # the stack
2237 self.defaultStack.push(self.defaultStack.top())
2238 # no meaningful value returned
2239 t[0] = None
2240
2241 def p_opt_default_1(self, t):
2242 'opt_default : DEFAULT inst'
2243 # push the new default
2244 codeObj = t[2]
2245 codeObj.wrap_decode_block('\ndefault:\n', 'break;\n')
2246 self.defaultStack.push(codeObj)
2247 # no meaningful value returned
2248 t[0] = None
2249
2250 def p_decode_stmt_list_0(self, t):
2251 'decode_stmt_list : decode_stmt'
2252 t[0] = t[1]
2253
2254 def p_decode_stmt_list_1(self, t):
2255 'decode_stmt_list : decode_stmt decode_stmt_list'
2256 if (t[1].has_decode_default and t[2].has_decode_default):
2257 error(t.lineno(1), 'Two default cases in decode block')
2258 t[0] = t[1] + t[2]
2259
2260 #
2261 # Decode statement rules
2262 #
2263 # There are four types of statements allowed in a decode block:
2264 # 1. Format blocks 'format <foo> { ... }'
2265 # 2. Nested decode blocks
2266 # 3. Instruction definitions.
2267 # 4. C preprocessor directives.
2268
2269
2270 # Preprocessor directives found in a decode statement list are
2271 # passed through to the output, replicated to all of the output
2272 # code streams. This works well for ifdefs, so we can ifdef out
2273 # both the declarations and the decode cases generated by an
2274 # instruction definition. Handling them as part of the grammar
2275 # makes it easy to keep them in the right place with respect to
2276 # the code generated by the other statements.
2277 def p_decode_stmt_cpp(self, t):
2278 'decode_stmt : CPPDIRECTIVE'
2279 t[0] = GenCode(self, t[1], t[1], t[1], t[1])
2280
2281 # A format block 'format <foo> { ... }' sets the default
2282 # instruction format used to handle instruction definitions inside
2283 # the block. This format can be overridden by using an explicit
2284 # format on the instruction definition or with a nested format
2285 # block.
2286 def p_decode_stmt_format(self, t):
2287 'decode_stmt : FORMAT push_format_id LBRACE decode_stmt_list RBRACE'
2288 # The format will be pushed on the stack when 'push_format_id'
2289 # is processed (see below). Once the parser has recognized
2290         # the full production (through the right brace), we're done
2291 # with the format, so now we can pop it.
2292 self.formatStack.pop()
2293 t[0] = t[4]
2294
2295 # This rule exists so we can set the current format (& push the
2296 # stack) when we recognize the format name part of the format
2297 # block.
2298 def p_push_format_id(self, t):
2299 'push_format_id : ID'
2300 try:
2301 self.formatStack.push(self.formatMap[t[1]])
2302 t[0] = ('', '// format %s' % t[1])
2303 except KeyError:
2304 error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])
2305
2306 # Nested decode block: if the value of the current field matches
2307 # the specified constant(s), do a nested decode on some other field.
2308 def p_decode_stmt_decode(self, t):
2309 'decode_stmt : case_list COLON decode_block'
2310 case_list = t[1]
2311 codeObj = t[3]
2312 # just wrap the decoding code from the block as a case in the
2313 # outer switch statement.
2314 codeObj.wrap_decode_block('\n%s\n' % ''.join(case_list),
2315 'M5_UNREACHABLE;\n')
2316 codeObj.has_decode_default = (case_list == ['default:'])
2317 t[0] = codeObj
2318
2319 # Instruction definition (finally!).
2320 def p_decode_stmt_inst(self, t):
2321 'decode_stmt : case_list COLON inst SEMI'
2322 case_list = t[1]
2323 codeObj = t[3]
2324 codeObj.wrap_decode_block('\n%s' % ''.join(case_list), 'break;\n')
2325 codeObj.has_decode_default = (case_list == ['default:'])
2326 t[0] = codeObj
2327
2328 # The constant list for a decode case label must be non-empty, and must
2329 # either be the keyword 'default', or made up of one or more
2330 # comma-separated integer literals or strings which evaluate to
2331 # constants when compiled as C++.
2332 def p_case_list_0(self, t):
2333 'case_list : DEFAULT'
2334 t[0] = ['default:']
2335
2336 def prep_int_lit_case_label(self, lit):
2337 if lit >= 2**32:
2338 return 'case ULL(%#x): ' % lit
2339 else:
2340 return 'case %#x: ' % lit
2341
2342 def prep_str_lit_case_label(self, lit):
2343 return 'case %s: ' % lit
2344
2345 def p_case_list_1(self, t):
2346 'case_list : INTLIT'
2347 t[0] = [self.prep_int_lit_case_label(t[1])]
2348
2349 def p_case_list_2(self, t):
2350 'case_list : STRLIT'
2351 t[0] = [self.prep_str_lit_case_label(t[1])]
2352
2353 def p_case_list_3(self, t):
2354 'case_list : case_list COMMA INTLIT'
2355 t[0] = t[1]
2356 t[0].append(self.prep_int_lit_case_label(t[3]))
2357
2358 def p_case_list_4(self, t):
2359 'case_list : case_list COMMA STRLIT'
2360 t[0] = t[1]
2361 t[0].append(self.prep_str_lit_case_label(t[3]))
2362
2363 # Define an instruction using the current instruction format
2364 # (specified by an enclosing format block).
2365 # "<mnemonic>(<args>)"
2366 def p_inst_0(self, t):
2367 'inst : ID LPAREN arg_list RPAREN'
2368 # Pass the ID and arg list to the current format class to deal with.
2369 currentFormat = self.formatStack.top()
2370 codeObj = currentFormat.defineInst(self, t[1], t[3], t.lexer.lineno)
2371 args = ','.join(map(str, t[3]))
2372 args = re.sub('(?m)^', '//', args)
2373 args = re.sub('^//', '', args)
2374 comment = '\n// %s::%s(%s)\n' % (currentFormat.id, t[1], args)
2375 codeObj.prepend_all(comment)
2376 t[0] = codeObj
2377
2378 # Define an instruction using an explicitly specified format:
2379 # "<fmt>::<mnemonic>(<args>)"
2380 def p_inst_1(self, t):
2381 'inst : ID DBLCOLON ID LPAREN arg_list RPAREN'
2382 try:
2383 format = self.formatMap[t[1]]
2384 except KeyError:
2385 error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])
2386
2387 codeObj = format.defineInst(self, t[3], t[5], t.lexer.lineno)
2388 comment = '\n// %s::%s(%s)\n' % (t[1], t[3], t[5])
2389 codeObj.prepend_all(comment)
2390 t[0] = codeObj
2391
2392 # The arg list generates a tuple, where the first element is a
2393 # list of the positional args and the second element is a dict
2394 # containing the keyword args.
2395 def p_arg_list_0(self, t):
2396 'arg_list : positional_arg_list COMMA keyword_arg_list'
2397 t[0] = ( t[1], t[3] )
2398
2399 def p_arg_list_1(self, t):
2400 'arg_list : positional_arg_list'
2401 t[0] = ( t[1], {} )
2402
2403 def p_arg_list_2(self, t):
2404 'arg_list : keyword_arg_list'
2405 t[0] = ( [], t[1] )
2406
2407 def p_positional_arg_list_0(self, t):
2408 'positional_arg_list : empty'
2409 t[0] = []
2410
2411 def p_positional_arg_list_1(self, t):
2412 'positional_arg_list : expr'
2413 t[0] = [t[1]]
2414
2415 def p_positional_arg_list_2(self, t):
2416 'positional_arg_list : positional_arg_list COMMA expr'
2417 t[0] = t[1] + [t[3]]
2418
2419 def p_keyword_arg_list_0(self, t):
2420 'keyword_arg_list : keyword_arg'
2421 t[0] = t[1]
2422
2423 def p_keyword_arg_list_1(self, t):
2424 'keyword_arg_list : keyword_arg_list COMMA keyword_arg'
2425 t[0] = t[1]
2426 t[0].update(t[3])
2427
2428 def p_keyword_arg(self, t):
2429 'keyword_arg : ID EQUALS expr'
2430 t[0] = { t[1] : t[3] }
2431
2432 #
2433 # Basic expressions. These constitute the argument values of
2434 # "function calls" (i.e. instruction definitions in the decode
2435 # block) and default values for formal parameters of format
2436 # functions.
2437 #
2438 # Right now, these are either strings, integers, or (recursively)
2439 # lists of exprs (using Python square-bracket list syntax). Note
2440     # that bare identifiers are treated as string constants here (since
2441 # there isn't really a variable namespace to refer to).
2442 #
2443 def p_expr_0(self, t):
2444 '''expr : ID
2445 | INTLIT
2446 | STRLIT
2447 | CODELIT'''
2448 t[0] = t[1]
2449
2450 def p_expr_1(self, t):
2451 '''expr : LBRACKET list_expr RBRACKET'''
2452 t[0] = t[2]
2453
2454 def p_list_expr_0(self, t):
2455 'list_expr : expr'
2456 t[0] = [t[1]]
2457
2458 def p_list_expr_1(self, t):
2459 'list_expr : list_expr COMMA expr'
2460 t[0] = t[1] + [t[3]]
2461
2462 def p_list_expr_2(self, t):
2463 'list_expr : empty'
2464 t[0] = []
2465
2466 #
2467     # Empty production... used in other rules for readability.
2468 #
2469 def p_empty(self, t):
2470 'empty :'
2471 pass
2472
2473 # Parse error handler. Note that the argument here is the
2474 # offending *token*, not a grammar symbol (hence the need to use
2475 # t.value)
2476 def p_error(self, t):
2477 if t:
2478 error(t.lexer.lineno, "syntax error at '%s'" % t.value)
2479 else:
2480 error("unknown syntax error")
2481
2482 # END OF GRAMMAR RULES
2483
2484 def updateExportContext(self):
2485
2486 # create a continuation that allows us to grab the current parser
2487 def wrapInstObjParams(*args):
2488 return InstObjParams(self, *args)
2489 self.exportContext['InstObjParams'] = wrapInstObjParams
2490 self.exportContext.update(self.templateMap)
2491
2492 def defFormat(self, id, params, code, lineno):
2493 '''Define a new format'''
2494
2495 # make sure we haven't already defined this one
2496 if id in self.formatMap:
2497 error(lineno, 'format %s redefined.' % id)
2498
2499 # create new object and store in global map
2500 self.formatMap[id] = Format(id, params, code)
2501
2502 def protectNonSubstPercents(self, s):
2503 '''Protect any non-dict-substitution '%'s in a format string
2504 (i.e. those not followed by '(')'''
2505
2506 return re.sub(r'%(?!\()', '%%', s)
2507
2508 def buildOperandNameMap(self, user_dict, lineno):
2509 operand_name = {}
2510 for op_name, val in user_dict.iteritems():
2511
2512 # Check if extra attributes have been specified.
2513 if len(val) > 9:
2514                 error(lineno, 'error: too many attributes for operand "%s"' %
2515                       op_name)
2516
2517 # Pad val with None in case optional args are missing
2518 val += (None, None, None, None)
2519 base_cls_name, dflt_ext, reg_spec, flags, sort_pri, \
2520 read_code, write_code, read_predicate, write_predicate = val[:9]
2521
2522 # Canonical flag structure is a triple of lists, where each list
2523 # indicates the set of flags implied by this operand always, when
2524 # used as a source, and when used as a dest, respectively.
2525 # For simplicity this can be initialized using a variety of fairly
2526 # obvious shortcuts; we convert these to canonical form here.
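            # A hedged illustration (the flag names are examples only):
            # 'IsInteger' becomes (['IsInteger'], [], []),
            # ['IsInteger', 'IsLoad'] becomes (['IsInteger', 'IsLoad'], [], []),
            # and ([], 'IsLoad', 'IsStore') becomes ([], ['IsLoad'], ['IsStore']).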
2527 if not flags:
2528 # no flags specified (e.g., 'None')
2529 flags = ( [], [], [] )
2530 elif isinstance(flags, str):
2531 # a single flag: assumed to be unconditional
2532 flags = ( [ flags ], [], [] )
2533 elif isinstance(flags, list):
2534 # a list of flags: also assumed to be unconditional
2535 flags = ( flags, [], [] )
2536 elif isinstance(flags, tuple):
2537 # it's a tuple: it should be a triple,
2538 # but each item could be a single string or a list
2539 (uncond_flags, src_flags, dest_flags) = flags
2540 flags = (makeList(uncond_flags),
2541 makeList(src_flags), makeList(dest_flags))
2542
2543 # Accumulate attributes of new operand class in tmp_dict
2544 tmp_dict = {}
2545 attrList = ['reg_spec', 'flags', 'sort_pri',
2546 'read_code', 'write_code',
2547 'read_predicate', 'write_predicate']
2548 if dflt_ext:
2549 dflt_ctype = self.operandTypeMap[dflt_ext]
2550 attrList.extend(['dflt_ctype', 'dflt_ext'])
2551             # reg_spec is either just a string, or a (spec, elem_spec)
2552             # tuple for vector operands with per-element specs
2553 if isinstance(reg_spec, tuple):
2554 (reg_spec, elem_spec) = reg_spec
2555 if isinstance(elem_spec, str):
2556 attrList.append('elem_spec')
2557 else:
2558 assert(isinstance(elem_spec, dict))
2559 elems = elem_spec
2560 attrList.append('elems')
2561 for attr in attrList:
2562 tmp_dict[attr] = eval(attr)
2563 tmp_dict['base_name'] = op_name
2564
2565 # New class name will be e.g. "IntReg_Ra"
2566 cls_name = base_cls_name + '_' + op_name
2567 # Evaluate string arg to get class object. Note that the
2568 # actual base class for "IntReg" is "IntRegOperand", i.e. we
2569 # have to append "Operand".
2570 try:
2571 base_cls = eval(base_cls_name + 'Operand')
2572 except NameError:
2573 error(lineno,
2574 'error: unknown operand base class "%s"' % base_cls_name)
2575 # The following statement creates a new class called
2576 # <cls_name> as a subclass of <base_cls> with the attributes
2577 # in tmp_dict, just as if we evaluated a class declaration.
2578 operand_name[op_name] = type(cls_name, (base_cls,), tmp_dict)
2579
2580 self.operandNameMap = operand_name
2581
2582 # Define operand variables.
2583 operands = user_dict.keys()
2584 # Add the elems defined in the vector operands and
2585 # build a map elem -> vector (used in OperandList)
2586 elem_to_vec = {}
2587 for op in user_dict.keys():
2588 if hasattr(self.operandNameMap[op], 'elems'):
2589 for elem in self.operandNameMap[op].elems.keys():
2590 operands.append(elem)
2591 elem_to_vec[elem] = op
2592 self.elemToVector = elem_to_vec
2593 extensions = self.operandTypeMap.keys()
2594
2595 operandsREString = r'''
2596 (?<!\w) # neg. lookbehind assertion: prevent partial matches
2597 ((%s)(?:_(%s))?) # match: operand with optional '_' then suffix
2598 (?!\w) # neg. lookahead assertion: prevent partial matches
2599 ''' % ('|'.join(operands), '|'.join(extensions))
2600
2601 self.operandsRE = re.compile(operandsREString, re.MULTILINE|re.VERBOSE)
2602
2603 # Same as operandsREString, but extension is mandatory, and only two
2604 # groups are returned (base and ext, not full name as above).
2605         # Used for substituting '_' for '.' to make C++ identifiers.
2606 operandsWithExtREString = r'(?<!\w)(%s)_(%s)(?!\w)' \
2607 % ('|'.join(operands), '|'.join(extensions))
2608
2609 self.operandsWithExtRE = \
2610 re.compile(operandsWithExtREString, re.MULTILINE)
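        # A hedged illustration (operand and extension names are examples
        # only): with operands ('Ra', 'Rb') and extensions ('sw', 'ud'),
        # operandsRE matches bare 'Ra' as well as 'Ra_sw', while
        # operandsWithExtRE matches only the suffixed forms, capturing base
        # and extension separately so substMungedOpNames() below can strip
        # the extension.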
2611
2612 def substMungedOpNames(self, code):
2613 '''Munge operand names in code string to make legal C++
2614 variable names. This means getting rid of the type extension
2615 if any. Will match base_name attribute of Operand object.)'''
2616         if any. Will match the base_name attribute of the Operand object.'''
2617
2618 def mungeSnippet(self, s):
2619 '''Fix up code snippets for final substitution in templates.'''
2620 if isinstance(s, str):
2621 return self.substMungedOpNames(substBitOps(s))
2622 else:
2623 return s
2624
2625 def open(self, name, bare=False):
2626 '''Open the output file for writing and include scary warning.'''
2627 filename = os.path.join(self.output_dir, name)
2628 f = open(filename, 'w')
2629 if f:
2630 if not bare:
2631 f.write(ISAParser.scaremonger_template % self)
2632 return f
2633
2634 def update(self, file, contents):
2635 '''Update the output file only. Scons should handle the case when
2636 the new contents are unchanged using its built-in hash feature.'''
2637 f = self.open(file)
2638 f.write(contents)
2639 f.close()
2640
2641 # This regular expression matches '##include' directives
2642 includeRE = re.compile(r'^\s*##include\s+"(?P<filename>[^"]*)".*$',
2643 re.MULTILINE)
2644
2645 def replace_include(self, matchobj, dirname):
2646 """Function to replace a matched '##include' directive with the
2647 contents of the specified file (with nested ##includes
2648 replaced recursively). 'matchobj' is an re match object
2649 (from a match of includeRE) and 'dirname' is the directory
2650 relative to which the file path should be resolved."""
2651
2652 fname = matchobj.group('filename')
2653 full_fname = os.path.normpath(os.path.join(dirname, fname))
2654 contents = '##newfile "%s"\n%s\n##endfile\n' % \
2655 (full_fname, self.read_and_flatten(full_fname))
2656 return contents
2657
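    # A hedged illustration (the file name is arbitrary): a directive such as
    #
    #   ##include "decoder.isa"
    #
    # is replaced in the flattened input by
    #
    #   ##newfile "<resolved path>/decoder.isa"
    #   <contents of decoder.isa, recursively flattened>
    #   ##endfile
    #
    # so the lexer's t_NEWFILE/t_ENDFILE rules can track per-file line
    # numbers for error reporting.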
2658 def read_and_flatten(self, filename):
2659 """Read a file and recursively flatten nested '##include' files."""
2660
2661 current_dir = os.path.dirname(filename)
2662 try:
2663 contents = open(filename).read()
2664 except IOError:
2665 error('Error including file "%s"' % filename)
2666
2667 self.fileNameStack.push(LineTracker(filename))
2668
2669 # Find any includes and include them
2670 def replace(matchobj):
2671 return self.replace_include(matchobj, current_dir)
2672 contents = self.includeRE.sub(replace, contents)
2673
2674 self.fileNameStack.pop()
2675 return contents
2676
2677 AlreadyGenerated = {}
2678
2679 def _parse_isa_desc(self, isa_desc_file):
2680 '''Read in and parse the ISA description.'''
2681
2682 # The build system can end up running the ISA parser twice: once to
2683 # finalize the build dependencies, and then to actually generate
2684 # the files it expects (in src/arch/$ARCH/generated). This code
2685 # doesn't do anything different either time, however; the SCons
2686 # invocations just expect different things. Since this code runs
2687 # within SCons, we can just remember that we've already run and
2688 # not perform a completely unnecessary run, since the ISA parser's
2689 # effect is idempotent.
2690 if isa_desc_file in ISAParser.AlreadyGenerated:
2691 return
2692
2693 # grab the last three path components of isa_desc_file
2694 self.filename = '/'.join(isa_desc_file.split('/')[-3:])
2695
2696 # Read file and (recursively) all included files into a string.
2697 # PLY requires that the input be in a single string so we have to
2698 # do this up front.
2699 isa_desc = self.read_and_flatten(isa_desc_file)
2700
2701 # Initialize lineno tracker
2702 self.lex.lineno = LineTracker(isa_desc_file)
2703
2704 # Parse.
2705 self.parse_string(isa_desc)
2706
2707 ISAParser.AlreadyGenerated[isa_desc_file] = None
2708
2709 def parse_isa_desc(self, *args, **kwargs):
2710 try:
2711 self._parse_isa_desc(*args, **kwargs)
2712 except ISAParserError, e:
2713 print(backtrace(self.fileNameStack))
2714 print("At %s:" % e.lineno)
2715 print(e)
2716 sys.exit(1)
2717
2718 # Called as script: get args from command line.
2719 # Args are: <isa desc file> <output dir>
2720 if __name__ == '__main__':
2721 ISAParser(sys.argv[2]).parse_isa_desc(sys.argv[1])