scons: Enable building with the gcc/clang Address Sanitizer
1 # -*- mode:python -*-
2
3 # Copyright (c) 2013, 2015 ARM Limited
4 # All rights reserved.
5 #
6 # The license below extends only to copyright in the software and shall
7 # not be construed as granting a license to any other intellectual
8 # property including but not limited to intellectual property relating
9 # to a hardware implementation of the functionality of the software
10 # licensed hereunder. You may use the software subject to the license
11 # terms below provided that you ensure that this notice is replicated
12 # unmodified and in its entirety in all distributions of the software,
13 # modified or unmodified, in source code or in binary form.
14 #
15 # Copyright (c) 2011 Advanced Micro Devices, Inc.
16 # Copyright (c) 2009 The Hewlett-Packard Development Company
17 # Copyright (c) 2004-2005 The Regents of The University of Michigan
18 # All rights reserved.
19 #
20 # Redistribution and use in source and binary forms, with or without
21 # modification, are permitted provided that the following conditions are
22 # met: redistributions of source code must retain the above copyright
23 # notice, this list of conditions and the following disclaimer;
24 # redistributions in binary form must reproduce the above copyright
25 # notice, this list of conditions and the following disclaimer in the
26 # documentation and/or other materials provided with the distribution;
27 # neither the name of the copyright holders nor the names of its
28 # contributors may be used to endorse or promote products derived from
29 # this software without specific prior written permission.
30 #
31 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
42 #
43 # Authors: Steve Reinhardt
44 # Nathan Binkert
45
46 ###################################################
47 #
48 # SCons top-level build description (SConstruct) file.
49 #
50 # While in this directory ('gem5'), just type 'scons' to build the default
51 # configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52 # to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53 # the optimized full-system version).
54 #
55 # You can build gem5 in a different directory as long as there is a
56 # 'build/<CONFIG>' somewhere along the target path. The build system
57 # expects that all configs under the same build directory are being
58 # built for the same host system.
59 #
60 # Examples:
61 #
62 # The following two commands are equivalent. The '-u' option tells
63 # scons to search up the directory tree for this SConstruct file.
64 # % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65 # % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
66 #
67 # The following two commands are equivalent and demonstrate building
68 # in a directory outside of the source tree. The '-C' option tells
69 # scons to chdir to the specified directory to find this SConstruct
70 # file.
71 # % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72 # % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
73 #
74 # You can use 'scons -H' to print scons options. If you're in this
75 # 'gem5' directory (or use -u or -C to tell scons where to find this
76 # file), you can use 'scons -h' to print all the gem5-specific build
77 # options as well.
78 #
79 ###################################################
80
81 # Check for recent-enough Python and SCons versions.
82 try:
83 # Really old versions of scons only take two options for the
84 # function, so check once without the revision and once with the
85 # revision; the first instance will fail for stuff other than
86 # 0.98, and the second will fail for 0.98.0
87 EnsureSConsVersion(0, 98)
88 EnsureSConsVersion(0, 98, 1)
89 except SystemExit, e:
90 print """
91 For more details, see:
92 http://gem5.org/Dependencies
93 """
94 raise
95
96 # We ensure the python version early because python-config
97 # requires python 2.5
98 try:
99 EnsurePythonVersion(2, 5)
100 except SystemExit, e:
101 print """
102 You can use a non-default installation of the Python interpreter by
103 rearranging your PATH so that scons finds the non-default 'python' and
104 'python-config' first.
105
106 For more details, see:
107 http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
108 """
109 raise
110
111 # Global Python includes
112 import itertools
113 import os
114 import re
115 import subprocess
116 import sys
117
118 from os import mkdir, environ
119 from os.path import abspath, basename, dirname, expanduser, normpath
120 from os.path import exists, isdir, isfile
121 from os.path import join as joinpath, split as splitpath
122
123 # SCons includes
124 import SCons
125 import SCons.Node
126
127 extra_python_paths = [
128 Dir('src/python').srcnode().abspath, # gem5 includes
129 Dir('ext/ply').srcnode().abspath, # ply is used by several files
130 ]
131
132 sys.path[1:1] = extra_python_paths
133
134 from m5.util import compareVersions, readCommand
135 from m5.util.terminal import get_termcap
136
137 help_texts = {
138 "options" : "",
139 "global_vars" : "",
140 "local_vars" : ""
141 }
142
143 Export("help_texts")
144
145
146 # There's a bug in scons in that (1) by default, the help texts from
147 # AddOption() are supposed to be displayed when you type 'scons -h'
148 # and (2) you can override the help displayed by 'scons -h' using the
149 # Help() function, but these two features are incompatible: once
150 # you've overridden the help text using Help(), there's no way to get
151 # at the help texts from AddOptions. See:
152 # http://scons.tigris.org/issues/show_bug.cgi?id=2356
153 # http://scons.tigris.org/issues/show_bug.cgi?id=2611
154 # This hack lets us extract the help text from AddOption and
155 # re-inject it via Help(). Ideally someday this bug will be fixed and
156 # we can just use AddOption directly.
157 def AddLocalOption(*args, **kwargs):
158 col_width = 30
159
160 help = " " + ", ".join(args)
161 if "help" in kwargs:
162 length = len(help)
163 if length >= col_width:
164 help += "\n" + " " * col_width
165 else:
166 help += " " * (col_width - length)
167 help += kwargs["help"]
168 help_texts["options"] += help + "\n"
169
170 AddOption(*args, **kwargs)
171
172 AddLocalOption('--colors', dest='use_colors', action='store_true',
173 help="Add color to abbreviated scons output")
174 AddLocalOption('--no-colors', dest='use_colors', action='store_false',
175 help="Don't add color to abbreviated scons output")
176 AddLocalOption('--with-cxx-config', dest='with_cxx_config',
177 action='store_true',
178 help="Build with support for C++-based configuration")
179 AddLocalOption('--default', dest='default', type='string', action='store',
180 help='Override which build_opts file to use for defaults')
181 AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
182 help='Disable style checking hooks')
183 AddLocalOption('--no-lto', dest='no_lto', action='store_true',
184 help='Disable Link-Time Optimization for fast')
185 AddLocalOption('--update-ref', dest='update_ref', action='store_true',
186 help='Update test reference outputs')
187 AddLocalOption('--verbose', dest='verbose', action='store_true',
188 help='Print full tool command lines')
189 AddLocalOption('--without-python', dest='without_python',
190 action='store_true',
191 help='Build without Python configuration support')
192 AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
193 action='store_true',
194 help='Disable linking against tcmalloc')
195 AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
196 help='Build with Undefined Behavior Sanitizer if available')
197 AddLocalOption('--with-asan', dest='with_asan', action='store_true',
198 help='Build with Address Sanitizer if available')
199
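# Example (illustrative): with the options above, an AddressSanitizer
# instrumented debug build could be requested as
#   scons --with-asan --without-tcmalloc build/X86/gem5.debug
# (--without-tcmalloc is optional; it is suggested only because a second
# malloc replacement can interfere with ASan's own allocator).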
200 termcap = get_termcap(GetOption('use_colors'))
201
202 ########################################################################
203 #
204 # Set up the main build environment.
205 #
206 ########################################################################
207
208 # export TERM so that clang reports errors in color
209 use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
210 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
211 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])
212
213 use_prefixes = [
214 "ASAN_", # address sanitizer symbolizer path and settings
215 "CCACHE_", # ccache (caching compiler wrapper) configuration
216 "CCC_", # clang static analyzer configuration
217 "DISTCC_", # distcc (distributed compiler wrapper) configuration
218 "INCLUDE_SERVER_", # distcc pump server settings
219 "M5", # M5 configuration (e.g., path to kernels)
220 ]
221
222 use_env = {}
223 for key,val in sorted(os.environ.iteritems()):
224 if key in use_vars or \
225 any([key.startswith(prefix) for prefix in use_prefixes]):
226 use_env[key] = val
227
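# For instance, invoking scons as
#   ASAN_SYMBOLIZER_PATH=/usr/bin/llvm-symbolizer CC=clang CXX=clang++ scons ...
# forwards all three variables into the build environment (ASAN_* by
# prefix, CC/CXX by name), while unrelated variables such as EDITOR are
# dropped.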
228 # Tell scons to avoid implicit command dependencies to prevent issues
229 # with the param wrappers being compiled twice (see
230 # http://scons.tigris.org/issues/show_bug.cgi?id=2811)
231 main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
232 main.Decider('MD5-timestamp')
233 main.root = Dir(".") # The current directory (where this file lives).
234 main.srcdir = Dir("src") # The source directory
235
236 main_dict_keys = main.Dictionary().keys()
237
238 # Check that we have a C/C++ compiler
239 if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
240 print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
241 Exit(1)
242
243 # Check that swig is present
244 if not 'SWIG' in main_dict_keys:
245 print "swig is not installed (package swig on Ubuntu and RedHat)"
246 Exit(1)
247
248 # add useful python code to PYTHONPATH so it can be used by subprocesses
249 # as well
250 main.AppendENVPath('PYTHONPATH', extra_python_paths)
251
252 ########################################################################
253 #
254 # Mercurial Stuff.
255 #
256 # If the gem5 directory is a mercurial repository, we should do some
257 # extra things.
258 #
259 ########################################################################
260
261 hgdir = main.root.Dir(".hg")
262
263 mercurial_style_message = """
264 You're missing the gem5 style hook, which automatically checks your code
265 against the gem5 style rules on hg commit and qrefresh commands. This
266 script will now install the hook in your .hg/hgrc file.
267 Press enter to continue, or ctrl-c to abort: """
268
269 mercurial_style_hook = """
270 # The following lines were automatically added by gem5/SConstruct
271 # to provide the gem5 style-checking hooks
272 [extensions]
273 style = %s/util/style.py
274
275 [hooks]
276 pretxncommit.style = python:style.check_style
277 pre-qrefresh.style = python:style.check_style
278 # End of SConstruct additions
279
280 """ % (main.root.abspath)
281
282 mercurial_lib_not_found = """
283 Mercurial libraries cannot be found, ignoring style hook. If
284 you are a gem5 developer, please fix this and run the style
285 hook. It is important.
286 """
287
288 # Check for style hook and prompt for installation if it's not there.
289 # Skip this if --ignore-style was specified, there's no .hg dir to
290 # install a hook in, or there's no interactive terminal to prompt.
291 if not GetOption('ignore_style') and hgdir.exists() and sys.stdin.isatty():
292 style_hook = True
293 try:
294 from mercurial import ui
295 ui = ui.ui()
296 ui.readconfig(hgdir.File('hgrc').abspath)
297 style_hook = ui.config('hooks', 'pretxncommit.style', None) and \
298 ui.config('hooks', 'pre-qrefresh.style', None)
299 except ImportError:
300 print mercurial_lib_not_found
301
302 if not style_hook:
303 print mercurial_style_message,
304 # continue unless user does ctrl-c/ctrl-d etc.
305 try:
306 raw_input()
307 except:
308 print "Input exception, exiting scons.\n"
309 sys.exit(1)
310 hgrc_path = '%s/.hg/hgrc' % main.root.abspath
311 print "Adding style hook to", hgrc_path, "\n"
312 try:
313 hgrc = open(hgrc_path, 'a')
314 hgrc.write(mercurial_style_hook)
315 hgrc.close()
316 except:
317 print "Error updating", hgrc_path
318 sys.exit(1)
319
320
321 ###################################################
322 #
323 # Figure out which configurations to set up based on the path(s) of
324 # the target(s).
325 #
326 ###################################################
327
328 # Find default configuration & binary.
329 Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
330
331 # helper function: find last occurrence of element in list
332 def rfind(l, elt, offs = -1):
333 for i in range(len(l)+offs, 0, -1):
334 if l[i] == elt:
335 return i
336 raise ValueError, "element not found"
337
338 # Take a list of paths (or SCons Nodes) and return a list with all
339 # paths made absolute and ~-expanded. Paths will be interpreted
340 # relative to the launch directory unless a different root is provided
341 def makePathListAbsolute(path_list, root=GetLaunchDir()):
342 return [abspath(joinpath(root, expanduser(str(p))))
343 for p in path_list]
344
345 # Each target must have 'build' in the interior of the path; the
346 # directory below this will determine the build parameters. For
347 # example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
348 # recognize that ALPHA_SE specifies the configuration because it
349 # follows 'build' in the build path.
350
351 # The funky assignment to "[:]" is needed to replace the list contents
352 # in place rather than reassign the symbol to a new list, which
353 # doesn't work (obviously!).
354 BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
355
356 # Generate a list of the unique build roots and configs that the
357 # collected targets reference.
358 variant_paths = []
359 build_root = None
360 for t in BUILD_TARGETS:
361 path_dirs = t.split('/')
362 try:
363 build_top = rfind(path_dirs, 'build', -2)
364 except:
365 print "Error: no non-leaf 'build' dir found on target path", t
366 Exit(1)
367 this_build_root = joinpath('/',*path_dirs[:build_top+1])
368 if not build_root:
369 build_root = this_build_root
370 else:
371 if this_build_root != build_root:
372 print "Error: build targets not under same build root\n"\
373 " %s\n %s" % (build_root, this_build_root)
374 Exit(1)
375 variant_path = joinpath('/',*path_dirs[:build_top+2])
376 if variant_path not in variant_paths:
377 variant_paths.append(variant_path)
378
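# Worked example: for the target '/local/foo/build/ALPHA/gem5.opt' the
# last non-leaf 'build' component gives build_root = '/local/foo/build'
# and variant_path = '/local/foo/build/ALPHA'.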
379 # Make sure build_root exists (might not if this is the first build there)
380 if not isdir(build_root):
381 mkdir(build_root)
382 main['BUILDROOT'] = build_root
383
384 Export('main')
385
386 main.SConsignFile(joinpath(build_root, "sconsign"))
387
388 # Default duplicate option is to use hard links, but this messes up
389 # when you use emacs to edit a file in the target dir, as emacs moves
390 # file to file~ then copies to file, breaking the link. Symbolic
391 # (soft) links work better.
392 main.SetOption('duplicate', 'soft-copy')
393
394 #
395 # Set up global sticky variables... these are common to an entire build
396 # tree (not specific to a particular build like ALPHA_SE)
397 #
398
399 global_vars_file = joinpath(build_root, 'variables.global')
400
401 global_vars = Variables(global_vars_file, args=ARGUMENTS)
402
403 global_vars.AddVariables(
404 ('CC', 'C compiler', environ.get('CC', main['CC'])),
405 ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
406 ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
407 ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
408 ('BATCH', 'Use batch pool for build and tests', False),
409 ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
410 ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
411 ('EXTRAS', 'Add extra directories to the compilation', '')
412 )
413
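# These are ordinary scons command-line assignments, so e.g.
#   scons CC=clang CXX=clang++ EXTRAS=/path/to/extras build/X86/gem5.opt
# records the settings in <build_root>/variables.global and reuses them
# on later invocations.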
414 # Update main environment with values from ARGUMENTS & global_vars_file
415 global_vars.Update(main)
416 help_texts["global_vars"] += global_vars.GenerateHelpText(main)
417
418 # Save sticky variable settings back to current variables file
419 global_vars.Save(global_vars_file, main)
420
421 # Parse EXTRAS variable to build a list of all directories where we'll
422 # look for sources etc. This list is exported as extras_dir_list.
423 base_dir = main.srcdir.abspath
424 if main['EXTRAS']:
425 extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
426 else:
427 extras_dir_list = []
428
429 Export('base_dir')
430 Export('extras_dir_list')
431
432 # the ext directory should be on the #includes path
433 main.Append(CPPPATH=[Dir('ext')])
434
435 def strip_build_path(path, env):
436 path = str(path)
437 variant_base = env['BUILDROOT'] + os.path.sep
438 if path.startswith(variant_base):
439 path = path[len(variant_base):]
440 elif path.startswith('build/'):
441 path = path[6:]
442 return path
443
444 # Generate a string of the form:
445 # common/path/prefix/src1, src2 -> tgt1, tgt2
446 # to print while building.
447 class Transform(object):
448 # all specific color settings should be here and nowhere else
449 tool_color = termcap.Normal
450 pfx_color = termcap.Yellow
451 srcs_color = termcap.Yellow + termcap.Bold
452 arrow_color = termcap.Blue + termcap.Bold
453 tgts_color = termcap.Yellow + termcap.Bold
454
455 def __init__(self, tool, max_sources=99):
456 self.format = self.tool_color + (" [%8s] " % tool) \
457 + self.pfx_color + "%s" \
458 + self.srcs_color + "%s" \
459 + self.arrow_color + " -> " \
460 + self.tgts_color + "%s" \
461 + termcap.Normal
462 self.max_sources = max_sources
463
464 def __call__(self, target, source, env, for_signature=None):
465 # truncate source list according to max_sources param
466 source = source[0:self.max_sources]
467 def strip(f):
468 return strip_build_path(str(f), env)
469 if len(source) > 0:
470 srcs = map(strip, source)
471 else:
472 srcs = ['']
473 tgts = map(strip, target)
474 # surprisingly, os.path.commonprefix is a dumb char-by-char string
475 # operation that has nothing to do with paths.
476 com_pfx = os.path.commonprefix(srcs + tgts)
477 com_pfx_len = len(com_pfx)
478 if com_pfx:
479 # do some cleanup and sanity checking on common prefix
480 if com_pfx[-1] == ".":
481 # prefix matches all but file extension: ok
482 # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
483 com_pfx = com_pfx[0:-1]
484 elif com_pfx[-1] == "/":
485 # common prefix is directory path: OK
486 pass
487 else:
488 src0_len = len(srcs[0])
489 tgt0_len = len(tgts[0])
490 if src0_len == com_pfx_len:
491 # source is a substring of target, OK
492 pass
493 elif tgt0_len == com_pfx_len:
494 # target is a substring of source, need to back up to
495 # avoid empty string on RHS of arrow
496 sep_idx = com_pfx.rfind(".")
497 if sep_idx != -1:
498 com_pfx = com_pfx[0:sep_idx]
499 else:
500 com_pfx = ''
501 elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
502 # still splitting at file extension: ok
503 pass
504 else:
505 # probably a fluke; ignore it
506 com_pfx = ''
507 # recalculate length in case com_pfx was modified
508 com_pfx_len = len(com_pfx)
509 def fmt(files):
510 f = map(lambda s: s[com_pfx_len:], files)
511 return ', '.join(f)
512 return self.format % (com_pfx, fmt(srcs), fmt(tgts))
513
514 Export('Transform')
515
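# As a rough illustration, Transform("CXX") renders a compile step as
# something like
#   [     CXX] X86/base/bitmap.cc -> .o
# with the common path prefix of sources and targets printed only once.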
516 # enable the regression script to use the termcap
517 main['TERMCAP'] = termcap
518
519 if GetOption('verbose'):
520 def MakeAction(action, string, *args, **kwargs):
521 return Action(action, *args, **kwargs)
522 else:
523 MakeAction = Action
524 main['CCCOMSTR'] = Transform("CC")
525 main['CXXCOMSTR'] = Transform("CXX")
526 main['ASCOMSTR'] = Transform("AS")
527 main['SWIGCOMSTR'] = Transform("SWIG")
528 main['ARCOMSTR'] = Transform("AR", 0)
529 main['LINKCOMSTR'] = Transform("LINK", 0)
530 main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
531 main['M4COMSTR'] = Transform("M4")
532 main['SHCCCOMSTR'] = Transform("SHCC")
533 main['SHCXXCOMSTR'] = Transform("SHCXX")
534 Export('MakeAction')
535
536 # Initialize the Link-Time Optimization (LTO) flags
537 main['LTO_CCFLAGS'] = []
538 main['LTO_LDFLAGS'] = []
539
540 # According to the readme, tcmalloc works best if the compiler doesn't
541 # assume that we're using the builtin malloc and friends. These flags
542 # are compiler-specific, so we need to set them after we detect which
543 # compiler we're using.
544 main['TCMALLOC_CCFLAGS'] = []
545
546 CXX_version = readCommand([main['CXX'],'--version'], exception=False)
547 CXX_V = readCommand([main['CXX'],'-V'], exception=False)
548
549 main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
550 main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
551 if main['GCC'] + main['CLANG'] > 1:
552 print 'Error: How can we have two at the same time?'
553 Exit(1)
554
555 # Set up default C++ compiler flags
556 if main['GCC'] or main['CLANG']:
557 # As gcc and clang share many flags, do the common parts here
558 main.Append(CCFLAGS=['-pipe'])
559 main.Append(CCFLAGS=['-fno-strict-aliasing'])
560 # Enable -Wall and -Wextra and then disable the few warnings that
561 # we consistently violate
562 main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
563 '-Wno-sign-compare', '-Wno-unused-parameter'])
564 # We always compile using C++11
565 main.Append(CXXFLAGS=['-std=c++11'])
566 else:
567 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
568 print "Don't know what compiler options to use for your compiler."
569 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
570 print termcap.Yellow + ' version:' + termcap.Normal,
571 if not CXX_version:
572 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
573 termcap.Normal
574 else:
575 print CXX_version.replace('\n', '<nl>')
576 print " If you're trying to use a compiler other than GCC"
577 print " or clang, there appears to be something wrong with your"
578 print " environment."
579 print " "
580 print " If you are trying to use a compiler other than those listed"
581 print " above you will need to fix SConstruct and "
582 print " src/SConscript to support that compiler."
583 Exit(1)
584
585 if main['GCC']:
586 # Check for a supported version of gcc. >= 4.7 is chosen for its
587 # level of c++11 support. See
588 # http://gcc.gnu.org/projects/cxx0x.html for details.
589 gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
590 if compareVersions(gcc_version, "4.7") < 0:
591 print 'Error: gcc version 4.7 or newer required.'
592 print ' Installed version:', gcc_version
593 Exit(1)
594
595 main['GCC_VERSION'] = gcc_version
596
597 # gcc from version 4.8 and above generates "rep; ret" instructions
598 # to avoid performance penalties on certain AMD chips. Older
599 # assemblers detect this as an error, "Error: expecting string
600 # instruction after `rep'"
601 if compareVersions(gcc_version, "4.8") > 0:
602 as_version_raw = readCommand([main['AS'], '-v', '/dev/null'],
603 exception=False).split()
604
605 # version strings may contain extra distro-specific
606 # qualifiers, so play it safe and keep only what comes before
607 # the first hyphen
608 as_version = as_version_raw[-1].split('-')[0] if as_version_raw \
609 else None
610
611 if not as_version or compareVersions(as_version, "2.23") < 0:
612 print termcap.Yellow + termcap.Bold + \
613 'Warning: This combination of gcc and binutils has' + \
614 ' known incompatibilities.\n' + \
615 ' If you encounter build problems, please update ' + \
616 'binutils to 2.23.' + \
617 termcap.Normal
618
619 # Make sure we warn if the user has requested to compile with the
620 # Undefined Behavior Sanitizer and this version of gcc does not
621 # support it.
622 if GetOption('with_ubsan') and \
623 compareVersions(gcc_version, '4.9') < 0:
624 print termcap.Yellow + termcap.Bold + \
625 'Warning: UBSan is only supported using gcc 4.9 and later.' + \
626 termcap.Normal
627
628 # Add the appropriate Link-Time Optimization (LTO) flags
629 # unless LTO is explicitly turned off. Note that these flags
630 # are only used by the fast target.
631 if not GetOption('no_lto'):
632 # Pass the LTO flag when compiling to produce GIMPLE
633 # output, we merely create the flags here and only append
634 # them later
635 main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
636
637 # Use the same amount of jobs for LTO as we are running
638 # scons with
639 main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
640
641 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
642 '-fno-builtin-realloc', '-fno-builtin-free'])
643
644 elif main['CLANG']:
645 # Check for a supported version of clang; >= 3.1 is needed to
646 # support features similar to gcc 4.7. See
647 # http://clang.llvm.org/cxx_status.html for details
648 clang_version_re = re.compile(".* version (\d+\.\d+)")
649 clang_version_match = clang_version_re.search(CXX_version)
650 if (clang_version_match):
651 clang_version = clang_version_match.groups()[0]
652 if compareVersions(clang_version, "3.1") < 0:
653 print 'Error: clang version 3.1 or newer required.'
654 print ' Installed version:', clang_version
655 Exit(1)
656 else:
657 print 'Error: Unable to determine clang version.'
658 Exit(1)
659
660 # clang has a few additional warnings that we disable: extraneous
661 # parentheses are allowed due to Ruby's printing of the AST, and
662 # self assignments are allowed as the generated CPU code
663 # relies on this
664 main.Append(CCFLAGS=['-Wno-parentheses',
665 '-Wno-self-assign',
666 # Some versions of libstdc++ (4.8?) seem to
667 # use struct hash and class hash
668 # interchangeably.
669 '-Wno-mismatched-tags',
670 ])
671
672 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
673
674 # On Mac OS X/Darwin we also need to use libc++ (part of Xcode) as
675 # opposed to libstdc++, as the latter is dated.
676 if sys.platform == "darwin":
677 main.Append(CXXFLAGS=['-stdlib=libc++'])
678 main.Append(LIBS=['c++'])
679
680 else:
681 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
682 print "Don't know what compiler options to use for your compiler."
683 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
684 print termcap.Yellow + ' version:' + termcap.Normal,
685 if not CXX_version:
686 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
687 termcap.Normal
688 else:
689 print CXX_version.replace('\n', '<nl>')
690 print " If you're trying to use a compiler other than GCC"
691 print " or clang, there appears to be something wrong with your"
692 print " environment."
693 print " "
694 print " If you are trying to use a compiler other than those listed"
695 print " above you will need to fix SConstruct and "
696 print " src/SConscript to support that compiler."
697 Exit(1)
698
699 # Set up common yacc/bison flags (needed for Ruby)
700 main['YACCFLAGS'] = '-d'
701 main['YACCHXXFILESUFFIX'] = '.hh'
702
703 # Do this after we save settings back, or else we'll tack on an
704 # extra 'qdo' every time we run scons.
705 if main['BATCH']:
706 main['CC'] = main['BATCH_CMD'] + ' ' + main['CC']
707 main['CXX'] = main['BATCH_CMD'] + ' ' + main['CXX']
708 main['AS'] = main['BATCH_CMD'] + ' ' + main['AS']
709 main['AR'] = main['BATCH_CMD'] + ' ' + main['AR']
710 main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
711
712 if sys.platform == 'cygwin':
713 # cygwin has some header file issues...
714 main.Append(CCFLAGS=["-Wno-uninitialized"])
715
716 # Check for the protobuf compiler
717 protoc_version = readCommand([main['PROTOC'], '--version'],
718 exception='').split()
719
720 # First two words should be "libprotoc x.y.z"
721 if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
722 print termcap.Yellow + termcap.Bold + \
723 'Warning: Protocol buffer compiler (protoc) not found.\n' + \
724 ' Please install protobuf-compiler for tracing support.' + \
725 termcap.Normal
726 main['PROTOC'] = False
727 else:
728 # Based on the availability of the compress stream wrappers,
729 # require 2.1.0
730 min_protoc_version = '2.1.0'
731 if compareVersions(protoc_version[1], min_protoc_version) < 0:
732 print termcap.Yellow + termcap.Bold + \
733 'Warning: protoc version', min_protoc_version, \
734 'or newer required.\n' + \
735 ' Installed version:', protoc_version[1], \
736 termcap.Normal
737 main['PROTOC'] = False
738 else:
739 # Attempt to determine the appropriate include path and
740 # library path using pkg-config, which means we also need to
741 # check for pkg-config. Note that it is possible to use
742 # protobuf without the involvement of pkg-config. Later on we
743 # do a library config check and at that point the test
744 # will fail if libprotobuf cannot be found.
745 if readCommand(['pkg-config', '--version'], exception=''):
746 try:
747 # Attempt to establish what linking flags to add for protobuf
748 # using pkg-config
749 main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
750 except:
751 print termcap.Yellow + termcap.Bold + \
752 'Warning: pkg-config could not get protobuf flags.' + \
753 termcap.Normal
754
755 # Check for SWIG
756 if not main.has_key('SWIG'):
757 print 'Error: SWIG utility not found.'
758 print ' Please install (see http://www.swig.org) and retry.'
759 Exit(1)
760
761 # Check for appropriate SWIG version
762 swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
763 # First 3 words should be "SWIG Version x.y.z"
764 if len(swig_version) < 3 or \
765 swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
766 print 'Error determining SWIG version.'
767 Exit(1)
768
769 min_swig_version = '2.0.4'
770 if compareVersions(swig_version[2], min_swig_version) < 0:
771 print 'Error: SWIG version', min_swig_version, 'or newer required.'
772 print ' Installed version:', swig_version[2]
773 Exit(1)
774
775 # Check for known incompatibilities. The standard library shipped with
776 # gcc >= 4.9 does not play well with swig versions prior to 3.0
777 if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
778 compareVersions(swig_version[2], '3.0') < 0:
779 print termcap.Yellow + termcap.Bold + \
780 'Warning: This combination of gcc and swig has' + \
781 ' known incompatibilities.\n' + \
782 ' If you encounter build problems, please update ' + \
783 'swig to 3.0 or later.' + \
784 termcap.Normal
785
786 # Set up SWIG flags & scanner
787 swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
788 main.Append(SWIGFLAGS=swig_flags)
789
790 # Check for 'timeout' from GNU coreutils. If present, regressions will
791 # be run with a time limit. We require version 8.13 since we rely on
792 # support for the '--foreground' option.
793 timeout_lines = readCommand(['timeout', '--version'],
794 exception='').splitlines()
795 # Get the first line and tokenize it
796 timeout_version = timeout_lines[0].split() if timeout_lines else []
797 main['TIMEOUT'] = timeout_version and \
798 compareVersions(timeout_version[-1], '8.13') >= 0
799
800 # filter out all existing swig scanners, they mess up the dependency
801 # stuff for some reason
802 scanners = []
803 for scanner in main['SCANNERS']:
804 skeys = scanner.skeys
805 if skeys == '.i':
806 continue
807
808 if isinstance(skeys, (list, tuple)) and '.i' in skeys:
809 continue
810
811 scanners.append(scanner)
812
813 # add the new swig scanner that we like better
814 from SCons.Scanner import ClassicCPP as CPPScanner
815 swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
816 scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))
817
818 # replace the scanners list that has what we want
819 main['SCANNERS'] = scanners
820
821 # Add a custom Check function to test for structure members.
822 def CheckMember(context, include, decl, member, include_quotes="<>"):
823 context.Message("Checking for member %s in %s..." %
824 (member, decl))
825 text = """
826 #include %(header)s
827 int main(){
828 %(decl)s test;
829 (void)test.%(member)s;
830 return 0;
831 };
832 """ % { "header" : include_quotes[0] + include + include_quotes[1],
833 "decl" : decl,
834 "member" : member,
835 }
836
837 ret = context.TryCompile(text, extension=".cc")
838 context.Result(ret)
839 return ret
840
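# For example, the call further below,
#   conf.CheckMember('linux/perf_event.h', 'struct perf_event_attr',
#                    'exclude_host')
# compiles a tiny program that declares a struct perf_event_attr and
# accesses its exclude_host member, succeeding only if the member exists.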
841 # Platform-specific configuration. Note again that we assume that all
842 # builds under a given build root run on the same host platform.
843 conf = Configure(main,
844 conf_dir = joinpath(build_root, '.scons_config'),
845 log_file = joinpath(build_root, 'scons_config.log'),
846 custom_tests = {
847 'CheckMember' : CheckMember,
848 })
849
850 # Check if we should compile a 64 bit binary on Mac OS X/Darwin
851 try:
852 import platform
853 uname = platform.uname()
854 if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
855 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
856 main.Append(CCFLAGS=['-arch', 'x86_64'])
857 main.Append(CFLAGS=['-arch', 'x86_64'])
858 main.Append(LINKFLAGS=['-arch', 'x86_64'])
859 main.Append(ASFLAGS=['-arch', 'x86_64'])
860 except:
861 pass
862
863 # Recent versions of scons substitute a "Null" object for Configure()
864 # when configuration isn't necessary, e.g., if the "--help" option is
865 # present. Unfortunately this Null object always returns false,
866 # breaking all our configuration checks. We replace it with our own
867 # more optimistic null object that returns True instead.
868 if not conf:
869 def NullCheck(*args, **kwargs):
870 return True
871
872 class NullConf:
873 def __init__(self, env):
874 self.env = env
875 def Finish(self):
876 return self.env
877 def __getattr__(self, mname):
878 return NullCheck
879
880 conf = NullConf(main)
881
882 # Cache build files in the supplied directory.
883 if main['M5_BUILD_CACHE']:
884 print 'Using build cache located at', main['M5_BUILD_CACHE']
885 CacheDir(main['M5_BUILD_CACHE'])
886
887 if not GetOption('without_python'):
888 # Find Python include and library directories for embedding the
889 # interpreter. We rely on python-config to resolve the appropriate
890 # includes and linker flags. ParseConfig does not seem to understand
891 # the more exotic linker flags such as -Xlinker and -export-dynamic so
892 # we add them explicitly below. If you want to link in an alternate
893 # version of python, see above for instructions on how to invoke
894 # scons with the appropriate PATH set.
895 #
896 # First we check if python2-config exists, else we use python-config
897 python_config = readCommand(['which', 'python2-config'],
898 exception='').strip()
899 if not os.path.exists(python_config):
900 python_config = readCommand(['which', 'python-config'],
901 exception='').strip()
902 py_includes = readCommand([python_config, '--includes'],
903 exception='').split()
904 # Strip the -I from the include folders before adding them to the
905 # CPPPATH
906 main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
907
908 # Read the linker flags and split them into libraries and other link
909 # flags. The libraries are added later through the call to CheckLib.
910 py_ld_flags = readCommand([python_config, '--ldflags'],
911 exception='').split()
912 py_libs = []
913 for lib in py_ld_flags:
914 if not lib.startswith('-l'):
915 main.Append(LINKFLAGS=[lib])
916 else:
917 lib = lib[2:]
918 if lib not in py_libs:
919 py_libs.append(lib)
920
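# As an illustration, 'python-config --ldflags' typically prints something
# like '-lpython2.7 -lpthread -ldl -lutil -lm -Xlinker -export-dynamic';
# the -l entries end up in py_libs and the remaining flags in LINKFLAGS.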
921 # verify that this stuff works
922 if not conf.CheckHeader('Python.h', '<>'):
923 print "Error: can't find Python.h header in", py_includes
924 print "Install Python headers (package python-dev on Ubuntu and RedHat)"
925 Exit(1)
926
927 for lib in py_libs:
928 if not conf.CheckLib(lib):
929 print "Error: can't find library %s required by python" % lib
930 Exit(1)
931
932 # On Solaris you need to use libsocket for socket ops
933 if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
934 if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
935 print "Can't find library with socket calls (e.g. accept())"
936 Exit(1)
937
938 # Check for zlib. If the check passes, libz will be automatically
939 # added to the LIBS environment variable.
940 if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
941 print 'Error: did not find needed zlib compression library '\
942 'and/or zlib.h header file.'
943 print ' Please install zlib and try again.'
944 Exit(1)
945
946 # If we have the protobuf compiler, also make sure we have the
947 # development libraries. If the check passes, libprotobuf will be
948 # automatically added to the LIBS environment variable. After
949 # this, we can use the HAVE_PROTOBUF flag to determine if we have
950 # got both protoc and libprotobuf available.
951 main['HAVE_PROTOBUF'] = main['PROTOC'] and \
952 conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
953 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
954
955 # If we have the compiler but not the library, print another warning.
956 if main['PROTOC'] and not main['HAVE_PROTOBUF']:
957 print termcap.Yellow + termcap.Bold + \
958 'Warning: did not find protocol buffer library and/or headers.\n' + \
959 ' Please install libprotobuf-dev for tracing support.' + \
960 termcap.Normal
961
962 # Check for librt.
963 have_posix_clock = \
964 conf.CheckLibWithHeader(None, 'time.h', 'C',
965 'clock_nanosleep(0,0,NULL,NULL);') or \
966 conf.CheckLibWithHeader('rt', 'time.h', 'C',
967 'clock_nanosleep(0,0,NULL,NULL);')
968
969 have_posix_timers = \
970 conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
971 'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
972
973 if not GetOption('without_tcmalloc'):
974 if conf.CheckLib('tcmalloc'):
975 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
976 elif conf.CheckLib('tcmalloc_minimal'):
977 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
978 else:
979 print termcap.Yellow + termcap.Bold + \
980 "You can get a 12% performance improvement by "\
981 "installing tcmalloc (libgoogle-perftools-dev package "\
982 "on Ubuntu or RedHat)." + termcap.Normal
983
984
985 # Detect back trace implementations. The last implementation in the
986 # list will be used by default.
987 backtrace_impls = [ "none" ]
988
989 if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
990 'backtrace_symbols_fd((void*)0, 0, 0);'):
991 backtrace_impls.append("glibc")
992
993 if backtrace_impls[-1] == "none":
994 default_backtrace_impl = "none"
995 print termcap.Yellow + termcap.Bold + \
996 "No suitable back trace implementation found." + \
997 termcap.Normal
998
999 if not have_posix_clock:
1000 print "Can't find library for POSIX clocks."
1001
1002 # Check for <fenv.h> (C99 FP environment control)
1003 have_fenv = conf.CheckHeader('fenv.h', '<>')
1004 if not have_fenv:
1005 print "Warning: Header file <fenv.h> not found."
1006 print " This host has no IEEE FP rounding mode control."
1007
1008 # Check if we should enable KVM-based hardware virtualization. The API
1009 # we rely on has existed since version 2.6.36 of the kernel, but somehow
1010 # the KVM_API_VERSION does not reflect the change. We test for one of
1011 # the types as a fallback.
1012 have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
1013 if not have_kvm:
1014 print "Info: Compatible header file <linux/kvm.h> not found, " \
1015 "disabling KVM support."
1016
1017 # x86 needs support for xsave. We test for the structure here since we
1018 # won't be able to run new tests by the time we know which ISA we're
1019 # targeting.
1020 have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
1021 '#include <linux/kvm.h>') != 0
1022
1023 # Check if the requested target ISA is compatible with the host
1024 def is_isa_kvm_compatible(isa):
1025 try:
1026 import platform
1027 host_isa = platform.machine()
1028 except:
1029 print "Warning: Failed to determine host ISA."
1030 return False
1031
1032 if not have_posix_timers:
1033 print "Warning: Cannot enable KVM, host seems to lack support " \
1034 "for POSIX timers"
1035 return False
1036
1037 if isa == "arm":
1038 return host_isa in ( "armv7l", "aarch64" )
1039 elif isa == "x86":
1040 if host_isa != "x86_64":
1041 return False
1042
1043 if not have_kvm_xsave:
1044 print "KVM on x86 requires xsave support in kernel headers."
1045 return False
1046
1047 return True
1048 else:
1049 return False
1050
1051
1052 # Check if the exclude_host attribute is available. We want this to
1053 # get accurate instruction counts in KVM.
1054 main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
1055 'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1056
1057
1058 ######################################################################
1059 #
1060 # Finish the configuration
1061 #
1062 main = conf.Finish()
1063
1064 ######################################################################
1065 #
1066 # Collect all non-global variables
1067 #
1068
1069 # Define the universe of supported ISAs
1070 all_isa_list = [ ]
1071 all_gpu_isa_list = [ ]
1072 Export('all_isa_list')
1073 Export('all_gpu_isa_list')
1074
1075 class CpuModel(object):
1076 '''The CpuModel class encapsulates everything the ISA parser needs to
1077 know about a particular CPU model.'''
1078
1079 # Dict of available CPU model objects. Accessible as CpuModel.dict.
1080 dict = {}
1081
1082 # Constructor. Automatically adds models to CpuModel.dict.
1083 def __init__(self, name, default=False):
1084 self.name = name # name of model
1085
1086 # This cpu is enabled by default
1087 self.default = default
1088
1089 # Add self to dict
1090 if name in CpuModel.dict:
1091 raise AttributeError, "CpuModel '%s' already registered" % name
1092 CpuModel.dict[name] = self
1093
1094 Export('CpuModel')
1095
1096 # Sticky variables get saved in the variables file so they persist from
1097 # one invocation to the next (unless overridden, in which case the new
1098 # value becomes sticky).
1099 sticky_vars = Variables(args=ARGUMENTS)
1100 Export('sticky_vars')
1101
1102 # Sticky variables that should be exported
1103 export_vars = []
1104 Export('export_vars')
1105
1106 # For Ruby
1107 all_protocols = []
1108 Export('all_protocols')
1109 protocol_dirs = []
1110 Export('protocol_dirs')
1111 slicc_includes = []
1112 Export('slicc_includes')
1113
1114 # Walk the tree and execute all SConsopts scripts that will add to the
1115 # above variables
1116 if GetOption('verbose'):
1117 print "Reading SConsopts"
1118 for bdir in [ base_dir ] + extras_dir_list:
1119 if not isdir(bdir):
1120 print "Error: directory '%s' does not exist" % bdir
1121 Exit(1)
1122 for root, dirs, files in os.walk(bdir):
1123 if 'SConsopts' in files:
1124 if GetOption('verbose'):
1125 print "Reading", joinpath(root, 'SConsopts')
1126 SConscript(joinpath(root, 'SConsopts'))
1127
1128 all_isa_list.sort()
1129 all_gpu_isa_list.sort()
1130
1131 sticky_vars.AddVariables(
1132 EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
1133 EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
1134 ListVariable('CPU_MODELS', 'CPU models',
1135 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
1136 sorted(CpuModel.dict.keys())),
1137 BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1138 False),
1139 BoolVariable('SS_COMPATIBLE_FP',
1140 'Make floating-point results compatible with SimpleScalar',
1141 False),
1142 BoolVariable('USE_SSE2',
1143 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1144 False),
1145 BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
1146 BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
1147 BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
1148 BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
1149 BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
1150 EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1151 all_protocols),
1152 EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
1153 backtrace_impls[-1], backtrace_impls)
1154 )
1155
1156 # These variables get exported to #defines in config/*.hh (see src/SConscript).
1157 export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
1158 'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL',
1159 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']
1160
1161 ###################################################
1162 #
1163 # Define a SCons builder for configuration flag headers.
1164 #
1165 ###################################################
1166
1167 # This function generates a config header file that #defines the
1168 # variable symbol to the current variable setting (0 or 1). The source
1169 # operands are the name of the variable and a Value node containing the
1170 # value of the variable.
1171 def build_config_file(target, source, env):
1172 (variable, value) = [s.get_contents() for s in source]
1173 f = file(str(target[0]), 'w')
1174 print >> f, '#define', variable, value
1175 f.close()
1176 return None
1177
1178 # Combine the two functions into a scons Action object.
1179 config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1180
1181 # The emitter munges the source & target node lists to reflect what
1182 # we're really doing.
1183 def config_emitter(target, source, env):
1184 # extract variable name from Builder arg
1185 variable = str(target[0])
1186 # True target is config header file
1187 target = joinpath('config', variable.lower() + '.hh')
1188 val = env[variable]
1189 if isinstance(val, bool):
1190 # Force value to 0/1
1191 val = int(val)
1192 elif isinstance(val, str):
1193 val = '"' + val + '"'
1194
1195 # Sources are variable name & value (packaged in SCons Value nodes)
1196 return ([target], [Value(variable), Value(val)])
1197
1198 config_builder = Builder(emitter = config_emitter, action = config_action)
1199
1200 main.Append(BUILDERS = { 'ConfigFile' : config_builder })
1201
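# As an example, env.ConfigFile('USE_KVM') with USE_KVM set to True ends up
# writing config/use_kvm.hh containing the single line:
#   #define USE_KVM 1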
1202 # libelf build is shared across all configs in the build root.
1203 main.SConscript('ext/libelf/SConscript',
1204 variant_dir = joinpath(build_root, 'libelf'))
1205
1206 # iostream3 build is shared across all configs in the build root.
1207 main.SConscript('ext/iostream3/SConscript',
1208 variant_dir = joinpath(build_root, 'iostream3'))
1209
1210 # libfdt build is shared across all configs in the build root.
1211 main.SConscript('ext/libfdt/SConscript',
1212 variant_dir = joinpath(build_root, 'libfdt'))
1213
1214 # fputils build is shared across all configs in the build root.
1215 main.SConscript('ext/fputils/SConscript',
1216 variant_dir = joinpath(build_root, 'fputils'))
1217
1218 # DRAMSim2 build is shared across all configs in the build root.
1219 main.SConscript('ext/dramsim2/SConscript',
1220 variant_dir = joinpath(build_root, 'dramsim2'))
1221
1222 # DRAMPower build is shared across all configs in the build root.
1223 main.SConscript('ext/drampower/SConscript',
1224 variant_dir = joinpath(build_root, 'drampower'))
1225
1226 # nomali build is shared across all configs in the build root.
1227 main.SConscript('ext/nomali/SConscript',
1228 variant_dir = joinpath(build_root, 'nomali'))
1229
1230 ###################################################
1231 #
1232 # This function is used to set up a directory with switching headers
1233 #
1234 ###################################################
1235
1236 main['ALL_ISA_LIST'] = all_isa_list
1237 main['ALL_GPU_ISA_LIST'] = all_gpu_isa_list
1238 all_isa_deps = {}
1239 def make_switching_dir(dname, switch_headers, env):
1240 # Generate the header. target[0] is the full path of the output
1241 # header to generate. 'source' is a dummy variable, since we get the
1242 # list of ISAs from env['ALL_ISA_LIST'].
1243 def gen_switch_hdr(target, source, env):
1244 fname = str(target[0])
1245 isa = env['TARGET_ISA'].lower()
1246 try:
1247 f = open(fname, 'w')
1248 print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
1249 f.close()
1250 except IOError:
1251 print "Failed to create %s" % fname
1252 raise
1253
1254 # Build SCons Action object. 'varlist' specifies env vars that this
1255 # action depends on; when env['ALL_ISA_LIST'] changes these actions
1256 # should get re-executed.
1257 switch_hdr_action = MakeAction(gen_switch_hdr,
1258 Transform("GENERATE"), varlist=['ALL_ISA_LIST'])
1259
1260 # Instantiate actions for each header
1261 for hdr in switch_headers:
1262 env.Command(hdr, [], switch_hdr_action)
1263
1264 isa_target = Dir('.').up().name.lower().replace('_', '-')
1265 env['PHONY_BASE'] = '#'+isa_target
1266 all_isa_deps[isa_target] = None
1267
1268 Export('make_switching_dir')
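# For instance (assuming the usual call from the arch SConscript), a
# switching header such as arch/decoder.hh generated for TARGET_ISA='x86'
# contains just:
#   #include "arch/x86/decoder.hh"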
1269
1270 def make_gpu_switching_dir(dname, switch_headers, env):
1271 # Generate the header. target[0] is the full path of the output
1272 # header to generate. 'source' is a dummy variable, since we get the
1273 # list of ISAs from env['ALL_ISA_LIST'].
1274 def gen_switch_hdr(target, source, env):
1275 fname = str(target[0])
1276
1277 isa = env['TARGET_GPU_ISA'].lower()
1278
1279 try:
1280 f = open(fname, 'w')
1281 print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
1282 f.close()
1283 except IOError:
1284 print "Failed to create %s" % fname
1285 raise
1286
1287 # Build SCons Action object. 'varlist' specifies env vars that this
1288 # action depends on; when env['ALL_ISA_LIST'] changes these actions
1289 # should get re-executed.
1290 switch_hdr_action = MakeAction(gen_switch_hdr,
1291 Transform("GENERATE"), varlist=['ALL_GPU_ISA_LIST'])
1292
1293 # Instantiate actions for each header
1294 for hdr in switch_headers:
1295 env.Command(hdr, [], switch_hdr_action)
1296
1297 Export('make_gpu_switching_dir')
1298
1299 # all-isas -> all-deps -> all-environs -> all-targets
1300 main.Alias('#all-isas', [])
1301 main.Alias('#all-deps', '#all-isas')
1302
1303 # Dummy target to ensure all environments are created before telling
1304 # SCons what to actually make (the command line arguments). We attach
1305 # them to the dependence graph after the environments are complete.
1306 ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
1307 def environsComplete(target, source, env):
1308 for t in ORIG_BUILD_TARGETS:
1309 main.Depends('#all-targets', t)
1310
1311 # Each build/* switching_dir attaches its *-environs target to #all-environs.
1312 main.Append(BUILDERS = {'CompleteEnvirons' :
1313 Builder(action=MakeAction(environsComplete, None))})
1314 main.CompleteEnvirons('#all-environs', [])
1315
1316 def doNothing(**ignored): pass
1317 main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})
1318
1319 # The final target to which all the original targets ultimately get attached.
1320 main.Dummy('#all-targets', '#all-environs')
1321 BUILD_TARGETS[:] = ['#all-targets']
1322
1323 ###################################################
1324 #
1325 # Define build environments for selected configurations.
1326 #
1327 ###################################################
1328
1329 for variant_path in variant_paths:
1330 if not GetOption('silent'):
1331 print "Building in", variant_path
1332
1333 # Make a copy of the build-root environment to use for this config.
1334 env = main.Clone()
1335 env['BUILDDIR'] = variant_path
1336
1337 # variant_dir is the tail component of build path, and is used to
1338 # determine the build parameters (e.g., 'ALPHA_SE')
1339 (build_root, variant_dir) = splitpath(variant_path)
1340
1341 # Set env variables according to the build directory config.
1342 sticky_vars.files = []
1343 # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
1344 # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
1345 # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
1346 current_vars_file = joinpath(build_root, 'variables', variant_dir)
1347 if isfile(current_vars_file):
1348 sticky_vars.files.append(current_vars_file)
1349 if not GetOption('silent'):
1350 print "Using saved variables file %s" % current_vars_file
1351 else:
1352 # Build dir-specific variables file doesn't exist.
1353
1354 # Make sure the directory is there so we can create the file later
1355 opt_dir = dirname(current_vars_file)
1356 if not isdir(opt_dir):
1357 mkdir(opt_dir)
1358
1359 # Get default build variables from source tree. Variables are
1360 # normally determined by name of $VARIANT_DIR, but can be
1361 # overridden by '--default=' arg on command line.
1362 default = GetOption('default')
1363 opts_dir = joinpath(main.root.abspath, 'build_opts')
1364 if default:
1365 default_vars_files = [joinpath(build_root, 'variables', default),
1366 joinpath(opts_dir, default)]
1367 else:
1368 default_vars_files = [joinpath(opts_dir, variant_dir)]
1369 existing_files = filter(isfile, default_vars_files)
1370 if existing_files:
1371 default_vars_file = existing_files[0]
1372 sticky_vars.files.append(default_vars_file)
1373 print "Variables file %s not found,\n using defaults in %s" \
1374 % (current_vars_file, default_vars_file)
1375 else:
1376 print "Error: cannot find variables file %s or " \
1377 "default file(s) %s" \
1378 % (current_vars_file, ' or '.join(default_vars_files))
1379 Exit(1)
1380
1381 # Apply current variable settings to env
1382 sticky_vars.Update(env)
1383
1384 help_texts["local_vars"] += \
1385 "Build variables for %s:\n" % variant_dir \
1386 + sticky_vars.GenerateHelpText(env)
1387
1388 # Process variable settings.
1389
1390 if not have_fenv and env['USE_FENV']:
1391 print "Warning: <fenv.h> not available; " \
1392 "forcing USE_FENV to False in", variant_dir + "."
1393 env['USE_FENV'] = False
1394
1395 if not env['USE_FENV']:
1396 print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
1397 print " FP results may deviate slightly from other platforms."
1398
1399 if env['EFENCE']:
1400 env.Append(LIBS=['efence'])
1401
1402 if env['USE_KVM']:
1403 if not have_kvm:
1404 print "Warning: Cannot enable KVM, host seems to lack KVM support"
1405 env['USE_KVM'] = False
1406 elif not is_isa_kvm_compatible(env['TARGET_ISA']):
1407 print "Info: KVM support disabled due to unsupported host and " \
1408 "target ISA combination"
1409 env['USE_KVM'] = False
1410
1411 # Warn about missing optional functionality
1412 if env['USE_KVM']:
1413 if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
1414 print "Warning: perf_event headers lack support for the " \
1415 "exclude_host attribute. KVM instruction counts will " \
1416 "be inaccurate."
1417
1418 # Save sticky variable settings back to current variables file
1419 sticky_vars.Save(current_vars_file, env)
1420
1421 if env['USE_SSE2']:
1422 env.Append(CCFLAGS=['-msse2'])
1423
1424 # The src/SConscript file sets up the build rules in 'env' according
1425 # to the configured variables. It returns a list of environments,
1426 # one for each variant build (debug, opt, etc.)
1427 SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
1428
1429 def pairwise(iterable):
1430 "s -> (s0,s1), (s1,s2), (s2, s3), ..."
1431 a, b = itertools.tee(iterable)
1432 b.next()
1433 return itertools.izip(a, b)
1434
1435 # Create false dependencies so SCons will parse ISAs, establish
1436 # dependencies, and set up the build Environments serially. Either
1437 # SCons (likely) or our SConscripts (possibly) cannot cope with -j
1438 # greater than 1. It appears to be standard race condition stuff; it
1439 # doesn't always fail, but usually, and the behaviors are different.
1440 # Every time I tried to remove this, builds would fail in some
1441 # creative new way. So, don't do that. You'll want to, though, because
1442 # tests/SConscript takes a long time to make its Environments.
1443 for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
1444 main.Depends('#%s-deps' % t2, '#%s-deps' % t1)
1445 main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
1446
1447 # base help text
1448 Help('''
1449 Usage: scons [scons options] [build variables] [target(s)]
1450
1451 Extra scons options:
1452 %(options)s
1453
1454 Global build variables:
1455 %(global_vars)s
1456
1457 %(local_vars)s
1458 ''' % help_texts)