X-Git-Url: https://git.libre-soc.org/?a=blobdiff_plain;f=SConstruct;h=a8ffb208c7fe7c9cfdc4333f1c87a792b0363545;hb=967cfa939afca4a5752ce9c1d64d5c51092e7f1a;hp=e9427124f5229d50cae4246bf8c15680ec3d6c06;hpb=7084e31341f99983a2f3f218fa2ec7fa624ad249;p=gem5.git diff --git a/SConstruct b/SConstruct index e9427124f..a8ffb208c 100755 --- a/SConstruct +++ b/SConstruct @@ -1,6 +1,6 @@ # -*- mode:python -*- -# Copyright (c) 2013 ARM Limited +# Copyright (c) 2013, 2015 ARM Limited # All rights reserved. # # The license below extends only to copyright in the software and shall @@ -109,6 +109,7 @@ For more details, see: raise # Global Python includes +import itertools import os import re import subprocess @@ -172,6 +173,9 @@ AddLocalOption('--colors', dest='use_colors', action='store_true', help="Add color to abbreviated scons output") AddLocalOption('--no-colors', dest='use_colors', action='store_false', help="Don't add color to abbreviated scons output") +AddLocalOption('--with-cxx-config', dest='with_cxx_config', + action='store_true', + help="Build with support for C++-based configuration") AddLocalOption('--default', dest='default', type='string', action='store', help='Override which build_opts file to use for defaults') AddLocalOption('--ignore-style', dest='ignore_style', action='store_true', @@ -182,6 +186,14 @@ AddLocalOption('--update-ref', dest='update_ref', action='store_true', help='Update test reference outputs') AddLocalOption('--verbose', dest='verbose', action='store_true', help='Print full tool command lines') +AddLocalOption('--without-python', dest='without_python', + action='store_true', + help='Build without Python configuration support') +AddLocalOption('--without-tcmalloc', dest='without_tcmalloc', + action='store_true', + help='Disable linking against tcmalloc') +AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true', + help='Build with Undefined Behavior Sanitizer if available') termcap = get_termcap(GetOption('use_colors')) @@ -204,12 +216,15 @@ use_prefixes = [ ] use_env = {} -for key,val in os.environ.iteritems(): +for key,val in sorted(os.environ.iteritems()): if key in use_vars or \ any([key.startswith(prefix) for prefix in use_prefixes]): use_env[key] = val -main = Environment(ENV=use_env) +# Tell scons to avoid implicit command dependencies to avoid issues +# with the param wrappes being compiled twice (see +# http://scons.tigris.org/issues/show_bug.cgi?id=2811) +main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0) main.Decider('MD5-timestamp') main.root = Dir(".") # The current directory (where this file lives). main.srcdir = Dir("src") # The source directory @@ -541,9 +556,8 @@ if main['GCC'] or main['CLANG']: # Enable -Wall and then disable the few warnings that we # consistently violate main.Append(CCFLAGS=['-Wall', '-Wno-sign-compare', '-Wundef']) - # We always compile using C++11, but only gcc >= 4.7 and clang 3.1 - # actually use that name, so we stick with c++0x - main.Append(CXXFLAGS=['-std=c++0x']) + # We always compile using C++11 + main.Append(CXXFLAGS=['-std=c++11']) # Add selected sanity checks from -Wextra main.Append(CXXFLAGS=['-Wmissing-field-initializers', '-Woverloaded-virtual']) @@ -567,52 +581,74 @@ else: Exit(1) if main['GCC']: - # Check for a supported version of gcc, >= 4.4 is needed for c++0x - # support. See http://gcc.gnu.org/projects/cxx0x.html for details + # Check for a supported version of gcc. >= 4.7 is chosen for its + # level of c++11 support. See + # http://gcc.gnu.org/projects/cxx0x.html for details. 
     gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
-    if compareVersions(gcc_version, "4.4") < 0:
-        print 'Error: gcc version 4.4 or newer required.'
+    if compareVersions(gcc_version, "4.7") < 0:
+        print 'Error: gcc version 4.7 or newer required.'
         print '       Installed version:', gcc_version
         Exit(1)

     main['GCC_VERSION'] = gcc_version

-    # Check for versions with bugs
-    if not compareVersions(gcc_version, '4.4.1') or \
-       not compareVersions(gcc_version, '4.4.2'):
-        print 'Info: Tree vectorizer in GCC 4.4.1 & 4.4.2 is buggy, disabling.'
-        main.Append(CCFLAGS=['-fno-tree-vectorize'])
-
-    # LTO support is only really working properly from 4.6 and beyond
-    if compareVersions(gcc_version, '4.6') >= 0:
-        # Add the appropriate Link-Time Optimization (LTO) flags
-        # unless LTO is explicitly turned off. Note that these flags
-        # are only used by the fast target.
-        if not GetOption('no_lto'):
-            # Pass the LTO flag when compiling to produce GIMPLE
-            # output, we merely create the flags here and only append
-            # them later/
-            main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
-
-            # Use the same amount of jobs for LTO as we are running
-            # scons with, we hardcode the use of the linker plugin
-            # which requires either gold or GNU ld >= 2.21
-            main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs'),
-                                   '-fuse-linker-plugin']
+    # gcc from version 4.8 and above generates "rep; ret" instructions
+    # to avoid performance penalties on certain AMD chips. Older
+    # assemblers detect this as an error, "Error: expecting string
+    # instruction after `rep'"
+    if compareVersions(gcc_version, "4.8") > 0:
+        as_version_raw = readCommand([main['AS'], '-v', '/dev/null'],
+                                     exception=False).split()
+
+        # version strings may contain extra distro-specific
+        # qualifiers, so play it safe and keep only what comes before
+        # the first hyphen
+        as_version = as_version_raw[-1].split('-')[0] if as_version_raw \
+            else None
+
+        if not as_version or compareVersions(as_version, "2.23") < 0:
+            print termcap.Yellow + termcap.Bold + \
+                'Warning: This combination of gcc and binutils has' + \
+                ' known incompatibilities.\n' + \
+                '         If you encounter build problems, please update ' + \
+                'binutils to 2.23.' + \
+                termcap.Normal
+
+    # Make sure we warn if the user has requested to compile with the
+    # Undefined Behavior Sanitizer and this version of gcc does not
+    # support it.
+    if GetOption('with_ubsan') and \
+            compareVersions(gcc_version, '4.9') < 0:
+        print termcap.Yellow + termcap.Bold + \
+            'Warning: UBSan is only supported using gcc 4.9 and later.' + \
+            termcap.Normal
+
+    # Add the appropriate Link-Time Optimization (LTO) flags
+    # unless LTO is explicitly turned off. Note that these flags
+    # are only used by the fast target.
+    if not GetOption('no_lto'):
+        # Pass the LTO flag when compiling to produce GIMPLE
+        # output, we merely create the flags here and only append
+        # them later
+        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
+
+        # Use the same amount of jobs for LTO as we are running
+        # scons with
+        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

     main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
                                   '-fno-builtin-realloc', '-fno-builtin-free'])

 elif main['CLANG']:
-    # Check for a supported version of clang, >= 2.9 is needed to
-    # support similar features as gcc 4.4. See
+    # Check for a supported version of clang, >= 3.1 is needed to
+    # support similar features as gcc 4.7. See
     # http://clang.llvm.org/cxx_status.html for details
     clang_version_re = re.compile(".* version (\d+\.\d+)")
     clang_version_match = clang_version_re.search(CXX_version)
     if (clang_version_match):
         clang_version = clang_version_match.groups()[0]
-        if compareVersions(clang_version, "2.9") < 0:
-            print 'Error: clang version 2.9 or newer required.'
+        if compareVersions(clang_version, "3.1") < 0:
+            print 'Error: clang version 3.1 or newer required.'
             print '       Installed version:', clang_version
             Exit(1)
     else:
@@ -627,7 +663,12 @@ elif main['CLANG']:
     # is relying on this
     main.Append(CCFLAGS=['-Wno-tautological-compare',
                          '-Wno-parentheses',
-                         '-Wno-self-assign'])
+                         '-Wno-self-assign',
+                         # Some versions of libstdc++ (4.8?) seem to
+                         # use struct hash and class hash
+                         # interchangeably.
+                         '-Wno-mismatched-tags',
+                         ])

     main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])

@@ -726,27 +767,37 @@ if len(swig_version) < 3 or \
     print 'Error determining SWIG version.'
     Exit(1)

-min_swig_version = '1.3.34'
+min_swig_version = '2.0.4'
 if compareVersions(swig_version[2], min_swig_version) < 0:
     print 'Error: SWIG version', min_swig_version, 'or newer required.'
     print '       Installed version:', swig_version[2]
     Exit(1)

-# Older versions of swig do not play well with more recent versions of
-# gcc due to assumptions on implicit includes (cstddef) and use of
-# namespaces
-if main['GCC'] and compareVersions(gcc_version, '4.6') > 0 and \
-        compareVersions(swig_version[2], '2') < 0:
-    print '\n' + termcap.Yellow + termcap.Bold + \
-        'Warning: SWIG 1.x cause issues with gcc 4.6 and later.\n' + \
-        termcap.Normal + \
-        'Use SWIG 2.x to avoid assumptions on implicit includes\n' + \
-        'and use of namespaces\n'
+# Check for known incompatibilities. The standard library shipped with
+# gcc >= 4.9 does not play well with swig versions prior to 3.0
+if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
+        compareVersions(swig_version[2], '3.0') < 0:
+    print termcap.Yellow + termcap.Bold + \
+        'Warning: This combination of gcc and swig has' + \
+        ' known incompatibilities.\n' + \
+        '         If you encounter build problems, please update ' + \
+        'swig to 3.0 or later.' + \
+        termcap.Normal

 # Set up SWIG flags & scanner
 swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
 main.Append(SWIGFLAGS=swig_flags)

+# Check for 'timeout' from GNU coreutils. If present, regressions will
+# be run with a time limit. We require version 8.13 since we rely on
+# support for the '--foreground' option.
+timeout_lines = readCommand(['timeout', '--version'],
+                            exception='').splitlines()
+# Get the first line and tokenize it
+timeout_version = timeout_lines[0].split() if timeout_lines else []
+main['TIMEOUT'] = timeout_version and \
+    compareVersions(timeout_version[-1], '8.13') >= 0
+
 # filter out all existing swig scanners, they mess up the dependency
 # stuff for some reason
 scanners = []
@@ -768,32 +819,6 @@ scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))
 # replace the scanners list that has what we want
 main['SCANNERS'] = scanners

-# Add a custom Check function to the Configure context so that we can
-# figure out if the compiler adds leading underscores to global
-# variables. This is needed for the autogenerated asm files that we
-# use for embedding the python code.
-def CheckLeading(context):
-    context.Message("Checking for leading underscore in global variables...")
-    # 1) Define a global variable called x from asm so the C compiler
-    #    won't change the symbol at all.
-    # 2) Declare that variable.
- # 3) Use the variable - # - # If the compiler prepends an underscore, this will successfully - # link because the external symbol 'x' will be called '_x' which - # was defined by the asm statement. If the compiler does not - # prepend an underscore, this will not successfully link because - # '_x' will have been defined by assembly, while the C portion of - # the code will be trying to use 'x' - ret = context.TryLink(''' - asm(".globl _x; _x: .byte 0"); - extern int x; - int main() { return x; } - ''', extension=".c") - context.env.Append(LEADING_UNDERSCORE=ret) - context.Result(ret) - return ret - # Add a custom Check function to test for structure members. def CheckMember(context, include, decl, member, include_quotes="<>"): context.Message("Checking for member %s in %s..." % @@ -820,14 +845,9 @@ conf = Configure(main, conf_dir = joinpath(build_root, '.scons_config'), log_file = joinpath(build_root, 'scons_config.log'), custom_tests = { - 'CheckLeading' : CheckLeading, 'CheckMember' : CheckMember, }) -# Check for leading underscores. Don't really need to worry either -# way so don't need to check the return code. -conf.CheckLeading() - # Check if we should compile a 64 bit binary on Mac OS X/Darwin try: import platform @@ -865,42 +885,51 @@ if main['M5_BUILD_CACHE']: print 'Using build cache located at', main['M5_BUILD_CACHE'] CacheDir(main['M5_BUILD_CACHE']) -# Find Python include and library directories for embedding the -# interpreter. We rely on python-config to resolve the appropriate -# includes and linker flags. ParseConfig does not seem to understand -# the more exotic linker flags such as -Xlinker and -export-dynamic so -# we add them explicitly below. If you want to link in an alternate -# version of python, see above for instructions on how to invoke -# scons with the appropriate PATH set. -py_includes = readCommand(['python-config', '--includes'], - exception='').split() -# Strip the -I from the include folders before adding them to the -# CPPPATH -main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes)) - -# Read the linker flags and split them into libraries and other link -# flags. The libraries are added later through the call the CheckLib. -py_ld_flags = readCommand(['python-config', '--ldflags'], exception='').split() -py_libs = [] -for lib in py_ld_flags: - if not lib.startswith('-l'): - main.Append(LINKFLAGS=[lib]) - else: - lib = lib[2:] - if lib not in py_libs: - py_libs.append(lib) - -# verify that this stuff works -if not conf.CheckHeader('Python.h', '<>'): - print "Error: can't find Python.h header in", py_includes - print "Install Python headers (package python-dev on Ubuntu and RedHat)" - Exit(1) - -for lib in py_libs: - if not conf.CheckLib(lib): - print "Error: can't find library %s required by python" % lib +if not GetOption('without_python'): + # Find Python include and library directories for embedding the + # interpreter. We rely on python-config to resolve the appropriate + # includes and linker flags. ParseConfig does not seem to understand + # the more exotic linker flags such as -Xlinker and -export-dynamic so + # we add them explicitly below. If you want to link in an alternate + # version of python, see above for instructions on how to invoke + # scons with the appropriate PATH set. 
+ # + # First we check if python2-config exists, else we use python-config + python_config = readCommand(['which', 'python2-config'], + exception='').strip() + if not os.path.exists(python_config): + python_config = readCommand(['which', 'python-config'], + exception='').strip() + py_includes = readCommand([python_config, '--includes'], + exception='').split() + # Strip the -I from the include folders before adding them to the + # CPPPATH + main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes)) + + # Read the linker flags and split them into libraries and other link + # flags. The libraries are added later through the call the CheckLib. + py_ld_flags = readCommand([python_config, '--ldflags'], + exception='').split() + py_libs = [] + for lib in py_ld_flags: + if not lib.startswith('-l'): + main.Append(LINKFLAGS=[lib]) + else: + lib = lib[2:] + if lib not in py_libs: + py_libs.append(lib) + + # verify that this stuff works + if not conf.CheckHeader('Python.h', '<>'): + print "Error: can't find Python.h header in", py_includes + print "Install Python headers (package python-dev on Ubuntu and RedHat)" Exit(1) + for lib in py_libs: + if not conf.CheckLib(lib): + print "Error: can't find library %s required by python" % lib + Exit(1) + # On Solaris you need to use libsocket for socket ops if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'): if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'): @@ -942,15 +971,16 @@ have_posix_timers = \ conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C', 'timer_create(CLOCK_MONOTONIC, NULL, NULL);') -if conf.CheckLib('tcmalloc'): - main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS']) -elif conf.CheckLib('tcmalloc_minimal'): - main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS']) -else: - print termcap.Yellow + termcap.Bold + \ - "You can get a 12% performance improvement by installing tcmalloc "\ - "(libgoogle-perftools-dev package on Ubuntu or RedHat)." + \ - termcap.Normal +if not GetOption('without_tcmalloc'): + if conf.CheckLib('tcmalloc'): + main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS']) + elif conf.CheckLib('tcmalloc_minimal'): + main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS']) + else: + print termcap.Yellow + termcap.Bold + \ + "You can get a 12% performance improvement by "\ + "installing tcmalloc (libgoogle-perftools-dev package "\ + "on Ubuntu or RedHat)." + termcap.Normal if not have_posix_clock: print "Can't find library for POSIX clocks." @@ -965,18 +995,19 @@ if not have_fenv: # we rely on exists since version 2.6.36 of the kernel, but somehow # the KVM_API_VERSION does not reflect the change. We test for one of # the types as a fall back. -have_kvm = conf.CheckHeader('linux/kvm.h', '<>') and \ - conf.CheckTypeSize('struct kvm_xsave', '#include ') != 0 +have_kvm = conf.CheckHeader('linux/kvm.h', '<>') if not have_kvm: print "Info: Compatible header file not found, " \ "disabling KVM support." +# x86 needs support for xsave. We test for the structure here since we +# won't be able to run new tests by the time we know which ISA we're +# targeting. +have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave', + '#include ') != 0 + # Check if the requested target ISA is compatible with the host def is_isa_kvm_compatible(isa): - isa_comp_table = { - "arm" : ( "armv7l" ), - "x86" : ( "x86_64" ), - } try: import platform host_isa = platform.machine() @@ -984,7 +1015,24 @@ def is_isa_kvm_compatible(isa): print "Warning: Failed to determine host ISA." 
return False - return host_isa in isa_comp_table.get(isa, []) + if not have_posix_timers: + print "Warning: Can not enable KVM, host seems to lack support " \ + "for POSIX timers" + return False + + if isa == "arm": + return host_isa in ( "armv7l", "aarch64" ) + elif isa == "x86": + if host_isa != "x86_64": + return False + + if not have_kvm_xsave: + print "KVM on x86 requires xsave support in kernel headers." + return False + + return True + else: + return False # Check if the exclude_host attribute is available. We want this to @@ -1014,17 +1062,10 @@ class CpuModel(object): # Dict of available CPU model objects. Accessible as CpuModel.dict. dict = {} - list = [] - defaults = [] # Constructor. Automatically adds models to CpuModel.dict. - def __init__(self, name, filename, includes, strings, default=False): + def __init__(self, name, default=False): self.name = name # name of model - self.filename = filename # filename for output exec code - self.includes = includes # include files needed in exec file - # The 'strings' dict holds all the per-CPU symbols we can - # substitute into templates etc. - self.strings = strings # This cpu is enabled by default self.default = default @@ -1033,7 +1074,6 @@ class CpuModel(object): if name in CpuModel.dict: raise AttributeError, "CpuModel '%s' already registered" % name CpuModel.dict[name] = self - CpuModel.list.append(name) Export('CpuModel') @@ -1057,7 +1097,7 @@ Export('slicc_includes') # Walk the tree and execute all SConsopts scripts that wil add to the # above variables -if not GetOption('verbose'): +if GetOption('verbose'): print "Reading SConsopts" for bdir in [ base_dir ] + extras_dir_list: if not isdir(bdir): @@ -1075,7 +1115,7 @@ sticky_vars.AddVariables( EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list), ListVariable('CPU_MODELS', 'CPU models', sorted(n for n,m in CpuModel.dict.iteritems() if m.default), - sorted(CpuModel.list)), + sorted(CpuModel.dict.keys())), BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger', False), BoolVariable('SS_COMPATIBLE_FP', @@ -1094,7 +1134,7 @@ sticky_vars.AddVariables( # These variables get exported to #defines in config/*.hh (see src/SConscript). export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'CP_ANNOTATE', - 'USE_POSIX_CLOCK', 'PROTOCOL', 'HAVE_PROTOBUF', + 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL', 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST'] ################################################### @@ -1158,6 +1198,14 @@ main.SConscript('ext/fputils/SConscript', main.SConscript('ext/dramsim2/SConscript', variant_dir = joinpath(build_root, 'dramsim2')) +# DRAMPower build is shared across all configs in the build root. +main.SConscript('ext/drampower/SConscript', + variant_dir = joinpath(build_root, 'drampower')) + +# nomali build is shared across all configs in the build root. +main.SConscript('ext/nomali/SConscript', + variant_dir = joinpath(build_root, 'nomali')) + ################################################### # # This function is used to set up a directory with switching headers @@ -1165,16 +1213,21 @@ main.SConscript('ext/dramsim2/SConscript', ################################################### main['ALL_ISA_LIST'] = all_isa_list +all_isa_deps = {} def make_switching_dir(dname, switch_headers, env): # Generate the header. target[0] is the full path of the output # header to generate. 'source' is a dummy variable, since we get the # list of ISAs from env['ALL_ISA_LIST']. 
def gen_switch_hdr(target, source, env): fname = str(target[0]) - f = open(fname, 'w') isa = env['TARGET_ISA'].lower() - print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname)) - f.close() + try: + f = open(fname, 'w') + print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname)) + f.close() + except IOError: + print "Failed to create %s" % fname + raise # Build SCons Action object. 'varlist' specifies env vars that this # action depends on; when env['ALL_ISA_LIST'] changes these actions @@ -1185,8 +1238,37 @@ def make_switching_dir(dname, switch_headers, env): # Instantiate actions for each header for hdr in switch_headers: env.Command(hdr, [], switch_hdr_action) + + isa_target = Dir('.').up().name.lower().replace('_', '-') + env['PHONY_BASE'] = '#'+isa_target + all_isa_deps[isa_target] = None + Export('make_switching_dir') +# all-isas -> all-deps -> all-environs -> all_targets +main.Alias('#all-isas', []) +main.Alias('#all-deps', '#all-isas') + +# Dummy target to ensure all environments are created before telling +# SCons what to actually make (the command line arguments). We attach +# them to the dependence graph after the environments are complete. +ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work. +def environsComplete(target, source, env): + for t in ORIG_BUILD_TARGETS: + main.Depends('#all-targets', t) + +# Each build/* switching_dir attaches its *-environs target to #all-environs. +main.Append(BUILDERS = {'CompleteEnvirons' : + Builder(action=MakeAction(environsComplete, None))}) +main.CompleteEnvirons('#all-environs', []) + +def doNothing(**ignored): pass +main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))}) + +# The final target to which all the original targets ultimately get attached. +main.Dummy('#all-targets', '#all-environs') +BUILD_TARGETS[:] = ['#all-targets'] + ################################################### # # Define build environments for selected configurations. @@ -1194,7 +1276,8 @@ Export('make_switching_dir') ################################################### for variant_path in variant_paths: - print "Building in", variant_path + if not GetOption('silent'): + print "Building in", variant_path # Make a copy of the build-root environment to use for this config. env = main.Clone() @@ -1212,7 +1295,8 @@ for variant_path in variant_paths: current_vars_file = joinpath(build_root, 'variables', variant_dir) if isfile(current_vars_file): sticky_vars.files.append(current_vars_file) - print "Using saved variables file %s" % current_vars_file + if not GetOption('silent'): + print "Using saved variables file %s" % current_vars_file else: # Build dir-specific variables file doesn't exist. @@ -1268,10 +1352,6 @@ for variant_path in variant_paths: if not have_kvm: print "Warning: Can not enable KVM, host seems to lack KVM support" env['USE_KVM'] = False - elif not have_posix_timers: - print "Warning: Can not enable KVM, host seems to lack support " \ - "for POSIX timers" - env['USE_KVM'] = False elif not is_isa_kvm_compatible(env['TARGET_ISA']): print "Info: KVM support disabled due to unsupported host and " \ "target ISA combination" @@ -1293,14 +1373,25 @@ for variant_path in variant_paths: # The src/SConscript file sets up the build rules in 'env' according # to the configured variables. It returns a list of environments, # one for each variant build (debug, opt, etc.) - envList = SConscript('src/SConscript', variant_dir = variant_path, - exports = 'env') - - # Set up the regression tests for each build. 
-    for e in envList:
-        SConscript('tests/SConscript',
-                   variant_dir = joinpath(variant_path, 'tests', e.Label),
-                   exports = { 'env' : e }, duplicate = False)
+    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
+
+def pairwise(iterable):
+    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
+    a, b = itertools.tee(iterable)
+    b.next()
+    return itertools.izip(a, b)
+
+# Create false dependencies so SCons will parse ISAs, establish
+# dependencies, and set up the build Environments serially. Either
+# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
+# greater than 1. It appears to be standard race condition stuff; it
+# doesn't always fail, but usually, and the behaviors are different.
+# Every time I tried to remove this, builds would fail in some
+# creative new way. So, don't do that. You'll want to, though, because
+# tests/SConscript takes a long time to make its Environments.
+for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
+    main.Depends('#%s-deps' % t2, '#%s-deps' % t1)
+    main.Depends('#%s-environs' % t2, '#%s-environs' % t1)

 # base help text
 Help('''