X-Git-Url: https://git.libre-soc.org/?a=blobdiff_plain;f=SConstruct;h=dc3addcd8a1c42fcfbef1da5a8b4b3321ab0b46e;hb=de720838053d30defcb3b5d07b76ea599b9c05ea;hp=f99bc1f20ab4f044051e8d5b93c1c7d767c1e3e4;hpb=63fdabf191b8ac1031fb25da61ab2526d4bb6d05;p=gem5.git diff --git a/SConstruct b/SConstruct index f99bc1f20..dc3addcd8 100644 --- a/SConstruct +++ b/SConstruct @@ -1,5 +1,6 @@ # -*- mode:python -*- +# Copyright (c) 2009 The Hewlett-Packard Development Company # Copyright (c) 2004-2005 The Regents of The University of Michigan # All rights reserved. # @@ -27,6 +28,7 @@ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: Steve Reinhardt +# Nathan Binkert ################################################### # @@ -63,39 +65,181 @@ # ################################################### -# Python library imports -import sys +# Check for recent-enough Python and SCons versions. +try: + # Really old versions of scons only take two options for the + # function, so check once without the revision and once with the + # revision, the first instance will fail for stuff other than + # 0.98, and the second will fail for 0.98.0 + EnsureSConsVersion(0, 98) + EnsureSConsVersion(0, 98, 1) +except SystemExit, e: + print """ +For more details, see: + http://m5sim.org/wiki/index.php/Compiling_M5 +""" + raise + +# We ensure the python version early because we have stuff that +# requires python 2.4 +try: + EnsurePythonVersion(2, 4) +except SystemExit, e: + print """ +You can use a non-default installation of the Python interpreter by +either (1) rearranging your PATH so that scons finds the non-default +'python' first or (2) explicitly invoking an alternative interpreter +on the scons script. + +For more details, see: + http://m5sim.org/wiki/index.php/Using_a_non-default_Python_installation +""" + raise + import os +import re import subprocess -from os.path import join as joinpath - -# Check for recent-enough Python and SCons versions. If your system's -# default installation of Python is not recent enough, you can use a -# non-default installation of the Python interpreter by either (1) -# rearranging your PATH so that scons finds the non-default 'python' -# first or (2) explicitly invoking an alternative interpreter on the -# scons script, e.g., "/usr/local/bin/python2.4 `which scons` [args]". -EnsurePythonVersion(2,4) - -# Ironically, SCons 0.96 dies if you give EnsureSconsVersion a -# 3-element version number. -min_scons_version = (0,96,91) -try: - EnsureSConsVersion(*min_scons_version) -except: - print "Error checking current SCons version." - print "SCons", ".".join(map(str,min_scons_version)), "or greater required." 
- Exit(2) +import sys + +from os import mkdir, environ +from os.path import abspath, basename, dirname, expanduser, normpath +from os.path import exists, isdir, isfile +from os.path import join as joinpath, split as splitpath + +import SCons +import SCons.Node + +def read_command(cmd, **kwargs): + """run the command cmd, read the results and return them + this is sorta like `cmd` in shell""" + from subprocess import Popen, PIPE, STDOUT + + if isinstance(cmd, str): + cmd = cmd.split() + + no_exception = 'exception' in kwargs + exception = kwargs.pop('exception', None) + kwargs.setdefault('shell', False) + kwargs.setdefault('stdout', PIPE) + kwargs.setdefault('stderr', STDOUT) + kwargs.setdefault('close_fds', True) + try: + subp = Popen(cmd, **kwargs) + except Exception, e: + if no_exception: + return exception + raise + + return subp.communicate()[0] + +# helper function: compare arrays or strings of version numbers. +# E.g., compare_version((1,3,25), (1,4,1)') +# returns -1, 0, 1 if v1 is <, ==, > v2 +def compare_versions(v1, v2): + def make_version_list(v): + if isinstance(v, (list,tuple)): + return v + elif isinstance(v, str): + return map(lambda x: int(re.match('\d+', x).group()), v.split('.')) + else: + raise TypeError + + v1 = make_version_list(v1) + v2 = make_version_list(v2) + # Compare corresponding elements of lists + for n1,n2 in zip(v1, v2): + if n1 < n2: return -1 + if n1 > n2: return 1 + # all corresponding values are equal... see if one has extra values + if len(v1) < len(v2): return -1 + if len(v1) > len(v2): return 1 + return 0 + +######################################################################## +# +# Set up the base build environment. +# +######################################################################## +use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'PATH', 'RANLIB' ]) + +use_env = {} +for key,val in os.environ.iteritems(): + if key in use_vars or key.startswith("M5"): + use_env[key] = val + +env = Environment(ENV=use_env) +env.root = Dir(".") # The current directory (where this file lives). +env.srcdir = Dir("src") # The source directory + +######################################################################## +# +# Mercurial Stuff. +# +# If the M5 directory is a mercurial repository, we should do some +# extra things. +# +######################################################################## + +hgdir = env.root.Dir(".hg") -# The absolute path to the current directory (where this file lives). -ROOT = Dir('.').abspath +mercurial_style_message = """ +You're missing the M5 style hook. +Please install the hook so we can ensure that all code fits a common style. -# Path to the M5 source tree. -SRCDIR = joinpath(ROOT, 'src') +All you'd need to do is add the following lines to your repository .hg/hgrc +or your personal .hgrc +---------------- -# tell python where to find m5 python code -sys.path.append(joinpath(ROOT, 'src/python')) +[extensions] +style = %s/util/style.py + +[hooks] +pretxncommit.style = python:style.check_whitespace +""" % (env.root) + +mercurial_bin_not_found = """ +Mercurial binary cannot be found, unfortunately this means that we +cannot easily determine the version of M5 that you are running and +this makes error messages more difficult to collect. Please consider +installing mercurial if you choose to post an error message +""" + +mercurial_lib_not_found = """ +Mercurial libraries cannot be found, ignoring style hook +If you are actually a M5 developer, please fix this and +run the style hook. It is important. 
+""" + +if hgdir.exists(): + # 1) Grab repository revision if we know it. + cmd = "hg id -n -i -t -b" + try: + hg_info = read_command(cmd, cwd=env.root.abspath).strip() + except OSError: + hg_info = "Unknown" + print mercurial_bin_not_found + + env['HG_INFO'] = hg_info + + # 2) Ensure that the style hook is in place. + try: + ui = None + if ARGUMENTS.get('IGNORE_STYLE') != 'True': + from mercurial import ui + ui = ui.ui() + except ImportError: + print mercurial_lib_not_found + + if ui is not None: + ui.readconfig(hgdir.File('hgrc').abspath) + style_hook = ui.config('hooks', 'pretxncommit.style', None) + + if not style_hook: + print mercurial_style_message + sys.exit(1) +else: + print ".hg directory not found" ################################################### # @@ -105,7 +249,7 @@ sys.path.append(joinpath(ROOT, 'src/python')) ################################################### # Find default configuration & binary. -Default(os.environ.get('M5_DEFAULT_BINARY', 'build/ALPHA_SE/m5.debug')) +Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA_SE/m5.debug')) # helper function: find last occurrence of element in list def rfind(l, elt, offs = -1): @@ -114,22 +258,6 @@ def rfind(l, elt, offs = -1): return i raise ValueError, "element not found" -# helper function: compare dotted version numbers. -# E.g., compare_version('1.3.25', '1.4.1') -# returns -1, 0, 1 if v1 is <, ==, > v2 -def compare_versions(v1, v2): - # Convert dotted strings to lists - v1 = map(int, v1.split('.')) - v2 = map(int, v2.split('.')) - # Compare corresponding elements of lists - for n1,n2 in zip(v1, v2): - if n1 < n2: return -1 - if n1 > n2: return 1 - # all corresponding values are equal... see if one has extra values - if len(v1) < len(v2): return -1 - if len(v1) > len(v2): return 1 - return 0 - # Each target must have 'build' in the interior of the path; the # directory below this will determine the build parameters. For # example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we @@ -141,17 +269,17 @@ if COMMAND_LINE_TARGETS: # Ask SCons which directory it was invoked from launch_dir = GetLaunchDir() # Make targets relative to invocation directory - abs_targets = map(lambda x: os.path.normpath(joinpath(launch_dir, str(x))), - COMMAND_LINE_TARGETS) + abs_targets = [ normpath(joinpath(launch_dir, str(x))) for x in \ + COMMAND_LINE_TARGETS] else: # Default targets are relative to root of tree - abs_targets = map(lambda x: os.path.normpath(joinpath(ROOT, str(x))), - DEFAULT_TARGETS) + abs_targets = [ normpath(joinpath(ROOT, str(x))) for x in \ + DEFAULT_TARGETS] # Generate a list of the unique build roots and configs that the # collected targets reference. -build_paths = [] +variant_paths = [] build_root = None for t in abs_targets: path_dirs = t.split('/') @@ -168,30 +296,17 @@ for t in abs_targets: print "Error: build targets not under same build root\n"\ " %s\n %s" % (build_root, this_build_root) Exit(1) - build_path = joinpath('/',*path_dirs[:build_top+2]) - if build_path not in build_paths: - build_paths.append(build_path) + variant_path = joinpath('/',*path_dirs[:build_top+2]) + if variant_path not in variant_paths: + variant_paths.append(variant_path) -################################################### -# -# Set up the default build environment. This environment is copied -# and modified according to each selected configuration. 
-# -################################################### - -env = Environment(ENV = os.environ, # inherit user's environment vars - ROOT = ROOT, - SRCDIR = SRCDIR) +# Make sure build_root exists (might not if this is the first build there) +if not isdir(build_root): + mkdir(build_root) -#Parse CC/CXX early so that we use the correct compiler for -# to test for dependencies/versions/libraries/includes -if ARGUMENTS.get('CC', None): - env['CC'] = ARGUMENTS.get('CC') +Export('env') -if ARGUMENTS.get('CXX', None): - env['CXX'] = ARGUMENTS.get('CXX') - -env.SConsignFile(joinpath(build_root,"sconsign")) +env.SConsignFile(joinpath(build_root, "sconsign")) # Default duplicate option is to use hard links, but this messes up # when you use emacs to edit a file in the target dir, as emacs moves @@ -199,44 +314,107 @@ env.SConsignFile(joinpath(build_root,"sconsign")) # (soft) links work better. env.SetOption('duplicate', 'soft-copy') -# I waffle on this setting... it does avoid a few painful but -# unnecessary builds, but it also seems to make trivial builds take -# noticeably longer. -if False: - env.TargetSignatures('content') +# +# Set up global sticky variables... these are common to an entire build +# tree (not specific to a particular build like ALPHA_SE) +# + +# Variable validators & converters for global sticky variables +def PathListMakeAbsolute(val): + if not val: + return val + f = lambda p: abspath(expanduser(p)) + return ':'.join(map(f, val.split(':'))) + +def PathListAllExist(key, val, env): + if not val: + return + paths = val.split(':') + for path in paths: + if not isdir(path): + raise SCons.Errors.UserError("Path does not exist: '%s'" % path) + +global_sticky_vars_file = joinpath(build_root, 'variables.global') + +global_sticky_vars = Variables(global_sticky_vars_file, args=ARGUMENTS) + +global_sticky_vars.AddVariables( + ('CC', 'C compiler', environ.get('CC', env['CC'])), + ('CXX', 'C++ compiler', environ.get('CXX', env['CXX'])), + ('BATCH', 'Use batch pool for build and tests', False), + ('BATCH_CMD', 'Batch pool submission command name', 'qdo'), + ('EXTRAS', 'Add Extra directories to the compilation', '', + PathListAllExist, PathListMakeAbsolute) + ) + +# base help text +help_text = ''' +Usage: scons [scons options] [build options] [target(s)] + +Global sticky options: +''' + +help_text += global_sticky_vars.GenerateHelpText(env) + +# Update env with values from ARGUMENTS & file global_sticky_vars_file +global_sticky_vars.Update(env) + +# Save sticky variable settings back to current variables file +global_sticky_vars.Save(global_sticky_vars_file, env) + +# Parse EXTRAS variable to build list of all directories where we're +# look for sources etc. This list is exported as base_dir_list. +base_dir = env.srcdir.abspath +if env['EXTRAS']: + extras_dir_list = env['EXTRAS'].split(':') +else: + extras_dir_list = [] + +Export('base_dir') +Export('extras_dir_list') # M5_PLY is used by isa_parser.py to find the PLY package. -env.Append(ENV = { 'M5_PLY' : Dir('ext/ply') }) -env['GCC'] = False -env['SUNCC'] = False -env['GCC'] = subprocess.Popen(env['CXX'] + ' --version', shell=True, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - close_fds=True).communicate()[0].find('GCC') >= 0 -env['SUNCC'] = subprocess.Popen(env['CXX'] + ' -V', shell=True, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - close_fds=True).communicate()[0].find('Sun C++') >= 0 -if (env['GCC'] and env['SUNCC']): - print 'Error: How can we have both g++ and Sun C++ at the same time?' 
- Exit(1) +env.Append(ENV = { 'M5_PLY' : str(Dir('ext/ply')) }) +CXX_version = read_command([env['CXX'],'--version'], exception=False) +CXX_V = read_command([env['CXX'],'-V'], exception=False) + +env['GCC'] = CXX_version and CXX_version.find('g++') >= 0 +env['SUNCC'] = CXX_V and CXX_V.find('Sun C++') >= 0 +env['ICC'] = CXX_V and CXX_V.find('Intel') >= 0 +if env['GCC'] + env['SUNCC'] + env['ICC'] > 1: + print 'Error: How can we have two at the same time?' + Exit(1) # Set up default C++ compiler flags if env['GCC']: env.Append(CCFLAGS='-pipe') env.Append(CCFLAGS='-fno-strict-aliasing') env.Append(CCFLAGS=Split('-Wall -Wno-sign-compare -Werror -Wundef')) + env.Append(CXXFLAGS='-Wno-deprecated') +elif env['ICC']: + pass #Fix me... add warning flags once we clean up icc warnings elif env['SUNCC']: env.Append(CCFLAGS='-Qoption ccfe') env.Append(CCFLAGS='-features=gcc') env.Append(CCFLAGS='-features=extensions') env.Append(CCFLAGS='-library=stlport4') env.Append(CCFLAGS='-xar') -# env.Append(CCFLAGS='-instances=semiexplicit') + #env.Append(CCFLAGS='-instances=semiexplicit') else: print 'Error: Don\'t know what compiler options to use for your compiler.' - print ' Please fix SConstruct and try again.' + print ' Please fix SConstruct and src/SConscript and try again.' Exit(1) +# Do this after we save setting back, or else we'll tack on an +# extra 'qdo' every time we run scons. +if env['BATCH']: + env['CC'] = env['BATCH_CMD'] + ' ' + env['CC'] + env['CXX'] = env['BATCH_CMD'] + ' ' + env['CXX'] + env['AS'] = env['BATCH_CMD'] + ' ' + env['AS'] + env['AR'] = env['BATCH_CMD'] + ' ' + env['AR'] + env['RANLIB'] = env['BATCH_CMD'] + ' ' + env['RANLIB'] + if sys.platform == 'cygwin': # cygwin has some header file issues... env.Append(CCFLAGS=Split("-Wno-uninitialized")) @@ -249,9 +427,10 @@ if not env.has_key('SWIG'): Exit(1) # Check for appropriate SWIG version -swig_version = os.popen('swig -version').read().split() +swig_version = read_command(('swig', '-version'), exception='').split() # First 3 words should be "SWIG Version x.y.z" -if swig_version[0] != 'SWIG' or swig_version[1] != 'Version': +if len(swig_version) < 3 or \ + swig_version[0] != 'SWIG' or swig_version[1] != 'Version': print 'Error determining SWIG version.' Exit(1) @@ -262,54 +441,143 @@ if compare_versions(swig_version[2], min_swig_version) < 0: Exit(1) # Set up SWIG flags & scanner -env.Append(SWIGFLAGS=Split('-c++ -python -modern $_CPPINCFLAGS')) +swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS') +env.Append(SWIGFLAGS=swig_flags) -import SCons.Scanner +# filter out all existing swig scanners, they mess up the dependency +# stuff for some reason +scanners = [] +for scanner in env['SCANNERS']: + skeys = scanner.skeys + if skeys == '.i': + continue -swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")' + if isinstance(skeys, (list, tuple)) and '.i' in skeys: + continue -swig_scanner = SCons.Scanner.ClassicCPP("SwigScan", ".i", "CPPPATH", - swig_inc_re) + scanners.append(scanner) -env.Append(SCANNERS = swig_scanner) +# add the new swig scanner that we like better +from SCons.Scanner import ClassicCPP as CPPScanner +swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")' +scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re)) + +# replace the scanners list that has what we want +env['SCANNERS'] = scanners + +# Add a custom Check function to the Configure context so that we can +# figure out if the compiler adds leading underscores to global +# variables. 
This is needed for the autogenerated asm files that we +# use for embedding the python code. +def CheckLeading(context): + context.Message("Checking for leading underscore in global variables...") + # 1) Define a global variable called x from asm so the C compiler + # won't change the symbol at all. + # 2) Declare that variable. + # 3) Use the variable + # + # If the compiler prepends an underscore, this will successfully + # link because the external symbol 'x' will be called '_x' which + # was defined by the asm statement. If the compiler does not + # prepend an underscore, this will not successfully link because + # '_x' will have been defined by assembly, while the C portion of + # the code will be trying to use 'x' + ret = context.TryLink(''' + asm(".globl _x; _x: .byte 0"); + extern int x; + int main() { return x; } + ''', extension=".c") + context.env.Append(LEADING_UNDERSCORE=ret) + context.Result(ret) + return ret # Platform-specific configuration. Note again that we assume that all # builds under a given build root run on the same host platform. conf = Configure(env, conf_dir = joinpath(build_root, '.scons_config'), - log_file = joinpath(build_root, 'scons_config.log')) + log_file = joinpath(build_root, 'scons_config.log'), + custom_tests = { 'CheckLeading' : CheckLeading }) + +# Check for leading underscores. Don't really need to worry either +# way so don't need to check the return code. +conf.CheckLeading() + +# Check if we should compile a 64 bit binary on Mac OS X/Darwin +try: + import platform + uname = platform.uname() + if uname[0] == 'Darwin' and compare_versions(uname[2], '9.0.0') >= 0: + if int(read_command('sysctl -n hw.cpu64bit_capable')[0]): + env.Append(CCFLAGS='-arch x86_64') + env.Append(CFLAGS='-arch x86_64') + env.Append(LINKFLAGS='-arch x86_64') + env.Append(ASFLAGS='-arch x86_64') +except: + pass + +# Recent versions of scons substitute a "Null" object for Configure() +# when configuration isn't necessary, e.g., if the "--help" option is +# present. Unfortuantely this Null object always returns false, +# breaking all our configuration checks. We replace it with our own +# more optimistic null object that returns True instead. +if not conf: + def NullCheck(*args, **kwargs): + return True + + class NullConf: + def __init__(self, env): + self.env = env + def Finish(self): + return self.env + def __getattr__(self, mname): + return NullCheck + + conf = NullConf(env) # Find Python include and library directories for embedding the # interpreter. For consistency, we will use the same Python # installation used to run scons (and thus this script). If you want # to link in an alternate version, see above for instructions on how # to invoke scons with a different copy of the Python interpreter. +from distutils import sysconfig + +py_getvar = sysconfig.get_config_var + +py_version = 'python' + py_getvar('VERSION') + +py_general_include = sysconfig.get_python_inc() +py_platform_include = sysconfig.get_python_inc(plat_specific=True) +py_includes = [ py_general_include ] +if py_platform_include != py_general_include: + py_includes.append(py_platform_include) -# Get brief Python version name (e.g., "python2.4") for locating -# include & library files -py_version_name = 'python' + sys.version[:3] +py_lib_path = [] +# add the prefix/lib/pythonX.Y/config dir, but only if there is no +# shared library in prefix/lib/. +if not py_getvar('Py_ENABLE_SHARED'): + py_lib_path.append('-L' + py_getvar('LIBPL')) -# include path, e.g. 
/usr/local/include/python2.4 -py_header_path = joinpath(sys.exec_prefix, 'include', py_version_name) -env.Append(CPPPATH = py_header_path) -# verify that it works +py_libs = [] +for lib in py_getvar('LIBS').split() + py_getvar('SYSLIBS').split(): + if lib not in py_libs: + py_libs.append(lib) +py_libs.append('-l' + py_version) + +env.Append(CPPPATH=py_includes) +env.Append(LIBPATH=py_lib_path) +#env.Append(LIBS=py_libs) + +# verify that this stuff works if not conf.CheckHeader('Python.h', '<>'): - print "Error: can't find Python.h header in", py_header_path + print "Error: can't find Python.h header in", py_includes Exit(1) -# add library path too if it's not in the default place -py_lib_path = None -if sys.exec_prefix != '/usr': - py_lib_path = joinpath(sys.exec_prefix, 'lib') -elif sys.platform == 'cygwin': - # cygwin puts the .dll in /bin for some reason - py_lib_path = '/bin' -if py_lib_path: - env.Append(LIBPATH = py_lib_path) - print 'Adding', py_lib_path, 'to LIBPATH for', py_version_name -if not conf.CheckLib(py_version_name): - print "Error: can't find Python library", py_version_name - Exit(1) +for lib in py_libs: + assert lib.startswith('-l') + lib = lib[2:] + if not conf.CheckLib(lib): + print "Error: can't find library %s required by python" % lib + Exit(1) # On Solaris you need to use libsocket for socket ops if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'): @@ -331,13 +599,16 @@ if not have_fenv: print "Warning: Header file not found." print " This host has no IEEE FP rounding mode control." +###################################################################### +# # Check for mysql. +# mysql_config = WhereIs('mysql_config') -have_mysql = mysql_config != None +have_mysql = bool(mysql_config) # Check MySQL version. if have_mysql: - mysql_version = os.popen(mysql_config + ' --version').read() + mysql_version = read_command(mysql_config + ' --version') min_mysql_version = '4.1' if compare_versions(mysql_version, min_mysql_version) < 0: print 'Warning: MySQL', min_mysql_version, 'or newer required.' @@ -354,70 +625,81 @@ if have_mysql: # This seems to work in all versions mysql_config_libs = mysql_config + ' --libs' +###################################################################### +# +# Finish the configuration +# env = conf.Finish() +###################################################################### +# +# Collect all non-global variables +# + +Export('env') + # Define the universe of supported ISAs -env['ALL_ISA_LIST'] = ['alpha', 'sparc', 'mips'] +all_isa_list = [ ] +Export('all_isa_list') # Define the universe of supported CPU models -env['ALL_CPU_LIST'] = ['AtomicSimpleCPU', 'TimingSimpleCPU', - 'O3CPU', 'OzoneCPU'] +all_cpu_list = [ ] +default_cpus = [ ] +Export('all_cpu_list', 'default_cpus') -if os.path.isdir(joinpath(SRCDIR, 'encumbered/cpu/full')): - env['ALL_CPU_LIST'] += ['FullCPU'] - -# Sticky options get saved in the options file so they persist from +# Sticky variables get saved in the variables file so they persist from # one invocation to the next (unless overridden, in which case the new # value becomes sticky). -sticky_opts = Options(args=ARGUMENTS) -sticky_opts.AddOptions( - EnumOption('TARGET_ISA', 'Target ISA', 'alpha', env['ALL_ISA_LIST']), - BoolOption('FULL_SYSTEM', 'Full-system support', False), - # There's a bug in scons 0.96.1 that causes ListOptions with list - # values (more than one value) not to be able to be restored from - # a saved option file. 
If this causes trouble then upgrade to - # scons 0.96.90 or later. - ListOption('CPU_MODELS', 'CPU models', 'AtomicSimpleCPU,TimingSimpleCPU,O3CPU', - env['ALL_CPU_LIST']), - BoolOption('ALPHA_TLASER', - 'Model Alpha TurboLaser platform (vs. Tsunami)', False), - BoolOption('NO_FAST_ALLOC', 'Disable fast object allocator', False), - BoolOption('EFENCE', 'Link with Electric Fence malloc debugger', - False), - BoolOption('SS_COMPATIBLE_FP', - 'Make floating-point results compatible with SimpleScalar', - False), - BoolOption('USE_SSE2', - 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts', - False), - BoolOption('USE_MYSQL', 'Use MySQL for stats output', have_mysql), - BoolOption('USE_FENV', 'Use IEEE mode control', have_fenv), - BoolOption('USE_CHECKER', 'Use checker for detailed CPU models', False), - ('CC', 'C compiler', os.environ.get('CC', env['CC'])), - ('CXX', 'C++ compiler', os.environ.get('CXX', env['CXX'])), - BoolOption('BATCH', 'Use batch pool for build and tests', False), - ('BATCH_CMD', 'Batch pool submission command name', 'qdo'), - ('PYTHONHOME', - 'Override the default PYTHONHOME for this system (use with caution)', - '%s:%s' % (sys.prefix, sys.exec_prefix)) +sticky_vars = Variables(args=ARGUMENTS) +Export('sticky_vars') + +# Non-sticky variables only apply to the current build. +nonsticky_vars = Variables(args=ARGUMENTS) +Export('nonsticky_vars') + +# Walk the tree and execute all SConsopts scripts that wil add to the +# above variables +for bdir in [ base_dir ] + extras_dir_list: + for root, dirs, files in os.walk(bdir): + if 'SConsopts' in files: + print "Reading", joinpath(root, 'SConsopts') + SConscript(joinpath(root, 'SConsopts')) + +all_isa_list.sort() +all_cpu_list.sort() +default_cpus.sort() + +sticky_vars.AddVariables( + EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list), + BoolVariable('FULL_SYSTEM', 'Full-system support', False), + ListVariable('CPU_MODELS', 'CPU models', default_cpus, all_cpu_list), + BoolVariable('NO_FAST_ALLOC', 'Disable fast object allocator', False), + BoolVariable('FAST_ALLOC_DEBUG', 'Enable fast object allocator debugging', + False), + BoolVariable('FAST_ALLOC_STATS', 'Enable fast object allocator statistics', + False), + BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger', + False), + BoolVariable('SS_COMPATIBLE_FP', + 'Make floating-point results compatible with SimpleScalar', + False), + BoolVariable('USE_SSE2', + 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts', + False), + BoolVariable('USE_MYSQL', 'Use MySQL for stats output', have_mysql), + BoolVariable('USE_FENV', 'Use IEEE mode control', have_fenv), + BoolVariable('USE_CHECKER', 'Use checker for detailed CPU models', False), ) -# Non-sticky options only apply to the current build. -nonsticky_opts = Options(args=ARGUMENTS) -nonsticky_opts.AddOptions( - BoolOption('update_ref', 'Update test reference outputs', False) +nonsticky_vars.AddVariables( + BoolVariable('update_ref', 'Update test reference outputs', False) ) -# These options get exported to #defines in config/*.hh (see src/SConscript). -env.ExportOptions = ['FULL_SYSTEM', 'ALPHA_TLASER', 'USE_FENV', \ - 'USE_MYSQL', 'NO_FAST_ALLOC', 'SS_COMPATIBLE_FP', \ - 'USE_CHECKER', 'PYTHONHOME', 'TARGET_ISA'] - -# Define a handy 'no-op' action -def no_action(target, source, env): - return 0 - -env.NoAction = Action(no_action, None) +# These variables get exported to #defines in config/*.hh (see src/SConscript). 
+env.ExportVariables = ['FULL_SYSTEM', 'ALPHA_TLASER', 'USE_FENV', \ + 'USE_MYSQL', 'NO_FAST_ALLOC', 'FAST_ALLOC_DEBUG', \ + 'FAST_ALLOC_STATS', 'SS_COMPATIBLE_FP', \ + 'USE_CHECKER', 'TARGET_ISA'] ################################################### # @@ -426,20 +708,20 @@ env.NoAction = Action(no_action, None) ################################################### # This function generates a config header file that #defines the -# option symbol to the current option setting (0 or 1). The source -# operands are the name of the option and a Value node containing the -# value of the option. +# variable symbol to the current variable setting (0 or 1). The source +# operands are the name of the variable and a Value node containing the +# value of the variable. def build_config_file(target, source, env): - (option, value) = [s.get_contents() for s in source] + (variable, value) = [s.get_contents() for s in source] f = file(str(target[0]), 'w') - print >> f, '#define', option, value + print >> f, '#define', variable, value f.close() return None # Generate the message to be printed when building the config file. def build_config_file_string(target, source, env): - (option, value) = [s.get_contents() for s in source] - return "Defining %s as %s in %s." % (option, value, target[0]) + (variable, value) = [s.get_contents() for s in source] + return "Defining %s as %s in %s." % (variable, value, target[0]) # Combine the two functions into a scons Action object. config_action = Action(build_config_file, build_config_file_string) @@ -447,60 +729,31 @@ config_action = Action(build_config_file, build_config_file_string) # The emitter munges the source & target node lists to reflect what # we're really doing. def config_emitter(target, source, env): - # extract option name from Builder arg - option = str(target[0]) + # extract variable name from Builder arg + variable = str(target[0]) # True target is config header file - target = joinpath('config', option.lower() + '.hh') - val = env[option] + target = joinpath('config', variable.lower() + '.hh') + val = env[variable] if isinstance(val, bool): # Force value to 0/1 val = int(val) elif isinstance(val, str): val = '"' + val + '"' - - # Sources are option name & value (packaged in SCons Value nodes) - return ([target], [Value(option), Value(val)]) + + # Sources are variable name & value (packaged in SCons Value nodes) + return ([target], [Value(variable), Value(val)]) config_builder = Builder(emitter = config_emitter, action = config_action) env.Append(BUILDERS = { 'ConfigFile' : config_builder }) -################################################### -# -# Define a SCons builder for copying files. This is used by the -# Python zipfile code in src/python/SConscript, but is placed up here -# since it's potentially more generally applicable. -# -################################################### - -copy_builder = Builder(action = Copy("$TARGET", "$SOURCE")) - -env.Append(BUILDERS = { 'CopyFile' : copy_builder }) - -################################################### -# -# Define a simple SCons builder to concatenate files. -# -# Used to append the Python zip archive to the executable. -# -################################################### - -concat_builder = Builder(action = Action(['cat $SOURCES > $TARGET', - 'chmod +x $TARGET'])) - -env.Append(BUILDERS = { 'Concat' : concat_builder }) - - -# base help text -help_text = ''' -Usage: scons [scons options] [build options] [target(s)] - -''' - # libelf build is shared across all configs in the build root. 
env.SConscript('ext/libelf/SConscript', - build_dir = joinpath(build_root, 'libelf'), - exports = 'env') + variant_dir = joinpath(build_root, 'libelf')) + +# gzstream build is shared across all configs in the build root. +env.SConscript('ext/gzstream/SConscript', + variant_dir = joinpath(build_root, 'gzstream')) ################################################### # @@ -508,27 +761,28 @@ env.SConscript('ext/libelf/SConscript', # ################################################### -def make_switching_dir(dirname, switch_headers, env): +env['ALL_ISA_LIST'] = all_isa_list +def make_switching_dir(dname, switch_headers, env): # Generate the header. target[0] is the full path of the output # header to generate. 'source' is a dummy variable, since we get the # list of ISAs from env['ALL_ISA_LIST']. def gen_switch_hdr(target, source, env): - fname = str(target[0]) - basename = os.path.basename(fname) - f = open(fname, 'w') - f.write('#include "arch/isa_specific.hh"\n') - cond = '#if' - for isa in env['ALL_ISA_LIST']: - f.write('%s THE_ISA == %s_ISA\n#include "%s/%s/%s"\n' - % (cond, isa.upper(), dirname, isa, basename)) - cond = '#elif' - f.write('#else\n#error "THE_ISA not set"\n#endif\n') - f.close() - return 0 + fname = str(target[0]) + bname = basename(fname) + f = open(fname, 'w') + f.write('#include "arch/isa_specific.hh"\n') + cond = '#if' + for isa in all_isa_list: + f.write('%s THE_ISA == %s_ISA\n#include "%s/%s/%s"\n' + % (cond, isa.upper(), dname, isa, bname)) + cond = '#elif' + f.write('#else\n#error "THE_ISA not set"\n#endif\n') + f.close() + return 0 # String to print when generating header def gen_switch_hdr_string(target, source, env): - return "Generating switch header " + str(target[0]) + return "Generating switch header " + str(target[0]) # Build SCons Action object. 'varlist' specifies env vars that this # action depends on; when env['ALL_ISA_LIST'] changes these actions @@ -539,8 +793,7 @@ def make_switching_dir(dirname, switch_headers, env): # Instantiate actions for each header for hdr in switch_headers: env.Command(hdr, [], switch_hdr_action) - -env.make_switching_dir = make_switching_dir +Export('make_switching_dir') ################################################### # @@ -551,63 +804,66 @@ env.make_switching_dir = make_switching_dir # rename base env base_env = env -for build_path in build_paths: - print "Building in", build_path - # build_dir is the tail component of build path, and is used to - # determine the build parameters (e.g., 'ALPHA_SE') - (build_root, build_dir) = os.path.split(build_path) +for variant_path in variant_paths: + print "Building in", variant_path + # Make a copy of the build-root environment to use for this config. - env = base_env.Copy() - - # Set env options according to the build directory config. - sticky_opts.files = [] - # Options for $BUILD_ROOT/$BUILD_DIR are stored in - # $BUILD_ROOT/options/$BUILD_DIR so you can nuke - # $BUILD_ROOT/$BUILD_DIR without losing your options settings. - current_opts_file = joinpath(build_root, 'options', build_dir) - if os.path.isfile(current_opts_file): - sticky_opts.files.append(current_opts_file) - print "Using saved options file %s" % current_opts_file + env = base_env.Clone() + env['BUILDDIR'] = variant_path + + # variant_dir is the tail component of build path, and is used to + # determine the build parameters (e.g., 'ALPHA_SE') + (build_root, variant_dir) = splitpath(variant_path) + + # Set env variables according to the build directory config. 
+ sticky_vars.files = [] + # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in + # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke + # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings. + current_vars_file = joinpath(build_root, 'variables', variant_dir) + if isfile(current_vars_file): + sticky_vars.files.append(current_vars_file) + print "Using saved variables file %s" % current_vars_file else: - # Build dir-specific options file doesn't exist. + # Build dir-specific variables file doesn't exist. # Make sure the directory is there so we can create it later - opt_dir = os.path.dirname(current_opts_file) - if not os.path.isdir(opt_dir): - os.mkdir(opt_dir) + opt_dir = dirname(current_vars_file) + if not isdir(opt_dir): + mkdir(opt_dir) - # Get default build options from source tree. Options are - # normally determined by name of $BUILD_DIR, but can be + # Get default build variables from source tree. Variables are + # normally determined by name of $VARIANT_DIR, but can be # overriden by 'default=' arg on command line. - default_opts_file = joinpath('build_opts', - ARGUMENTS.get('default', build_dir)) - if os.path.isfile(default_opts_file): - sticky_opts.files.append(default_opts_file) - print "Options file %s not found,\n using defaults in %s" \ - % (current_opts_file, default_opts_file) + default_vars_file = joinpath('build_opts', + ARGUMENTS.get('default', variant_dir)) + if isfile(default_vars_file): + sticky_vars.files.append(default_vars_file) + print "Variables file %s not found,\n using defaults in %s" \ + % (current_vars_file, default_vars_file) else: - print "Error: cannot find options file %s or %s" \ - % (current_opts_file, default_opts_file) + print "Error: cannot find variables file %s or %s" \ + % (current_vars_file, default_vars_file) Exit(1) - # Apply current option settings to env - sticky_opts.Update(env) - nonsticky_opts.Update(env) + # Apply current variable settings to env + sticky_vars.Update(env) + nonsticky_vars.Update(env) - help_text += "Sticky options for %s:\n" % build_dir \ - + sticky_opts.GenerateHelpText(env) \ - + "\nNon-sticky options for %s:\n" % build_dir \ - + nonsticky_opts.GenerateHelpText(env) + help_text += "\nSticky variables for %s:\n" % variant_dir \ + + sticky_vars.GenerateHelpText(env) \ + + "\nNon-sticky variables for %s:\n" % variant_dir \ + + nonsticky_vars.GenerateHelpText(env) - # Process option settings. + # Process variable settings. if not have_fenv and env['USE_FENV']: print "Warning: not available; " \ - "forcing USE_FENV to False in", build_dir + "." + "forcing USE_FENV to False in", variant_dir + "." env['USE_FENV'] = False if not env['USE_FENV']: - print "Warning: No IEEE FP rounding mode control in", build_dir + "." + print "Warning: No IEEE FP rounding mode control in", variant_dir + "." print " FP results may deviate slightly from other platforms." if env['EFENCE']: @@ -616,44 +872,29 @@ for build_path in build_paths: if env['USE_MYSQL']: if not have_mysql: print "Warning: MySQL not available; " \ - "forcing USE_MYSQL to False in", build_dir + "." + "forcing USE_MYSQL to False in", variant_dir + "." env['USE_MYSQL'] = False else: - print "Compiling in", build_dir, "with MySQL support." + print "Compiling in", variant_dir, "with MySQL support." 
env.ParseConfig(mysql_config_libs) env.ParseConfig(mysql_config_include) - # Save sticky option settings back to current options file - sticky_opts.Save(current_opts_file, env) - - # Do this after we save setting back, or else we'll tack on an - # extra 'qdo' every time we run scons. - if env['BATCH']: - env['CC'] = env['BATCH_CMD'] + ' ' + env['CC'] - env['CXX'] = env['BATCH_CMD'] + ' ' + env['CXX'] + # Save sticky variable settings back to current variables file + sticky_vars.Save(current_vars_file, env) if env['USE_SSE2']: env.Append(CCFLAGS='-msse2') # The src/SConscript file sets up the build rules in 'env' according - # to the configured options. It returns a list of environments, + # to the configured variables. It returns a list of environments, # one for each variant build (debug, opt, etc.) - envList = SConscript('src/SConscript', build_dir = build_path, + envList = SConscript('src/SConscript', variant_dir = variant_path, exports = 'env') # Set up the regression tests for each build. for e in envList: SConscript('tests/SConscript', - build_dir = joinpath(build_path, 'tests', e.Label), + variant_dir = joinpath(variant_path, 'tests', e.Label), exports = { 'env' : e }, duplicate = False) Help(help_text) - - -################################################### -# -# Let SCons do its thing. At this point SCons will use the defined -# build environments to build the requested targets. -# -################################################### -
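
For readers skimming this patch, one of the more reusable pieces is the reworked compare_versions() helper near the top of the new SConstruct: unlike the dotted-string-only version it replaces, it accepts either an int tuple/list or a dotted string, and it keeps only the leading digits of each string component before comparing. Below is a minimal standalone sketch of that behaviour, not part of the patch itself; the sample inputs, including the '4.1.22-standard' style string, are illustrative assumptions about the kind of output tools such as `mysql_config --version` may report.

    # Standalone sketch (not part of the patch) of the compare_versions()
    # helper added above.  Inputs may be int sequences or dotted strings;
    # each string component keeps only its leading digits.
    import re

    def compare_versions(v1, v2):
        """Return -1, 0, or 1 as v1 is <, ==, or > v2."""
        def make_version_list(v):
            if isinstance(v, (list, tuple)):
                return list(v)
            elif isinstance(v, str):
                return [int(re.match(r'\d+', x).group()) for x in v.split('.')]
            else:
                raise TypeError("unsupported version spec: %r" % (v,))

        v1 = make_version_list(v1)
        v2 = make_version_list(v2)
        # Compare corresponding components first...
        for n1, n2 in zip(v1, v2):
            if n1 < n2: return -1
            if n1 > n2: return 1
        # ...then treat the spec with extra components as the newer one.
        if len(v1) < len(v2): return -1
        if len(v1) > len(v2): return 1
        return 0

    # Illustrative checks (hypothetical inputs):
    assert compare_versions('1.3.25', (1, 4, 1)) == -1      # string vs. tuple
    assert compare_versions('4.1.22-standard', '4.1') == 1  # suffix stripped
    assert compare_versions((0, 98, 1), '0.98.1') == 0

The suffix-stripping regex appears to be what lets checks such as the MySQL minimum-version test above compare cleanly even when the reported version string carries a non-numeric distribution tag.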