# -*- mode:python -*-
-# Copyright (c) 2013, 2015, 2016 ARM Limited
+# Copyright (c) 2013, 2015-2017 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
#
###################################################
-# Check for recent-enough Python and SCons versions.
-try:
- # Really old versions of scons only take two options for the
- # function, so check once without the revision and once with the
- # revision, the first instance will fail for stuff other than
- # 0.98, and the second will fail for 0.98.0
- EnsureSConsVersion(0, 98)
- EnsureSConsVersion(0, 98, 1)
-except SystemExit, e:
- print """
-For more details, see:
- http://gem5.org/Dependencies
-"""
- raise
-
-# We ensure the python version early because because python-config
-# requires python 2.5
-try:
- EnsurePythonVersion(2, 5)
-except SystemExit, e:
- print """
-You can use a non-default installation of the Python interpreter by
-rearranging your PATH so that scons finds the non-default 'python' and
-'python-config' first.
-
-For more details, see:
- http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
-"""
- raise
-
# Global Python includes
import itertools
import os
import SCons
import SCons.Node
-extra_python_paths = [
- Dir('src/python').srcnode().abspath, # gem5 includes
- Dir('ext/ply').srcnode().abspath, # ply is used by several files
- ]
-
-sys.path[1:1] = extra_python_paths
-
from m5.util import compareVersions, readCommand
-from m5.util.terminal import get_termcap
help_texts = {
"options" : "",
help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
help='Disable Link-Time Optimization for fast')
+AddLocalOption('--force-lto', dest='force_lto', action='store_true',
+ help='Use Link-Time Optimization instead of partial linking' +
+ ' when the compiler doesn\'t support using them together.')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
AddLocalOption('--with-asan', dest='with_asan', action='store_true',
help='Build with Address Sanitizer if available')
-termcap = get_termcap(GetOption('use_colors'))
+if GetOption('no_lto') and GetOption('force_lto'):
+ print '--no-lto and --force-lto are mutually exclusive'
+ Exit(1)
########################################################################
#
#
########################################################################
-# export TERM so that clang reports errors in color
-use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
- 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
- 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])
-
-use_prefixes = [
- "ASAN_", # address sanitizer symbolizer path and settings
- "CCACHE_", # ccache (caching compiler wrapper) configuration
- "CCC_", # clang static analyzer configuration
- "DISTCC_", # distcc (distributed compiler wrapper) configuration
- "INCLUDE_SERVER_", # distcc pump server settings
- "M5", # M5 configuration (e.g., path to kernels)
- ]
-
-use_env = {}
-for key,val in sorted(os.environ.iteritems()):
- if key in use_vars or \
- any([key.startswith(prefix) for prefix in use_prefixes]):
- use_env[key] = val
-
-# Tell scons to avoid implicit command dependencies to avoid issues
-# with the param wrappes being compiled twice (see
-# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
-main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
-main.Decider('MD5-timestamp')
-main.root = Dir(".") # The current directory (where this file lives).
-main.srcdir = Dir("src") # The source directory
+main = Environment()
+
+from gem5_scons import Transform
+from gem5_scons.util import get_termcap
+termcap = get_termcap()
main_dict_keys = main.Dictionary().keys()
print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
Exit(1)
-# Check that swig is present
-if not 'SWIG' in main_dict_keys:
- print "swig is not installed (package swig on Ubuntu and RedHat)"
- Exit(1)
-
-# add useful python code PYTHONPATH so it can be used by subprocesses
-# as well
-main.AppendENVPath('PYTHONPATH', extra_python_paths)
-
-########################################################################
-#
-# Mercurial Stuff.
-#
-# If the gem5 directory is a mercurial repository, we should do some
-# extra things.
-#
-########################################################################
-
-hgdir = main.root.Dir(".hg")
-
-
-style_message = """
-You're missing the gem5 style hook, which automatically checks your code
-against the gem5 style rules on %s.
-This script will now install the hook in your %s.
-Press enter to continue, or ctrl-c to abort: """
-
-mercurial_style_message = """
-You're missing the gem5 style hook, which automatically checks your code
-against the gem5 style rules on hg commit and qrefresh commands.
-This script will now install the hook in your .hg/hgrc file.
-Press enter to continue, or ctrl-c to abort: """
-
-git_style_message = """
-You're missing the gem5 style or commit message hook. These hooks help
-to ensure that your code follows gem5's style rules on git commit.
-This script will now install the hook in your .git/hooks/ directory.
-Press enter to continue, or ctrl-c to abort: """
-
-mercurial_style_upgrade_message = """
-Your Mercurial style hooks are not up-to-date. This script will now
-try to automatically update them. A backup of your hgrc will be saved
-in .hg/hgrc.old.
-Press enter to continue, or ctrl-c to abort: """
-
-mercurial_style_hook = """
-# The following lines were automatically added by gem5/SConstruct
-# to provide the gem5 style-checking hooks
-[extensions]
-hgstyle = %s/util/hgstyle.py
-
-[hooks]
-pretxncommit.style = python:hgstyle.check_style
-pre-qrefresh.style = python:hgstyle.check_style
-# End of SConstruct additions
-
-""" % (main.root.abspath)
-
-mercurial_lib_not_found = """
-Mercurial libraries cannot be found, ignoring style hook. If
-you are a gem5 developer, please fix this and run the style
-hook. It is important.
-"""
-
-# Check for style hook and prompt for installation if it's not there.
-# Skip this if --ignore-style was specified, there's no interactive
-# terminal to prompt, or no recognized revision control system can be
-# found.
-ignore_style = GetOption('ignore_style') or not sys.stdin.isatty()
-
-# Try wire up Mercurial to the style hooks
-if not ignore_style and hgdir.exists():
- style_hook = True
- style_hooks = tuple()
- hgrc = hgdir.File('hgrc')
- hgrc_old = hgdir.File('hgrc.old')
- try:
- from mercurial import ui
- ui = ui.ui()
- ui.readconfig(hgrc.abspath)
- style_hooks = (ui.config('hooks', 'pretxncommit.style', None),
- ui.config('hooks', 'pre-qrefresh.style', None))
- style_hook = all(style_hooks)
- style_extension = ui.config('extensions', 'style', None)
- except ImportError:
- print mercurial_lib_not_found
-
- if "python:style.check_style" in style_hooks:
- # Try to upgrade the style hooks
- print mercurial_style_upgrade_message
- # continue unless user does ctrl-c/ctrl-d etc.
- try:
- raw_input()
- except:
- print "Input exception, exiting scons.\n"
- sys.exit(1)
- shutil.copyfile(hgrc.abspath, hgrc_old.abspath)
- re_style_hook = re.compile(r"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
- re_style_extension = re.compile("style\s*=\s*([^#\s]+).*")
- old, new = open(hgrc_old.abspath, 'r'), open(hgrc.abspath, 'w')
- for l in old:
- m_hook = re_style_hook.match(l)
- m_ext = re_style_extension.match(l)
- if m_hook:
- hook, check = m_hook.groups()
- if check != "python:style.check_style":
- print "Warning: %s.style is using a non-default " \
- "checker: %s" % (hook, check)
- if hook not in ("pretxncommit", "pre-qrefresh"):
- print "Warning: Updating unknown style hook: %s" % hook
-
- l = "%s.style = python:hgstyle.check_style\n" % hook
- elif m_ext and m_ext.group(1) == style_extension:
- l = "hgstyle = %s/util/hgstyle.py\n" % main.root.abspath
-
- new.write(l)
- elif not style_hook:
- print mercurial_style_message,
- # continue unless user does ctrl-c/ctrl-d etc.
- try:
- raw_input()
- except:
- print "Input exception, exiting scons.\n"
- sys.exit(1)
- hgrc_path = '%s/.hg/hgrc' % main.root.abspath
- print "Adding style hook to", hgrc_path, "\n"
- try:
- with open(hgrc_path, 'a') as f:
- f.write(mercurial_style_hook)
- except:
- print "Error updating", hgrc_path
- sys.exit(1)
-
-def install_git_style_hooks():
- try:
- gitdir = Dir(readCommand(
- ["git", "rev-parse", "--git-dir"]).strip("\n"))
- except Exception, e:
- print "Warning: Failed to find git repo directory: %s" % e
- return
-
- git_hooks = gitdir.Dir("hooks")
- def hook_exists(hook_name):
- hook = git_hooks.File(hook_name)
- return hook.exists()
-
- def hook_install(hook_name, script):
- hook = git_hooks.File(hook_name)
- if hook.exists():
- print "Warning: Can't install %s, hook already exists." % hook_name
- return
-
- if hook.islink():
- print "Warning: Removing broken symlink for hook %s." % hook_name
- os.unlink(hook.get_abspath())
-
- if not git_hooks.exists():
- mkdir(git_hooks.get_abspath())
- git_hooks.clear()
-
- abs_symlink_hooks = git_hooks.islink() and \
- os.path.isabs(os.readlink(git_hooks.get_abspath()))
-
- # Use a relative symlink if the hooks live in the source directory,
- # and the hooks directory is not a symlink to an absolute path.
- if hook.is_under(main.root) and not abs_symlink_hooks:
- script_path = os.path.relpath(
- os.path.realpath(script.get_abspath()),
- os.path.realpath(hook.Dir(".").get_abspath()))
- else:
- script_path = script.get_abspath()
-
- try:
- os.symlink(script_path, hook.get_abspath())
- except:
- print "Error updating git %s hook" % hook_name
- raise
-
- if hook_exists("pre-commit") and hook_exists("commit-msg"):
- return
-
- print git_style_message,
- try:
- raw_input()
- except:
- print "Input exception, exiting scons.\n"
- sys.exit(1)
-
- git_style_script = File("util/git-pre-commit.py")
- git_msg_script = File("ext/git-commit-msg")
-
- hook_install("pre-commit", git_style_script)
- hook_install("commit-msg", git_msg_script)
-
-# Try to wire up git to the style hooks
-if not ignore_style and main.root.Entry(".git").exists():
- install_git_style_hooks()
-
###################################################
#
# Figure out which configurations to set up based on the path(s) of
global_vars.AddVariables(
('CC', 'C compiler', environ.get('CC', main['CC'])),
('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
- ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
('BATCH', 'Use batch pool for build and tests', False),
('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])
-def strip_build_path(path, env):
- path = str(path)
- variant_base = env['BUILDROOT'] + os.path.sep
- if path.startswith(variant_base):
- path = path[len(variant_base):]
- elif path.startswith('build/'):
- path = path[6:]
- return path
-
-# Generate a string of the form:
-# common/path/prefix/src1, src2 -> tgt1, tgt2
-# to print while building.
-class Transform(object):
- # all specific color settings should be here and nowhere else
- tool_color = termcap.Normal
- pfx_color = termcap.Yellow
- srcs_color = termcap.Yellow + termcap.Bold
- arrow_color = termcap.Blue + termcap.Bold
- tgts_color = termcap.Yellow + termcap.Bold
-
- def __init__(self, tool, max_sources=99):
- self.format = self.tool_color + (" [%8s] " % tool) \
- + self.pfx_color + "%s" \
- + self.srcs_color + "%s" \
- + self.arrow_color + " -> " \
- + self.tgts_color + "%s" \
- + termcap.Normal
- self.max_sources = max_sources
-
- def __call__(self, target, source, env, for_signature=None):
- # truncate source list according to max_sources param
- source = source[0:self.max_sources]
- def strip(f):
- return strip_build_path(str(f), env)
- if len(source) > 0:
- srcs = map(strip, source)
- else:
- srcs = ['']
- tgts = map(strip, target)
- # surprisingly, os.path.commonprefix is a dumb char-by-char string
- # operation that has nothing to do with paths.
- com_pfx = os.path.commonprefix(srcs + tgts)
- com_pfx_len = len(com_pfx)
- if com_pfx:
- # do some cleanup and sanity checking on common prefix
- if com_pfx[-1] == ".":
- # prefix matches all but file extension: ok
- # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
- com_pfx = com_pfx[0:-1]
- elif com_pfx[-1] == "/":
- # common prefix is directory path: OK
- pass
- else:
- src0_len = len(srcs[0])
- tgt0_len = len(tgts[0])
- if src0_len == com_pfx_len:
- # source is a substring of target, OK
- pass
- elif tgt0_len == com_pfx_len:
- # target is a substring of source, need to back up to
- # avoid empty string on RHS of arrow
- sep_idx = com_pfx.rfind(".")
- if sep_idx != -1:
- com_pfx = com_pfx[0:sep_idx]
- else:
- com_pfx = ''
- elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
- # still splitting at file extension: ok
- pass
- else:
- # probably a fluke; ignore it
- com_pfx = ''
- # recalculate length in case com_pfx was modified
- com_pfx_len = len(com_pfx)
- def fmt(files):
- f = map(lambda s: s[com_pfx_len:], files)
- return ', '.join(f)
- return self.format % (com_pfx, fmt(srcs), fmt(tgts))
-
-Export('Transform')
-
-# enable the regression script to use the termcap
-main['TERMCAP'] = termcap
+# Add shared top-level headers
+main.Prepend(CPPPATH=Dir('include'))
if GetOption('verbose'):
def MakeAction(action, string, *args, **kwargs):
main['CCCOMSTR'] = Transform("CC")
main['CXXCOMSTR'] = Transform("CXX")
main['ASCOMSTR'] = Transform("AS")
- main['SWIGCOMSTR'] = Transform("SWIG")
main['ARCOMSTR'] = Transform("AR", 0)
main['LINKCOMSTR'] = Transform("LINK", 0)
main['SHLINKCOMSTR'] = Transform("SHLINK", 0)
if sys.platform.startswith('freebsd'):
main.Append(CCFLAGS=['-I/usr/local/include'])
main.Append(CXXFLAGS=['-I/usr/local/include'])
+
+ main['FILTER_PSHLINKFLAGS'] = lambda x: str(x).replace(' -shared', '')
+ main['PSHLINKFLAGS'] = main.subst('${FILTER_PSHLINKFLAGS(SHLINKFLAGS)}')
+ main['PLINKFLAGS'] = main.subst('${LINKFLAGS}')
+ shared_partial_flags = ['-r', '-nostdlib']
+ main.Append(PSHLINKFLAGS=shared_partial_flags)
+ main.Append(PLINKFLAGS=shared_partial_flags)
+
+    # Treat warnings as errors, but whitelist some warnings that we
+    # want to allow (e.g., deprecation warnings).
+ main.Append(CCFLAGS=['-Werror',
+ '-Wno-error=deprecated-declarations',
+ '-Wno-error=deprecated',
+ ])
else:
print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
print "Don't know what compiler options to use for your compiler."
main['GCC_VERSION'] = gcc_version
+ if compareVersions(gcc_version, '4.9') >= 0:
+ # Incremental linking with LTO is currently broken in gcc versions
+ # 4.9 and above. A version where everything works completely hasn't
+ # yet been identified.
+ #
+ # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67548
+ main['BROKEN_INCREMENTAL_LTO'] = True
+ if compareVersions(gcc_version, '6.0') >= 0:
+ # gcc versions 6.0 and greater accept an -flinker-output flag which
+ # selects what type of output the linker should generate. This is
+ # necessary for incremental lto to work, but is also broken in
+ # current versions of gcc. It may not be necessary in future
+ # versions. We add it here since it might be, and as a reminder that
+ # it exists. It's excluded if lto is being forced.
+ #
+ # https://gcc.gnu.org/gcc-6/changes.html
+ # https://gcc.gnu.org/ml/gcc-patches/2015-11/msg03161.html
+ # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=69866
+ if not GetOption('force_lto'):
+ main.Append(PSHLINKFLAGS='-flinker-output=rel')
+ main.Append(PLINKFLAGS='-flinker-output=rel')
+
# gcc from version 4.8 and above generates "rep; ret" instructions
# to avoid performance penalties on certain AMD chips. Older
# assemblers detect this as an error, "Error: expecting string
'Warning: UBSan is only supported using gcc 4.9 and later.' + \
termcap.Normal
+ disable_lto = GetOption('no_lto')
+ if not disable_lto and main.get('BROKEN_INCREMENTAL_LTO', False) and \
+ not GetOption('force_lto'):
+ print termcap.Yellow + termcap.Bold + \
+ 'Warning: Your compiler doesn\'t support incremental linking' + \
+ ' and lto at the same time, so lto is being disabled. To force' + \
+ ' lto on anyway, use the --force-lto option. That will disable' + \
+ ' partial linking.' + \
+ termcap.Normal
+ disable_lto = True
+
# Add the appropriate Link-Time Optimization (LTO) flags
# unless LTO is explicitly turned off. Note that these flags
# are only used by the fast target.
- if not GetOption('no_lto'):
+ if not disable_lto:
# Pass the LTO flag when compiling to produce GIMPLE
# output, we merely create the flags here and only append
# them later
if compareVersions(gcc_version, "5.0") > 0:
main.Append(CCFLAGS=['-Wno-error=suggest-override'])
+ # The address sanitizer is available for gcc >= 4.8
+ if GetOption('with_asan'):
+ if GetOption('with_ubsan') and \
+ compareVersions(env['GCC_VERSION'], '4.9') >= 0:
+ env.Append(CCFLAGS=['-fsanitize=address,undefined',
+ '-fno-omit-frame-pointer'],
+ LINKFLAGS='-fsanitize=address,undefined')
+ else:
+ env.Append(CCFLAGS=['-fsanitize=address',
+ '-fno-omit-frame-pointer'],
+ LINKFLAGS='-fsanitize=address')
+ # Only gcc >= 4.9 supports UBSan, so check both the version
+ # and the command-line option before adding the compiler and
+ # linker flags.
+ elif GetOption('with_ubsan') and \
+ compareVersions(env['GCC_VERSION'], '4.9') >= 0:
+ env.Append(CCFLAGS='-fsanitize=undefined')
+ env.Append(LINKFLAGS='-fsanitize=undefined')
+
elif main['CLANG']:
# Check for a supported version of clang, >= 3.1 is needed to
# support similar features as gcc 4.8. See
if sys.platform.startswith('freebsd'):
main.Append(LIBS=['thr'])
+ # We require clang >= 3.1, so there is no need to check any
+ # versions here.
+ if GetOption('with_ubsan'):
+ if GetOption('with_asan'):
+ env.Append(CCFLAGS=['-fsanitize=address,undefined',
+ '-fno-omit-frame-pointer'],
+ LINKFLAGS='-fsanitize=address,undefined')
+ else:
+ env.Append(CCFLAGS='-fsanitize=undefined',
+ LINKFLAGS='-fsanitize=undefined')
+
+ elif GetOption('with_asan'):
+ env.Append(CCFLAGS=['-fsanitize=address',
+ '-fno-omit-frame-pointer'],
+ LINKFLAGS='-fsanitize=address')
+
else:
print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
print "Don't know what compiler options to use for your compiler."
'Warning: pkg-config could not get protobuf flags.' + \
termcap.Normal
-# Check for SWIG
-if not main.has_key('SWIG'):
- print 'Error: SWIG utility not found.'
- print ' Please install (see http://www.swig.org) and retry.'
- Exit(1)
-
-# Check for appropriate SWIG version
-swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
-# First 3 words should be "SWIG Version x.y.z"
-if len(swig_version) < 3 or \
- swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
- print 'Error determining SWIG version.'
- Exit(1)
-
-min_swig_version = '2.0.4'
-if compareVersions(swig_version[2], min_swig_version) < 0:
- print 'Error: SWIG version', min_swig_version, 'or newer required.'
- print ' Installed version:', swig_version[2]
- Exit(1)
-
-# Check for known incompatibilities. The standard library shipped with
-# gcc >= 4.9 does not play well with swig versions prior to 3.0
-if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
- compareVersions(swig_version[2], '3.0') < 0:
- print termcap.Yellow + termcap.Bold + \
- 'Warning: This combination of gcc and swig have' + \
- ' known incompatibilities.\n' + \
- ' If you encounter build problems, please update ' + \
- 'swig to 3.0 or later.' + \
- termcap.Normal
-
-# Set up SWIG flags & scanner
-swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
-main.Append(SWIGFLAGS=swig_flags)
# Check for 'timeout' from GNU coreutils. If present, regressions will
# be run with a time limit. We require version 8.13 since we rely on
main['TIMEOUT'] = timeout_version and \
compareVersions(timeout_version[-1], '8.13') >= 0
-# filter out all existing swig scanners, they mess up the dependency
-# stuff for some reason
-scanners = []
-for scanner in main['SCANNERS']:
- skeys = scanner.skeys
- if skeys == '.i':
- continue
-
- if isinstance(skeys, (list, tuple)) and '.i' in skeys:
- continue
-
- scanners.append(scanner)
-
-# add the new swig scanner that we like better
-from SCons.Scanner import ClassicCPP as CPPScanner
-swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
-scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))
-
-# replace the scanners list that has what we want
-main['SCANNERS'] = scanners
-
# Add a custom Check function to test for structure members.
def CheckMember(context, include, decl, member, include_quotes="<>"):
context.Message("Checking for member %s in %s..." %
print 'Using build cache located at', main['M5_BUILD_CACHE']
CacheDir(main['M5_BUILD_CACHE'])
-if not GetOption('without_python'):
+main['USE_PYTHON'] = not GetOption('without_python')
+if main['USE_PYTHON']:
# Find Python include and library directories for embedding the
# interpreter. We rely on python-config to resolve the appropriate
# includes and linker flags. ParseConfig does not seem to understand
print "Warning: Header file <fenv.h> not found."
print " This host has no IEEE FP rounding mode control."
+# Check for <png.h> (libpng library needed if wanting to dump
+# frame buffer image in png format)
+have_png = conf.CheckHeader('png.h', '<>')
+if not have_png:
+ print "Warning: Header file <png.h> not found."
+ print " This host has no libpng library."
+ print " Disabling support for PNG framebuffers."
+
# Check if we should enable KVM-based hardware virtualization. The API
# we rely on exists since version 2.6.36 of the kernel, but somehow
# the KVM_API_VERSION does not reflect the change. We test for one of
print "Info: Compatible header file <linux/kvm.h> not found, " \
"disabling KVM support."
+# Check if the TUN/TAP driver is available.
+have_tuntap = conf.CheckHeader('linux/if_tun.h', '<>')
+if not have_tuntap:
+ print "Info: Compatible header file <linux/if_tun.h> not found."
+
# x86 needs support for xsave. We test for the structure here since we
# won't be able to run new tests by the time we know which ISA we're
# targeting.
False),
BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
- BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
- BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
+ BoolVariable('USE_PNG', 'Enable support for PNG images', have_png),
+ BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability',
+ False),
+ BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models',
+ have_kvm),
+ BoolVariable('USE_TUNTAP',
+ 'Enable using a tap device to bridge to the host network',
+ have_tuntap),
BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
all_protocols),
# These variables get exported to #defines in config/*.hh (see src/SConscript).
export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
- 'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL',
- 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']
+ 'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'USE_TUNTAP',
+ 'PROTOCOL', 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST',
+ 'USE_PNG']
###################################################
#
main.Append(BUILDERS = { 'ConfigFile' : config_builder })
+###################################################
+#
+# Builders for static and shared partially linked object files.
+#
+###################################################
+
+partial_static_builder = Builder(action=SCons.Defaults.LinkAction,
+ src_suffix='$OBJSUFFIX',
+ src_builder=['StaticObject', 'Object'],
+ LINKFLAGS='$PLINKFLAGS',
+ LIBS='')
+
+def partial_shared_emitter(target, source, env):
+ for tgt in target:
+ tgt.attributes.shared = 1
+ return (target, source)
+partial_shared_builder = Builder(action=SCons.Defaults.ShLinkAction,
+ emitter=partial_shared_emitter,
+ src_suffix='$SHOBJSUFFIX',
+ src_builder='SharedObject',
+ SHLINKFLAGS='$PSHLINKFLAGS',
+ LIBS='')
+
+main.Append(BUILDERS = { 'PartialShared' : partial_shared_builder,
+ 'PartialStatic' : partial_static_builder })
+
# builds in ext are shared across all configs in the build root.
ext_dir = abspath(joinpath(str(main.root), 'ext'))
+ext_build_dirs = []
for root, dirs, files in os.walk(ext_dir):
if 'SConscript' in files:
build_dir = os.path.relpath(root, ext_dir)
+ ext_build_dirs.append(build_dir)
main.SConscript(joinpath(root, 'SConscript'),
variant_dir=joinpath(build_root, build_dir))
+main.Prepend(CPPPATH=Dir('ext/pybind11/include/'))
+
###################################################
#
-# This function is used to set up a directory with switching headers
+# This builder and wrapper method are used to set up a directory with
+# switching headers. These are headers that live in a generic location and
+# include more specific headers from a directory chosen at build time
+# based on the current build settings.
#
###################################################
-main['ALL_ISA_LIST'] = all_isa_list
-main['ALL_GPU_ISA_LIST'] = all_gpu_isa_list
-all_isa_deps = {}
-def make_switching_dir(dname, switch_headers, env):
- # Generate the header. target[0] is the full path of the output
- # header to generate. 'source' is a dummy variable, since we get the
- # list of ISAs from env['ALL_ISA_LIST'].
- def gen_switch_hdr(target, source, env):
- fname = str(target[0])
- isa = env['TARGET_ISA'].lower()
- try:
- f = open(fname, 'w')
- print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
- f.close()
- except IOError:
- print "Failed to create %s" % fname
- raise
-
- # Build SCons Action object. 'varlist' specifies env vars that this
- # action depends on; when env['ALL_ISA_LIST'] changes these actions
- # should get re-executed.
- switch_hdr_action = MakeAction(gen_switch_hdr,
- Transform("GENERATE"), varlist=['ALL_ISA_LIST'])
-
- # Instantiate actions for each header
- for hdr in switch_headers:
- env.Command(hdr, [], switch_hdr_action)
-
- isa_target = Dir('.').up().name.lower().replace('_', '-')
- env['PHONY_BASE'] = '#'+isa_target
- all_isa_deps[isa_target] = None
-
-Export('make_switching_dir')
-
-def make_gpu_switching_dir(dname, switch_headers, env):
- # Generate the header. target[0] is the full path of the output
- # header to generate. 'source' is a dummy variable, since we get the
- # list of ISAs from env['ALL_ISA_LIST'].
- def gen_switch_hdr(target, source, env):
- fname = str(target[0])
-
- isa = env['TARGET_GPU_ISA'].lower()
-
- try:
- f = open(fname, 'w')
- print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
- f.close()
- except IOError:
- print "Failed to create %s" % fname
- raise
-
- # Build SCons Action object. 'varlist' specifies env vars that this
- # action depends on; when env['ALL_ISA_LIST'] changes these actions
- # should get re-executed.
- switch_hdr_action = MakeAction(gen_switch_hdr,
- Transform("GENERATE"), varlist=['ALL_ISA_GPU_LIST'])
-
- # Instantiate actions for each header
- for hdr in switch_headers:
- env.Command(hdr, [], switch_hdr_action)
-
-Export('make_gpu_switching_dir')
-
-# all-isas -> all-deps -> all-environs -> all_targets
-main.Alias('#all-isas', [])
-main.Alias('#all-deps', '#all-isas')
-
-# Dummy target to ensure all environments are created before telling
-# SCons what to actually make (the command line arguments). We attach
-# them to the dependence graph after the environments are complete.
-ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
-def environsComplete(target, source, env):
- for t in ORIG_BUILD_TARGETS:
- main.Depends('#all-targets', t)
-
-# Each build/* switching_dir attaches its *-environs target to #all-environs.
-main.Append(BUILDERS = {'CompleteEnvirons' :
- Builder(action=MakeAction(environsComplete, None))})
-main.CompleteEnvirons('#all-environs', [])
-
-def doNothing(**ignored): pass
-main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})
-
-# The final target to which all the original targets ultimately get attached.
-main.Dummy('#all-targets', '#all-environs')
-BUILD_TARGETS[:] = ['#all-targets']
+def build_switching_header(target, source, env):
+ path = str(target[0])
+ subdir = str(source[0])
+ dp, fp = os.path.split(path)
+ dp = os.path.relpath(os.path.realpath(dp),
+ os.path.realpath(env['BUILDDIR']))
+ with open(path, 'w') as hdr:
+ print >>hdr, '#include "%s/%s/%s"' % (dp, subdir, fp)
+
+switching_header_action = MakeAction(build_switching_header,
+ Transform('GENERATE'))
+
+switching_header_builder = Builder(action=switching_header_action,
+ source_factory=Value,
+ single_source=True)
+
+main.Append(BUILDERS = { 'SwitchingHeader': switching_header_builder })
+
+def switching_headers(self, headers, source):
+ for header in headers:
+ self.SwitchingHeader(header, source)
+
+main.AddMethod(switching_headers, 'SwitchingHeaders')
###################################################
#
sticky_vars.files.append(current_vars_file)
if not GetOption('silent'):
print "Using saved variables file %s" % current_vars_file
+ elif variant_dir in ext_build_dirs:
+ # Things in ext are built without a variant directory.
+ continue
else:
# Build dir-specific variables file doesn't exist.
print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
print " FP results may deviate slightly from other platforms."
+ if not have_png and env['USE_PNG']:
+ print "Warning: <png.h> not available; " \
+ "forcing USE_PNG to False in", variant_dir + "."
+ env['USE_PNG'] = False
+
+ if env['USE_PNG']:
+ env.Append(LIBS=['png'])
+
if env['EFENCE']:
env.Append(LIBS=['efence'])
"target ISA combination"
env['USE_KVM'] = False
+ if env['USE_TUNTAP']:
+ if not have_tuntap:
+ print "Warning: Can't connect EtherTap with a tap device."
+ env['USE_TUNTAP'] = False
+
if env['BUILD_GPU']:
env.Append(CPPDEFINES=['BUILD_GPU'])
# one for each variant build (debug, opt, etc.)
SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
-def pairwise(iterable):
- "s -> (s0,s1), (s1,s2), (s2, s3), ..."
- a, b = itertools.tee(iterable)
- b.next()
- return itertools.izip(a, b)
-
-# Create false dependencies so SCons will parse ISAs, establish
-# dependencies, and setup the build Environments serially. Either
-# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
-# greater than 1. It appears to be standard race condition stuff; it
-# doesn't always fail, but usually, and the behaviors are different.
-# Every time I tried to remove this, builds would fail in some
-# creative new way. So, don't do that. You'll want to, though, because
-# tests/SConscript takes a long time to make its Environments.
-for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
- main.Depends('#%s-deps' % t2, '#%s-deps' % t1)
- main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
-
# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]