# -*- mode:python -*-
-# Copyright (c) 2013 ARM Limited
+# Copyright (c) 2013, 2015-2020 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#
-# Authors: Steve Reinhardt
-# Nathan Binkert
###################################################
#
#
###################################################
-# Check for recent-enough Python and SCons versions.
-try:
- # Really old versions of scons only take two options for the
- # function, so check once without the revision and once with the
- # revision, the first instance will fail for stuff other than
- # 0.98, and the second will fail for 0.98.0
- EnsureSConsVersion(0, 98)
- EnsureSConsVersion(0, 98, 1)
-except SystemExit, e:
- print """
-For more details, see:
- http://gem5.org/Dependencies
-"""
- raise
-
-# We ensure the python version early because because python-config
-# requires python 2.5
-try:
- EnsurePythonVersion(2, 5)
-except SystemExit, e:
- print """
-You can use a non-default installation of the Python interpreter by
-rearranging your PATH so that scons finds the non-default 'python' and
-'python-config' first.
-
-For more details, see:
- http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
-"""
- raise
+from __future__ import print_function
# Global Python includes
import itertools
import os
import re
+import shutil
import subprocess
import sys
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists, isdir, isfile
from os.path import join as joinpath, split as splitpath
+from re import match
# SCons includes
import SCons
import SCons.Node
-
-extra_python_paths = [
- Dir('src/python').srcnode().abspath, # gem5 includes
- Dir('ext/ply').srcnode().abspath, # ply is used by several files
- ]
-
-sys.path[1:1] = extra_python_paths
+import SCons.Node.FS
from m5.util import compareVersions, readCommand
-from m5.util.terminal import get_termcap
help_texts = {
"options" : "",
help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
help="Don't add color to abbreviated scons output")
+AddLocalOption('--with-cxx-config', dest='with_cxx_config',
+ action='store_true',
+ help="Build with support for C++-based configuration")
AddLocalOption('--default', dest='default', type='string', action='store',
help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
help='Disable style checking hooks')
+AddLocalOption('--gold-linker', dest='gold_linker', action='store_true',
+ help='Use the gold linker')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
help='Disable Link-Time Optimization for fast')
+AddLocalOption('--force-lto', dest='force_lto', action='store_true',
+ help='Use Link-Time Optimization instead of partial linking' +
+ ' when the compiler doesn\'t support using them together.')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
help='Print full tool command lines')
-
-termcap = get_termcap(GetOption('use_colors'))
+AddLocalOption('--without-python', dest='without_python',
+ action='store_true',
+ help='Build without Python configuration support')
+AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
+ action='store_true',
+ help='Disable linking against tcmalloc')
+AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
+ help='Build with Undefined Behavior Sanitizer if available')
+AddLocalOption('--with-asan', dest='with_asan', action='store_true',
+ help='Build with Address Sanitizer if available')
+
+from gem5_scons import Transform, error, warning
+
+if GetOption('no_lto') and GetOption('force_lto'):
+ error('--no-lto and --force-lto are mutually exclusive')
########################################################################
#
#
########################################################################
-# export TERM so that clang reports errors in color
-use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
- 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
- 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])
-
-use_prefixes = [
- "M5", # M5 configuration (e.g., path to kernels)
- "DISTCC_", # distcc (distributed compiler wrapper) configuration
- "CCACHE_", # ccache (caching compiler wrapper) configuration
- "CCC_", # clang static analyzer configuration
- ]
-
-use_env = {}
-for key,val in os.environ.iteritems():
- if key in use_vars or \
- any([key.startswith(prefix) for prefix in use_prefixes]):
- use_env[key] = val
-
-main = Environment(ENV=use_env)
-main.Decider('MD5-timestamp')
-main.root = Dir(".") # The current directory (where this file lives).
-main.srcdir = Dir("src") # The source directory
+main = Environment()
+
+from gem5_scons.util import get_termcap
+termcap = get_termcap()
main_dict_keys = main.Dictionary().keys()
# Check that we have a C/C++ compiler
if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
- print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
- Exit(1)
-
-# Check that swig is present
-if not 'SWIG' in main_dict_keys:
- print "swig is not installed (package swig on Ubuntu and RedHat)"
- Exit(1)
-
-# add useful python code PYTHONPATH so it can be used by subprocesses
-# as well
-main.AppendENVPath('PYTHONPATH', extra_python_paths)
-
-########################################################################
-#
-# Mercurial Stuff.
-#
-# If the gem5 directory is a mercurial repository, we should do some
-# extra things.
-#
-########################################################################
-
-hgdir = main.root.Dir(".hg")
-
-mercurial_style_message = """
-You're missing the gem5 style hook, which automatically checks your code
-against the gem5 style rules on hg commit and qrefresh commands. This
-script will now install the hook in your .hg/hgrc file.
-Press enter to continue, or ctrl-c to abort: """
-
-mercurial_style_hook = """
-# The following lines were automatically added by gem5/SConstruct
-# to provide the gem5 style-checking hooks
-[extensions]
-style = %s/util/style.py
-
-[hooks]
-pretxncommit.style = python:style.check_style
-pre-qrefresh.style = python:style.check_style
-# End of SConstruct additions
-
-""" % (main.root.abspath)
-
-mercurial_lib_not_found = """
-Mercurial libraries cannot be found, ignoring style hook. If
-you are a gem5 developer, please fix this and run the style
-hook. It is important.
-"""
-
-# Check for style hook and prompt for installation if it's not there.
-# Skip this if --ignore-style was specified, there's no .hg dir to
-# install a hook in, or there's no interactive terminal to prompt.
-if not GetOption('ignore_style') and hgdir.exists() and sys.stdin.isatty():
- style_hook = True
- try:
- from mercurial import ui
- ui = ui.ui()
- ui.readconfig(hgdir.File('hgrc').abspath)
- style_hook = ui.config('hooks', 'pretxncommit.style', None) and \
- ui.config('hooks', 'pre-qrefresh.style', None)
- except ImportError:
- print mercurial_lib_not_found
-
- if not style_hook:
- print mercurial_style_message,
- # continue unless user does ctrl-c/ctrl-d etc.
- try:
- raw_input()
- except:
- print "Input exception, exiting scons.\n"
- sys.exit(1)
- hgrc_path = '%s/.hg/hgrc' % main.root.abspath
- print "Adding style hook to", hgrc_path, "\n"
- try:
- hgrc = open(hgrc_path, 'a')
- hgrc.write(mercurial_style_hook)
- hgrc.close()
- except:
- print "Error updating", hgrc_path
- sys.exit(1)
-
+ error("No C++ compiler installed (package g++ on Ubuntu and RedHat)")
###################################################
#
for i in range(len(l)+offs, 0, -1):
if l[i] == elt:
return i
- raise ValueError, "element not found"
+ raise ValueError("element not found")
# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded. Paths will be interpreted
return [abspath(joinpath(root, expanduser(str(p))))
for p in path_list]
+def find_first_prog(prog_names):
+ """Find the absolute path to the first existing binary in prog_names"""
+
+ if not isinstance(prog_names, (list, tuple)):
+ prog_names = [ prog_names ]
+
+ for p in prog_names:
+ p = main.WhereIs(p)
+ if p is not None:
+ return p
+
+ return None
+
# Each target must have 'build' in the interior of the path; the
# directory below this will determine the build parameters. For
# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
try:
build_top = rfind(path_dirs, 'build', -2)
except:
- print "Error: no non-leaf 'build' dir found on target path", t
- Exit(1)
+ error("No non-leaf 'build' dir found on target path.", t)
this_build_root = joinpath('/',*path_dirs[:build_top+1])
if not build_root:
build_root = this_build_root
else:
if this_build_root != build_root:
- print "Error: build targets not under same build root\n"\
- " %s\n %s" % (build_root, this_build_root)
- Exit(1)
+ error("build targets not under same build root\n"
+ " %s\n %s" % (build_root, this_build_root))
variant_path = joinpath('/',*path_dirs[:build_top+2])
if variant_path not in variant_paths:
variant_paths.append(variant_path)
global_vars.AddVariables(
('CC', 'C compiler', environ.get('CC', main['CC'])),
('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
- ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
+ ('CCFLAGS_EXTRA', 'Extra C and C++ compiler flags', ''),
+ ('LDFLAGS_EXTRA', 'Extra linker flags', ''),
+ ('PYTHON_CONFIG', 'Python config binary to use',
+ [ 'python2.7-config', 'python-config' ]),
('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
('BATCH', 'Use batch pool for build and tests', False),
('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])
-def strip_build_path(path, env):
- path = str(path)
- variant_base = env['BUILDROOT'] + os.path.sep
- if path.startswith(variant_base):
- path = path[len(variant_base):]
- elif path.startswith('build/'):
- path = path[6:]
- return path
-
-# Generate a string of the form:
-# common/path/prefix/src1, src2 -> tgt1, tgt2
-# to print while building.
-class Transform(object):
- # all specific color settings should be here and nowhere else
- tool_color = termcap.Normal
- pfx_color = termcap.Yellow
- srcs_color = termcap.Yellow + termcap.Bold
- arrow_color = termcap.Blue + termcap.Bold
- tgts_color = termcap.Yellow + termcap.Bold
-
- def __init__(self, tool, max_sources=99):
- self.format = self.tool_color + (" [%8s] " % tool) \
- + self.pfx_color + "%s" \
- + self.srcs_color + "%s" \
- + self.arrow_color + " -> " \
- + self.tgts_color + "%s" \
- + termcap.Normal
- self.max_sources = max_sources
-
- def __call__(self, target, source, env, for_signature=None):
- # truncate source list according to max_sources param
- source = source[0:self.max_sources]
- def strip(f):
- return strip_build_path(str(f), env)
- if len(source) > 0:
- srcs = map(strip, source)
- else:
- srcs = ['']
- tgts = map(strip, target)
- # surprisingly, os.path.commonprefix is a dumb char-by-char string
- # operation that has nothing to do with paths.
- com_pfx = os.path.commonprefix(srcs + tgts)
- com_pfx_len = len(com_pfx)
- if com_pfx:
- # do some cleanup and sanity checking on common prefix
- if com_pfx[-1] == ".":
- # prefix matches all but file extension: ok
- # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
- com_pfx = com_pfx[0:-1]
- elif com_pfx[-1] == "/":
- # common prefix is directory path: OK
- pass
- else:
- src0_len = len(srcs[0])
- tgt0_len = len(tgts[0])
- if src0_len == com_pfx_len:
- # source is a substring of target, OK
- pass
- elif tgt0_len == com_pfx_len:
- # target is a substring of source, need to back up to
- # avoid empty string on RHS of arrow
- sep_idx = com_pfx.rfind(".")
- if sep_idx != -1:
- com_pfx = com_pfx[0:sep_idx]
- else:
- com_pfx = ''
- elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
- # still splitting at file extension: ok
- pass
- else:
- # probably a fluke; ignore it
- com_pfx = ''
- # recalculate length in case com_pfx was modified
- com_pfx_len = len(com_pfx)
- def fmt(files):
- f = map(lambda s: s[com_pfx_len:], files)
- return ', '.join(f)
- return self.format % (com_pfx, fmt(srcs), fmt(tgts))
-
-Export('Transform')
-
-# enable the regression script to use the termcap
-main['TERMCAP'] = termcap
+# Add shared top-level headers
+main.Prepend(CPPPATH=Dir('include'))
if GetOption('verbose'):
def MakeAction(action, string, *args, **kwargs):
main['CCCOMSTR'] = Transform("CC")
main['CXXCOMSTR'] = Transform("CXX")
main['ASCOMSTR'] = Transform("AS")
- main['SWIGCOMSTR'] = Transform("SWIG")
main['ARCOMSTR'] = Transform("AR", 0)
main['LINKCOMSTR'] = Transform("LINK", 0)
+ main['SHLINKCOMSTR'] = Transform("SHLINK", 0)
main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
main['M4COMSTR'] = Transform("M4")
main['SHCCCOMSTR'] = Transform("SHCC")
main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
if main['GCC'] + main['CLANG'] > 1:
- print 'Error: How can we have two at the same time?'
- Exit(1)
+ error('Two compilers enabled at once?')
# Set up default C++ compiler flags
if main['GCC'] or main['CLANG']:
# As gcc and clang share many flags, do the common parts here
main.Append(CCFLAGS=['-pipe'])
main.Append(CCFLAGS=['-fno-strict-aliasing'])
- # Enable -Wall and then disable the few warnings that we
- # consistently violate
- main.Append(CCFLAGS=['-Wall', '-Wno-sign-compare', '-Wundef'])
- # We always compile using C++11, but only gcc >= 4.7 and clang 3.1
- # actually use that name, so we stick with c++0x
- main.Append(CXXFLAGS=['-std=c++0x'])
- # Add selected sanity checks from -Wextra
- main.Append(CXXFLAGS=['-Wmissing-field-initializers',
- '-Woverloaded-virtual'])
+ # Enable -Wall and -Wextra and then disable the few warnings that
+ # we consistently violate
+ main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
+ '-Wno-sign-compare', '-Wno-unused-parameter'])
+ # We always compile using C++11
+ main.Append(CXXFLAGS=['-std=c++11'])
+ if sys.platform.startswith('freebsd'):
+ main.Append(CCFLAGS=['-I/usr/local/include'])
+ main.Append(CXXFLAGS=['-I/usr/local/include'])
+
+ main.Append(LINKFLAGS='-Wl,--as-needed')
+ main['FILTER_PSHLINKFLAGS'] = lambda x: str(x).replace(' -shared', '')
+ main['PSHLINKFLAGS'] = main.subst('${FILTER_PSHLINKFLAGS(SHLINKFLAGS)}')
+ if GetOption('gold_linker'):
+ main.Append(LINKFLAGS='-fuse-ld=gold')
+ main['PLINKFLAGS'] = main.get('LINKFLAGS')
+ shared_partial_flags = ['-r', '-nostdlib']
+ main.Append(PSHLINKFLAGS=shared_partial_flags)
+ main.Append(PLINKFLAGS=shared_partial_flags)
+
+ # Treat warnings as errors but white list some warnings that we
+ # want to allow (e.g., deprecation warnings).
+ main.Append(CCFLAGS=['-Werror',
+ '-Wno-error=deprecated-declarations',
+ '-Wno-error=deprecated',
+ ])
else:
- print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
- print "Don't know what compiler options to use for your compiler."
- print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
- print termcap.Yellow + ' version:' + termcap.Normal,
- if not CXX_version:
- print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
- termcap.Normal
- else:
- print CXX_version.replace('\n', '<nl>')
- print " If you're trying to use a compiler other than GCC"
- print " or clang, there appears to be something wrong with your"
- print " environment."
- print " "
- print " If you are trying to use a compiler other than those listed"
- print " above you will need to ease fix SConstruct and "
- print " src/SConscript to support that compiler."
- Exit(1)
+ error('\n'.join((
+ "Don't know what compiler options to use for your compiler.",
+ "compiler: " + main['CXX'],
+ "version: " + CXX_version.replace('\n', '<nl>') if
+ CXX_version else 'COMMAND NOT FOUND!',
+ "If you're trying to use a compiler other than GCC",
+ "or clang, there appears to be something wrong with your",
+ "environment.",
+ "",
+ "If you are trying to use a compiler other than those listed",
+ "above you will need to ease fix SConstruct and ",
+ "src/SConscript to support that compiler.")))
if main['GCC']:
- # Check for a supported version of gcc. >= 4.6 is chosen for its
+ # Check for a supported version of gcc. >= 4.8 is chosen for its
# level of c++11 support. See
- # http://gcc.gnu.org/projects/cxx0x.html for details. 4.6 is also
- # the first version with proper LTO support.
+ # http://gcc.gnu.org/projects/cxx0x.html for details.
gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
- if compareVersions(gcc_version, "4.6") < 0:
- print 'Error: gcc version 4.6 or newer required.'
- print ' Installed version:', gcc_version
+ if compareVersions(gcc_version, "4.8") < 0:
+ error('gcc version 4.8 or newer required.\n'
+ 'Installed version:', gcc_version)
Exit(1)
main['GCC_VERSION'] = gcc_version
- # gcc from version 4.8 and above generates "rep; ret" instructions
- # to avoid performance penalties on certain AMD chips. Older
- # assemblers detect this as an error, "Error: expecting string
- # instruction after `rep'"
- if compareVersions(gcc_version, "4.8") > 0:
- as_version = readCommand([main['AS'], '-v', '/dev/null'],
- exception=False).split()
-
- if not as_version or compareVersions(as_version[-1], "2.23") < 0:
- print termcap.Yellow + termcap.Bold + \
- 'Warning: This combination of gcc and binutils have' + \
- ' known incompatibilities.\n' + \
- ' If you encounter build problems, please update ' + \
- 'binutils to 2.23.' + \
- termcap.Normal
+ if compareVersions(gcc_version, '4.9') >= 0:
+ # Incremental linking with LTO is currently broken in gcc versions
+ # 4.9 and above. A version where everything works completely hasn't
+ # yet been identified.
+ #
+ # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67548
+ main['BROKEN_INCREMENTAL_LTO'] = True
+ if compareVersions(gcc_version, '6.0') >= 0:
+ # gcc versions 6.0 and greater accept an -flinker-output flag which
+ # selects what type of output the linker should generate. This is
+ # necessary for incremental lto to work, but is also broken in
+ # current versions of gcc. It may not be necessary in future
+ # versions. We add it here since it might be, and as a reminder that
+ # it exists. It's excluded if lto is being forced.
+ #
+ # https://gcc.gnu.org/gcc-6/changes.html
+ # https://gcc.gnu.org/ml/gcc-patches/2015-11/msg03161.html
+ # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=69866
+ if not GetOption('force_lto'):
+ main.Append(PSHLINKFLAGS='-flinker-output=rel')
+ main.Append(PLINKFLAGS='-flinker-output=rel')
+
+ disable_lto = GetOption('no_lto')
+ if not disable_lto and main.get('BROKEN_INCREMENTAL_LTO', False) and \
+ not GetOption('force_lto'):
+        warning('Your compiler doesn\'t support incremental linking '
+                'and lto at the same time, so lto is being disabled. To force '
+                'lto on anyway, use the --force-lto option. That will disable '
+                'partial linking.')
+ disable_lto = True
# Add the appropriate Link-Time Optimization (LTO) flags
# unless LTO is explicitly turned off. Note that these flags
# are only used by the fast target.
- if not GetOption('no_lto'):
+ if not disable_lto:
# Pass the LTO flag when compiling to produce GIMPLE
# output, we merely create the flags here and only append
- # them later/
+ # them later
main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
# Use the same amount of jobs for LTO as we are running
- # scons with, we hardcode the use of the linker plugin
- # which requires either gold or GNU ld >= 2.21
- main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs'),
- '-fuse-linker-plugin']
+ # scons with
+ main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
'-fno-builtin-realloc', '-fno-builtin-free'])
elif main['CLANG']:
- # Check for a supported version of clang, >= 3.0 is needed to
- # support similar features as gcc 4.6. See
+ # Check for a supported version of clang, >= 3.1 is needed to
+ # support similar features as gcc 4.8. See
# http://clang.llvm.org/cxx_status.html for details
clang_version_re = re.compile(".* version (\d+\.\d+)")
clang_version_match = clang_version_re.search(CXX_version)
if (clang_version_match):
clang_version = clang_version_match.groups()[0]
- if compareVersions(clang_version, "3.0") < 0:
- print 'Error: clang version 3.0 or newer required.'
- print ' Installed version:', clang_version
- Exit(1)
+ if compareVersions(clang_version, "3.1") < 0:
+ error('clang version 3.1 or newer required.\n'
+ 'Installed version:', clang_version)
else:
- print 'Error: Unable to determine clang version.'
- Exit(1)
+ error('Unable to determine clang version.')
- # clang has a few additional warnings that we disable,
- # tautological comparisons are allowed due to unsigned integers
- # being compared to constants that happen to be 0, and extraneous
+ # clang has a few additional warnings that we disable, extraneous
# parantheses are allowed due to Ruby's printing of the AST,
# finally self assignments are allowed as the generated CPU code
# is relying on this
- main.Append(CCFLAGS=['-Wno-tautological-compare',
- '-Wno-parentheses',
+ main.Append(CCFLAGS=['-Wno-parentheses',
'-Wno-self-assign',
# Some versions of libstdc++ (4.8?) seem to
# use struct hash and class hash
main.Append(CXXFLAGS=['-stdlib=libc++'])
main.Append(LIBS=['c++'])
-else:
- print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
- print "Don't know what compiler options to use for your compiler."
- print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
- print termcap.Yellow + ' version:' + termcap.Normal,
- if not CXX_version:
- print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
- termcap.Normal
+ # On FreeBSD we need libthr.
+ if sys.platform.startswith('freebsd'):
+ main.Append(LIBS=['thr'])
+
+# Add sanitizers flags
+sanitizers=[]
+if GetOption('with_ubsan'):
+ # Only gcc >= 4.9 supports UBSan, so check both the version
+ # and the command-line option before adding the compiler and
+ # linker flags.
+ if not main['GCC'] or compareVersions(main['GCC_VERSION'], '4.9') >= 0:
+ sanitizers.append('undefined')
+if GetOption('with_asan'):
+    # Available for gcc >= 4.8 or llvm >= 3.1, both of which are
+    # already required by the build system
+ sanitizers.append('address')
+if sanitizers:
+ sanitizers = ','.join(sanitizers)
+ if main['GCC'] or main['CLANG']:
+ main.Append(CCFLAGS=['-fsanitize=%s' % sanitizers,
+ '-fno-omit-frame-pointer'],
+ LINKFLAGS='-fsanitize=%s' % sanitizers)
else:
- print CXX_version.replace('\n', '<nl>')
- print " If you're trying to use a compiler other than GCC"
- print " or clang, there appears to be something wrong with your"
- print " environment."
- print " "
- print " If you are trying to use a compiler other than those listed"
- print " above you will need to ease fix SConstruct and "
- print " src/SConscript to support that compiler."
- Exit(1)
+ warning("Don't know how to enable %s sanitizer(s) for your "
+ "compiler." % sanitizers)
# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
# cygwin has some header file issues...
main.Append(CCFLAGS=["-Wno-uninitialized"])
-# Check for the protobuf compiler
-protoc_version = readCommand([main['PROTOC'], '--version'],
- exception='').split()
-
-# First two words should be "libprotoc x.y.z"
-if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
- print termcap.Yellow + termcap.Bold + \
- 'Warning: Protocol buffer compiler (protoc) not found.\n' + \
- ' Please install protobuf-compiler for tracing support.' + \
- termcap.Normal
- main['PROTOC'] = False
-else:
- # Based on the availability of the compress stream wrappers,
- # require 2.1.0
- min_protoc_version = '2.1.0'
- if compareVersions(protoc_version[1], min_protoc_version) < 0:
- print termcap.Yellow + termcap.Bold + \
- 'Warning: protoc version', min_protoc_version, \
- 'or newer required.\n' + \
- ' Installed version:', protoc_version[1], \
- termcap.Normal
- main['PROTOC'] = False
- else:
- # Attempt to determine the appropriate include path and
- # library path using pkg-config, that means we also need to
- # check for pkg-config. Note that it is possible to use
- # protobuf without the involvement of pkg-config. Later on we
- # check go a library config check and at that point the test
- # will fail if libprotobuf cannot be found.
- if readCommand(['pkg-config', '--version'], exception=''):
- try:
- # Attempt to establish what linking flags to add for protobuf
- # using pkg-config
- main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
- except:
- print termcap.Yellow + termcap.Bold + \
- 'Warning: pkg-config could not get protobuf flags.' + \
- termcap.Normal
-
-# Check for SWIG
-if not main.has_key('SWIG'):
- print 'Error: SWIG utility not found.'
- print ' Please install (see http://www.swig.org) and retry.'
- Exit(1)
-
-# Check for appropriate SWIG version
-swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
-# First 3 words should be "SWIG Version x.y.z"
-if len(swig_version) < 3 or \
- swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
- print 'Error determining SWIG version.'
- Exit(1)
-
-min_swig_version = '2.0.4'
-if compareVersions(swig_version[2], min_swig_version) < 0:
- print 'Error: SWIG version', min_swig_version, 'or newer required.'
- print ' Installed version:', swig_version[2]
- Exit(1)
-
-# Set up SWIG flags & scanner
-swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
-main.Append(SWIGFLAGS=swig_flags)
-
-# Check for 'timeout' from GNU coreutils. If present, regressions
-# will be run with a time limit.
-TIMEOUT_version = readCommand(['timeout', '--version'], exception=False)
-main['TIMEOUT'] = TIMEOUT_version and TIMEOUT_version.find('timeout') == 0
-
-# filter out all existing swig scanners, they mess up the dependency
-# stuff for some reason
-scanners = []
-for scanner in main['SCANNERS']:
- skeys = scanner.skeys
- if skeys == '.i':
- continue
- if isinstance(skeys, (list, tuple)) and '.i' in skeys:
- continue
+have_pkg_config = readCommand(['pkg-config', '--version'], exception='')
- scanners.append(scanner)
-
-# add the new swig scanner that we like better
-from SCons.Scanner import ClassicCPP as CPPScanner
-swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
-scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))
-
-# replace the scanners list that has what we want
-main['SCANNERS'] = scanners
-
-# Add a custom Check function to the Configure context so that we can
-# figure out if the compiler adds leading underscores to global
-# variables. This is needed for the autogenerated asm files that we
-# use for embedding the python code.
-def CheckLeading(context):
- context.Message("Checking for leading underscore in global variables...")
- # 1) Define a global variable called x from asm so the C compiler
- # won't change the symbol at all.
- # 2) Declare that variable.
- # 3) Use the variable
- #
- # If the compiler prepends an underscore, this will successfully
- # link because the external symbol 'x' will be called '_x' which
- # was defined by the asm statement. If the compiler does not
- # prepend an underscore, this will not successfully link because
- # '_x' will have been defined by assembly, while the C portion of
- # the code will be trying to use 'x'
- ret = context.TryLink('''
- asm(".globl _x; _x: .byte 0");
- extern int x;
- int main() { return x; }
- ''', extension=".c")
- context.env.Append(LEADING_UNDERSCORE=ret)
- context.Result(ret)
- return ret
+# Check for the protobuf compiler
+try:
+ main['HAVE_PROTOC'] = True
+ protoc_version = readCommand([main['PROTOC'], '--version']).split()
+
+ # First two words should be "libprotoc x.y.z"
+ if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
+ warning('Protocol buffer compiler (protoc) not found.\n'
+ 'Please install protobuf-compiler for tracing support.')
+ main['HAVE_PROTOC'] = False
+ else:
+ # Based on the availability of the compress stream wrappers,
+ # require 2.1.0
+ min_protoc_version = '2.1.0'
+ if compareVersions(protoc_version[1], min_protoc_version) < 0:
+ warning('protoc version', min_protoc_version,
+ 'or newer required.\n'
+ 'Installed version:', protoc_version[1])
+ main['HAVE_PROTOC'] = False
+ else:
+ # Attempt to determine the appropriate include path and
+ # library path using pkg-config, that means we also need to
+ # check for pkg-config. Note that it is possible to use
+ # protobuf without the involvement of pkg-config. Later on we
+            # do a library config check and at that point the test
+ # will fail if libprotobuf cannot be found.
+ if have_pkg_config:
+ try:
+ # Attempt to establish what linking flags to add for
+ # protobuf
+ # using pkg-config
+ main.ParseConfig(
+ 'pkg-config --cflags --libs-only-L protobuf')
+ except:
+ warning('pkg-config could not get protobuf flags.')
+except Exception as e:
+ warning('While checking protoc version:', str(e))
+ main['HAVE_PROTOC'] = False
+
+# Check for 'timeout' from GNU coreutils. If present, regressions will
+# be run with a time limit. We require version 8.13 since we rely on
+# support for the '--foreground' option.
+if sys.platform.startswith('freebsd'):
+ timeout_lines = readCommand(['gtimeout', '--version'],
+ exception='').splitlines()
+else:
+ timeout_lines = readCommand(['timeout', '--version'],
+ exception='').splitlines()
+# Get the first line and tokenize it
+timeout_version = timeout_lines[0].split() if timeout_lines else []
+main['TIMEOUT'] = timeout_version and \
+ compareVersions(timeout_version[-1], '8.13') >= 0
# Add a custom Check function to test for structure members.
def CheckMember(context, include, decl, member, include_quotes="<>"):
conf_dir = joinpath(build_root, '.scons_config'),
log_file = joinpath(build_root, 'scons_config.log'),
custom_tests = {
- 'CheckLeading' : CheckLeading,
'CheckMember' : CheckMember,
})
-# Check for leading underscores. Don't really need to worry either
-# way so don't need to check the return code.
-conf.CheckLeading()
-
# Check if we should compile a 64 bit binary on Mac OS X/Darwin
try:
import platform
# Cache build files in the supplied directory.
if main['M5_BUILD_CACHE']:
- print 'Using build cache located at', main['M5_BUILD_CACHE']
+ print('Using build cache located at', main['M5_BUILD_CACHE'])
CacheDir(main['M5_BUILD_CACHE'])
-# Find Python include and library directories for embedding the
-# interpreter. We rely on python-config to resolve the appropriate
-# includes and linker flags. ParseConfig does not seem to understand
-# the more exotic linker flags such as -Xlinker and -export-dynamic so
-# we add them explicitly below. If you want to link in an alternate
-# version of python, see above for instructions on how to invoke
-# scons with the appropriate PATH set.
-#
-# First we check if python2-config exists, else we use python-config
-python_config = readCommand(['which', 'python2-config'], exception='').strip()
-if not os.path.exists(python_config):
- python_config = readCommand(['which', 'python-config'],
- exception='').strip()
-py_includes = readCommand([python_config, '--includes'],
- exception='').split()
-# Strip the -I from the include folders before adding them to the
-# CPPPATH
-main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
-
-# Read the linker flags and split them into libraries and other link
-# flags. The libraries are added later through the call the CheckLib.
-py_ld_flags = readCommand([python_config, '--ldflags'], exception='').split()
-py_libs = []
-for lib in py_ld_flags:
- if not lib.startswith('-l'):
- main.Append(LINKFLAGS=[lib])
- else:
- lib = lib[2:]
- if lib not in py_libs:
- py_libs.append(lib)
-
-# verify that this stuff works
-if not conf.CheckHeader('Python.h', '<>'):
- print "Error: can't find Python.h header in", py_includes
- print "Install Python headers (package python-dev on Ubuntu and RedHat)"
- Exit(1)
-
-for lib in py_libs:
- if not conf.CheckLib(lib):
- print "Error: can't find library %s required by python" % lib
- Exit(1)
+main['USE_PYTHON'] = not GetOption('without_python')
+if main['USE_PYTHON']:
+ # Find Python include and library directories for embedding the
+ # interpreter. We rely on python-config to resolve the appropriate
+ # includes and linker flags. ParseConfig does not seem to understand
+ # the more exotic linker flags such as -Xlinker and -export-dynamic so
+ # we add them explicitly below. If you want to link in an alternate
+ # version of python, see above for instructions on how to invoke
+ # scons with the appropriate PATH set.
+
+ python_config = find_first_prog(main['PYTHON_CONFIG'])
+ if python_config is None:
+ error("Can't find a suitable python-config, tried %s" % \
+ main['PYTHON_CONFIG'])
+
+ print("Info: Using Python config: %s" % (python_config, ))
+ py_includes = readCommand([python_config, '--includes'],
+ exception='').split()
+ py_includes = filter(lambda s: match(r'.*\/include\/.*',s), py_includes)
+ # Strip the -I from the include folders before adding them to the
+ # CPPPATH
+ py_includes = map(lambda s: s[2:] if s.startswith('-I') else s, py_includes)
+ main.Append(CPPPATH=py_includes)
+
+ # Read the linker flags and split them into libraries and other link
+    # flags. The libraries are added later through the call to CheckLib.
+ py_ld_flags = readCommand([python_config, '--ldflags'],
+ exception='').split()
+ py_libs = []
+ for lib in py_ld_flags:
+ if not lib.startswith('-l'):
+ main.Append(LINKFLAGS=[lib])
+ else:
+ lib = lib[2:]
+ if lib not in py_libs:
+ py_libs.append(lib)
+
+ # verify that this stuff works
+ if not conf.CheckHeader('Python.h', '<>'):
+ error("Check failed for Python.h header in",
+ ' '.join(py_includes), "\n"
+ "Two possible reasons:\n"
+ "1. Python headers are not installed (You can install the "
+ "package python-dev on Ubuntu and RedHat)\n"
+ "2. SCons is using a wrong C compiler. This can happen if "
+ "CC has the wrong value.\n"
+ "CC = %s" % main['CC'])
+
+ for lib in py_libs:
+ if not conf.CheckLib(lib):
+ error("Can't find library %s required by python." % lib)
# On Solaris you need to use libsocket for socket ops
if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
- if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
- print "Can't find library with socket calls (e.g. accept())"
- Exit(1)
+ if not conf.CheckLibWithHeader('socket', 'sys/socket.h',
+ 'C++', 'accept(0,0,0);'):
+ error("Can't find library with socket calls (e.g. accept()).")
# Check for zlib. If the check passes, libz will be automatically
# added to the LIBS environment variable.
if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
- print 'Error: did not find needed zlib compression library '\
- 'and/or zlib.h header file.'
- print ' Please install zlib and try again.'
- Exit(1)
+ error('Did not find needed zlib compression library '
+ 'and/or zlib.h header file.\n'
+ 'Please install zlib and try again.')
# If we have the protobuf compiler, also make sure we have the
# development libraries. If the check passes, libprotobuf will be
# automatically added to the LIBS environment variable. After
# this, we can use the HAVE_PROTOBUF flag to determine if we have
# got both protoc and libprotobuf available.
-main['HAVE_PROTOBUF'] = main['PROTOC'] and \
+main['HAVE_PROTOBUF'] = main['HAVE_PROTOC'] and \
conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
+# Valgrind gets much less confused if you tell it when you're using
+# alternative stacks.
+main['HAVE_VALGRIND'] = conf.CheckCHeader('valgrind/valgrind.h')
+
# If we have the compiler but not the library, print another warning.
-if main['PROTOC'] and not main['HAVE_PROTOBUF']:
- print termcap.Yellow + termcap.Bold + \
- 'Warning: did not find protocol buffer library and/or headers.\n' + \
- ' Please install libprotobuf-dev for tracing support.' + \
- termcap.Normal
+if main['HAVE_PROTOC'] and not main['HAVE_PROTOBUF']:
+ warning('Did not find protocol buffer library and/or headers.\n'
+ 'Please install libprotobuf-dev for tracing support.')
# Check for librt.
have_posix_clock = \
conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
-if conf.CheckLib('tcmalloc'):
- main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
-elif conf.CheckLib('tcmalloc_minimal'):
- main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
-else:
- print termcap.Yellow + termcap.Bold + \
- "You can get a 12% performance improvement by installing tcmalloc "\
- "(libgoogle-perftools-dev package on Ubuntu or RedHat)." + \
- termcap.Normal
+if not GetOption('without_tcmalloc'):
+ if conf.CheckLib('tcmalloc'):
+ main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
+ elif conf.CheckLib('tcmalloc_minimal'):
+ main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
+ else:
+ warning("You can get a 12% performance improvement by "
+ "installing tcmalloc (libgoogle-perftools-dev package "
+ "on Ubuntu or RedHat).")
+
+
+# Detect back trace implementations. The last implementation in the
+# list will be used by default.
+backtrace_impls = [ "none" ]
+
+backtrace_checker = 'char temp;' + \
+ ' backtrace_symbols_fd((void*)&temp, 0, 0);'
+if conf.CheckLibWithHeader(None, 'execinfo.h', 'C', backtrace_checker):
+ backtrace_impls.append("glibc")
+elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
+ backtrace_checker):
+ # NetBSD and FreeBSD need libexecinfo.
+ backtrace_impls.append("glibc")
+ main.Append(LIBS=['execinfo'])
+
+if backtrace_impls[-1] == "none":
+ default_backtrace_impl = "none"
+ warning("No suitable back trace implementation found.")
if not have_posix_clock:
- print "Can't find library for POSIX clocks."
+ warning("Can't find library for POSIX clocks.")
# Check for <fenv.h> (C99 FP environment control)
have_fenv = conf.CheckHeader('fenv.h', '<>')
if not have_fenv:
- print "Warning: Header file <fenv.h> not found."
- print " This host has no IEEE FP rounding mode control."
+ warning("Header file <fenv.h> not found.\n"
+ "This host has no IEEE FP rounding mode control.")
+
+# Check for <png.h> (libpng library needed if wanting to dump
+# frame buffer image in png format)
+have_png = conf.CheckHeader('png.h', '<>')
+if not have_png:
+ warning("Header file <png.h> not found.\n"
+ "This host has no libpng library.\n"
+ "Disabling support for PNG framebuffers.")
# Check if we should enable KVM-based hardware virtualization. The API
# we rely on exists since version 2.6.36 of the kernel, but somehow
# the KVM_API_VERSION does not reflect the change. We test for one of
# the types as a fall back.
-have_kvm = conf.CheckHeader('linux/kvm.h', '<>') and \
- conf.CheckTypeSize('struct kvm_xsave', '#include <linux/kvm.h>') != 0
+have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
if not have_kvm:
- print "Info: Compatible header file <linux/kvm.h> not found, " \
- "disabling KVM support."
+ print("Info: Compatible header file <linux/kvm.h> not found, "
+ "disabling KVM support.")
+
+# Check if the TUN/TAP driver is available.
+have_tuntap = conf.CheckHeader('linux/if_tun.h', '<>')
+if not have_tuntap:
+ print("Info: Compatible header file <linux/if_tun.h> not found.")
+
+# x86 needs support for xsave. We test for the structure here since we
+# won't be able to run new tests by the time we know which ISA we're
+# targeting.
+have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
+ '#include <linux/kvm.h>') != 0
# Check if the requested target ISA is compatible with the host
def is_isa_kvm_compatible(isa):
- isa_comp_table = {
- "arm" : ( "armv7l" ),
- "x86" : ( "x86_64" ),
- }
try:
import platform
host_isa = platform.machine()
except:
- print "Warning: Failed to determine host ISA."
+ warning("Failed to determine host ISA.")
return False
- return host_isa in isa_comp_table.get(isa, [])
+ if not have_posix_timers:
+ warning("Can not enable KVM, host seems to lack support "
+ "for POSIX timers")
+ return False
+
+ if isa == "arm":
+ return host_isa in ( "armv7l", "aarch64" )
+ elif isa == "x86":
+ if host_isa != "x86_64":
+ return False
+
+ if not have_kvm_xsave:
+ warning("KVM on x86 requires xsave support in kernel headers.")
+ return False
+
+ return True
+ else:
+ return False
# Check if the exclude_host attribute is available. We want this to
main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
+def check_hdf5():
+ return \
+ conf.CheckLibWithHeader('hdf5', 'hdf5.h', 'C',
+ 'H5Fcreate("", 0, 0, 0);') and \
+ conf.CheckLibWithHeader('hdf5_cpp', 'H5Cpp.h', 'C++',
+ 'H5::H5File("", 0);')
+
+def check_hdf5_pkg(name):
+ print("Checking for %s using pkg-config..." % name, end="")
+ if not have_pkg_config:
+ print(" pkg-config not found")
+ return False
+
+ try:
+ main.ParseConfig('pkg-config --cflags-only-I --libs-only-L %s' % name)
+ print(" yes")
+ return True
+ except:
+ print(" no")
+ return False
+
+# Check if there is a pkg-config configuration for hdf5. If we find
+# it, setup the environment to enable linking and header inclusion. We
+# don't actually try to include any headers or link with hdf5 at this
+# stage.
+if not check_hdf5_pkg('hdf5-serial'):
+ check_hdf5_pkg('hdf5')
+
+# Check if the HDF5 libraries can be found. This check respects the
+# include path and library path provided by pkg-config. We perform
+# this check even if there isn't a pkg-config configuration for hdf5
+# since some installations don't use pkg-config.
+have_hdf5 = check_hdf5()
+if not have_hdf5:
+ print("Warning: Couldn't find any HDF5 C++ libraries. Disabling")
+ print(" HDF5 support.")
######################################################################
#
# Define the universe of supported ISAs
all_isa_list = [ ]
+all_gpu_isa_list = [ ]
Export('all_isa_list')
+Export('all_gpu_isa_list')
class CpuModel(object):
'''The CpuModel class encapsulates everything the ISA parser needs to
# Add self to dict
if name in CpuModel.dict:
- raise AttributeError, "CpuModel '%s' already registered" % name
+ raise AttributeError("CpuModel '%s' already registered" % name)
CpuModel.dict[name] = self
Export('CpuModel')
# Walk the tree and execute all SConsopts scripts that wil add to the
# above variables
if GetOption('verbose'):
- print "Reading SConsopts"
+ print("Reading SConsopts")
for bdir in [ base_dir ] + extras_dir_list:
if not isdir(bdir):
- print "Error: directory '%s' does not exist" % bdir
- Exit(1)
+ error("Directory '%s' does not exist." % bdir)
for root, dirs, files in os.walk(bdir):
if 'SConsopts' in files:
if GetOption('verbose'):
- print "Reading", joinpath(root, 'SConsopts')
+ print("Reading", joinpath(root, 'SConsopts'))
SConscript(joinpath(root, 'SConsopts'))
all_isa_list.sort()
+all_gpu_isa_list.sort()
sticky_vars.AddVariables(
EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
+ EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
ListVariable('CPU_MODELS', 'CPU models',
sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
sorted(CpuModel.dict.keys())),
BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
False),
- BoolVariable('SS_COMPATIBLE_FP',
- 'Make floating-point results compatible with SimpleScalar',
- False),
BoolVariable('USE_SSE2',
'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
False),
BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
- BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
- BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
+ BoolVariable('USE_PNG', 'Enable support for PNG images', have_png),
+ BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability',
+ False),
+ BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models',
+ have_kvm),
+ BoolVariable('USE_TUNTAP',
+ 'Enable using a tap device to bridge to the host network',
+ have_tuntap),
+ BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
all_protocols),
+ EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
+ backtrace_impls[-1], backtrace_impls),
+ ('NUMBER_BITS_PER_SET', 'Max elements in set (default 64)',
+ 64),
+ BoolVariable('USE_HDF5', 'Enable the HDF5 support', have_hdf5),
)
# These variables get exported to #defines in config/*.hh (see src/SConscript).
-export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'CP_ANNOTATE',
- 'USE_POSIX_CLOCK', 'PROTOCOL', 'HAVE_PROTOBUF',
- 'HAVE_PERF_ATTR_EXCLUDE_HOST']
+export_vars += ['USE_FENV', 'TARGET_ISA', 'TARGET_GPU_ISA', 'CP_ANNOTATE',
+ 'USE_POSIX_CLOCK', 'USE_KVM', 'USE_TUNTAP', 'PROTOCOL',
+ 'HAVE_PROTOBUF', 'HAVE_VALGRIND',
+ 'HAVE_PERF_ATTR_EXCLUDE_HOST', 'USE_PNG',
+ 'NUMBER_BITS_PER_SET', 'USE_HDF5']
###################################################
#
def build_config_file(target, source, env):
(variable, value) = [s.get_contents() for s in source]
f = file(str(target[0]), 'w')
- print >> f, '#define', variable, value
+ print('#define', variable, value, file=f)
f.close()
return None
main.Append(BUILDERS = { 'ConfigFile' : config_builder })
-# libelf build is shared across all configs in the build root.
-main.SConscript('ext/libelf/SConscript',
- variant_dir = joinpath(build_root, 'libelf'))
-
-# gzstream build is shared across all configs in the build root.
-main.SConscript('ext/gzstream/SConscript',
- variant_dir = joinpath(build_root, 'gzstream'))
-
-# libfdt build is shared across all configs in the build root.
-main.SConscript('ext/libfdt/SConscript',
- variant_dir = joinpath(build_root, 'libfdt'))
-
-# fputils build is shared across all configs in the build root.
-main.SConscript('ext/fputils/SConscript',
- variant_dir = joinpath(build_root, 'fputils'))
+###################################################
+#
+# Builders for static and shared partially linked object files.
+#
+###################################################
-# DRAMSim2 build is shared across all configs in the build root.
-main.SConscript('ext/dramsim2/SConscript',
- variant_dir = joinpath(build_root, 'dramsim2'))
+partial_static_builder = Builder(action=SCons.Defaults.LinkAction,
+ src_suffix='$OBJSUFFIX',
+ src_builder=['StaticObject', 'Object'],
+ LINKFLAGS='$PLINKFLAGS',
+ LIBS='')
+
+def partial_shared_emitter(target, source, env):
+ for tgt in target:
+ tgt.attributes.shared = 1
+ return (target, source)
+partial_shared_builder = Builder(action=SCons.Defaults.ShLinkAction,
+ emitter=partial_shared_emitter,
+ src_suffix='$SHOBJSUFFIX',
+ src_builder='SharedObject',
+ SHLINKFLAGS='$PSHLINKFLAGS',
+ LIBS='')
+
+main.Append(BUILDERS = { 'PartialShared' : partial_shared_builder,
+ 'PartialStatic' : partial_static_builder })
+
+def add_local_rpath(env, *targets):
+ '''Set up an RPATH for a library which lives in the build directory.
+
+ The construction environment variable BIN_RPATH_PREFIX should be set to
+ the relative path of the build directory starting from the location of the
+ binary.'''
+ for target in targets:
+ target = env.Entry(target)
+ if not isinstance(target, SCons.Node.FS.Dir):
+ target = target.dir
+ relpath = os.path.relpath(target.abspath, env['BUILDDIR'])
+ components = [
+ '\\$$ORIGIN',
+ '${BIN_RPATH_PREFIX}',
+ relpath
+ ]
+ env.Append(RPATH=[env.Literal(os.path.join(*components))])
+
+if sys.platform != "darwin":
+ main.Append(LINKFLAGS=Split('-z origin'))
+
+main.AddMethod(add_local_rpath, 'AddLocalRPATH')
+
+# builds in ext are shared across all configs in the build root.
+ext_dir = abspath(joinpath(str(main.root), 'ext'))
+ext_build_dirs = []
+for root, dirs, files in os.walk(ext_dir):
+ if 'SConscript' in files:
+ build_dir = os.path.relpath(root, ext_dir)
+ ext_build_dirs.append(build_dir)
+ main.SConscript(joinpath(root, 'SConscript'),
+ variant_dir=joinpath(build_root, build_dir))
+
+gdb_xml_dir = joinpath(ext_dir, 'gdb-xml')
+Export('gdb_xml_dir')
+
+main.Prepend(CPPPATH=Dir('ext/pybind11/include/'))
###################################################
#
-# This function is used to set up a directory with switching headers
+# This builder and wrapper method are used to set up a directory with
+# switching headers. Those are headers which are in a generic location and
+# that include more specific headers from a directory chosen at build time
+# based on the current build settings.
#
###################################################
-main['ALL_ISA_LIST'] = all_isa_list
-all_isa_deps = {}
-def make_switching_dir(dname, switch_headers, env):
- # Generate the header. target[0] is the full path of the output
- # header to generate. 'source' is a dummy variable, since we get the
- # list of ISAs from env['ALL_ISA_LIST'].
- def gen_switch_hdr(target, source, env):
- fname = str(target[0])
- isa = env['TARGET_ISA'].lower()
- try:
- f = open(fname, 'w')
- print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
- f.close()
- except IOError:
- print "Failed to create %s" % fname
- raise
-
- # Build SCons Action object. 'varlist' specifies env vars that this
- # action depends on; when env['ALL_ISA_LIST'] changes these actions
- # should get re-executed.
- switch_hdr_action = MakeAction(gen_switch_hdr,
- Transform("GENERATE"), varlist=['ALL_ISA_LIST'])
-
- # Instantiate actions for each header
- for hdr in switch_headers:
- env.Command(hdr, [], switch_hdr_action)
-
- isa_target = Dir('.').up().name.lower().replace('_', '-')
- env['PHONY_BASE'] = '#'+isa_target
- all_isa_deps[isa_target] = None
-
-Export('make_switching_dir')
-
-# all-isas -> all-deps -> all-environs -> all_targets
-main.Alias('#all-isas', [])
-main.Alias('#all-deps', '#all-isas')
-
-# Dummy target to ensure all environments are created before telling
-# SCons what to actually make (the command line arguments). We attach
-# them to the dependence graph after the environments are complete.
-ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
-def environsComplete(target, source, env):
- for t in ORIG_BUILD_TARGETS:
- main.Depends('#all-targets', t)
-
-# Each build/* switching_dir attaches its *-environs target to #all-environs.
-main.Append(BUILDERS = {'CompleteEnvirons' :
- Builder(action=MakeAction(environsComplete, None))})
-main.CompleteEnvirons('#all-environs', [])
-
-def doNothing(**ignored): pass
-main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})
-
-# The final target to which all the original targets ultimately get attached.
-main.Dummy('#all-targets', '#all-environs')
-BUILD_TARGETS[:] = ['#all-targets']
+def build_switching_header(target, source, env):
+ path = str(target[0])
+ subdir = str(source[0])
+ dp, fp = os.path.split(path)
+ dp = os.path.relpath(os.path.realpath(dp),
+ os.path.realpath(env['BUILDDIR']))
+ with open(path, 'w') as hdr:
+ print('#include "%s/%s/%s"' % (dp, subdir, fp), file=hdr)
+
+switching_header_action = MakeAction(build_switching_header,
+ Transform('GENERATE'))
+
+switching_header_builder = Builder(action=switching_header_action,
+ source_factory=Value,
+ single_source=True)
+
+main.Append(BUILDERS = { 'SwitchingHeader': switching_header_builder })
+
+def switching_headers(self, headers, source):
+ for header in headers:
+ self.SwitchingHeader(header, source)
+
+main.AddMethod(switching_headers, 'SwitchingHeaders')
###################################################
#
for variant_path in variant_paths:
if not GetOption('silent'):
- print "Building in", variant_path
+ print("Building in", variant_path)
# Make a copy of the build-root environment to use for this config.
env = main.Clone()
if isfile(current_vars_file):
sticky_vars.files.append(current_vars_file)
if not GetOption('silent'):
- print "Using saved variables file %s" % current_vars_file
+ print("Using saved variables file %s" % current_vars_file)
+ elif variant_dir in ext_build_dirs:
+ # Things in ext are built without a variant directory.
+ continue
else:
# Build dir-specific variables file doesn't exist.
if existing_files:
default_vars_file = existing_files[0]
sticky_vars.files.append(default_vars_file)
- print "Variables file %s not found,\n using defaults in %s" \
- % (current_vars_file, default_vars_file)
+ print("Variables file %s not found,\n using defaults in %s"
+ % (current_vars_file, default_vars_file))
else:
- print "Error: cannot find variables file %s or " \
- "default file(s) %s" \
- % (current_vars_file, ' or '.join(default_vars_files))
+ error("Cannot find variables file %s or default file(s) %s"
+ % (current_vars_file, ' or '.join(default_vars_files)))
Exit(1)
# Apply current variable settings to env
# Process variable settings.
if not have_fenv and env['USE_FENV']:
- print "Warning: <fenv.h> not available; " \
- "forcing USE_FENV to False in", variant_dir + "."
+ warning("<fenv.h> not available; forcing USE_FENV to False in",
+ variant_dir + ".")
env['USE_FENV'] = False
if not env['USE_FENV']:
- print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
- print " FP results may deviate slightly from other platforms."
+ warning("No IEEE FP rounding mode control in", variant_dir + ".\n"
+ "FP results may deviate slightly from other platforms.")
+
+ if not have_png and env['USE_PNG']:
+ warning("<png.h> not available; forcing USE_PNG to False in",
+ variant_dir + ".")
+ env['USE_PNG'] = False
+
+ if env['USE_PNG']:
+ env.Append(LIBS=['png'])
if env['EFENCE']:
env.Append(LIBS=['efence'])
if env['USE_KVM']:
if not have_kvm:
- print "Warning: Can not enable KVM, host seems to lack KVM support"
- env['USE_KVM'] = False
- elif not have_posix_timers:
- print "Warning: Can not enable KVM, host seems to lack support " \
- "for POSIX timers"
+ warning("Can not enable KVM, host seems to lack KVM support")
env['USE_KVM'] = False
elif not is_isa_kvm_compatible(env['TARGET_ISA']):
- print "Info: KVM support disabled due to unsupported host and " \
- "target ISA combination"
+ print("Info: KVM support disabled due to unsupported host and "
+ "target ISA combination")
env['USE_KVM'] = False
+ if env['USE_TUNTAP']:
+ if not have_tuntap:
+ warning("Can't connect EtherTap with a tap device.")
+ env['USE_TUNTAP'] = False
+
+ if env['BUILD_GPU']:
+ env.Append(CPPDEFINES=['BUILD_GPU'])
+
# Warn about missing optional functionality
if env['USE_KVM']:
if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
- print "Warning: perf_event headers lack support for the " \
- "exclude_host attribute. KVM instruction counts will " \
- "be inaccurate."
+ warning("perf_event headers lack support for the exclude_host "
+ "attribute. KVM instruction counts will be inaccurate.")
# Save sticky variable settings back to current variables file
sticky_vars.Save(current_vars_file, env)
if env['USE_SSE2']:
env.Append(CCFLAGS=['-msse2'])
+ env.Append(CCFLAGS='$CCFLAGS_EXTRA')
+ env.Append(LINKFLAGS='$LDFLAGS_EXTRA')
+
# The src/SConscript file sets up the build rules in 'env' according
# to the configured variables. It returns a list of environments,
# one for each variant build (debug, opt, etc.)
SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
-def pairwise(iterable):
- "s -> (s0,s1), (s1,s2), (s2, s3), ..."
- a, b = itertools.tee(iterable)
- b.next()
- return itertools.izip(a, b)
-
-# Create false dependencies so SCons will parse ISAs, establish
-# dependencies, and setup the build Environments serially. Either
-# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
-# greater than 1. It appears to be standard race condition stuff; it
-# doesn't always fail, but usually, and the behaviors are different.
-# Every time I tried to remove this, builds would fail in some
-# creative new way. So, don't do that. You'll want to, though, because
-# tests/SConscript takes a long time to make its Environments.
-for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
- main.Depends('#%s-deps' % t2, '#%s-deps' % t1)
- main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
-
# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]