3 # Copyright (c) 2013, 2015, 2016 ARM Limited
6 # The license below extends only to copyright in the software and shall
7 # not be construed as granting a license to any other intellectual
8 # property including but not limited to intellectual property relating
9 # to a hardware implementation of the functionality of the software
10 # licensed hereunder. You may use the software subject to the license
11 # terms below provided that you ensure that this notice is replicated
12 # unmodified and in its entirety in all distributions of the software,
13 # modified or unmodified, in source code or in binary form.
15 # Copyright (c) 2011 Advanced Micro Devices, Inc.
16 # Copyright (c) 2009 The Hewlett-Packard Development Company
17 # Copyright (c) 2004-2005 The Regents of The University of Michigan
18 # All rights reserved.
20 # Redistribution and use in source and binary forms, with or without
21 # modification, are permitted provided that the following conditions are
22 # met: redistributions of source code must retain the above copyright
23 # notice, this list of conditions and the following disclaimer;
24 # redistributions in binary form must reproduce the above copyright
25 # notice, this list of conditions and the following disclaimer in the
26 # documentation and/or other materials provided with the distribution;
27 # neither the name of the copyright holders nor the names of its
28 # contributors may be used to endorse or promote products derived from
29 # this software without specific prior written permission.
31 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
43 # Authors: Steve Reinhardt
46 ###################################################
48 # SCons top-level build description (SConstruct) file.
50 # While in this directory ('gem5'), just type 'scons' to build the default
51 # configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52 # to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53 # the optimized full-system version).
55 # You can build gem5 in a different directory as long as there is a
56 # 'build/<CONFIG>' somewhere along the target path. The build system
57 # expects that all configs under the same build directory are being
58 # built for the same host system.
62 # The following two commands are equivalent. The '-u' option tells
63 # scons to search up the directory tree for this SConstruct file.
64 # % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65 # % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
67 # The following two commands are equivalent and demonstrate building
68 # in a directory outside of the source tree. The '-C' option tells
69 # scons to chdir to the specified directory to find this SConstruct
71 # % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72 # % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
74 # You can use 'scons -H' to print scons options. If you're in this
75 # 'gem5' directory (or use -u or -C to tell scons where to find this
76 # file), you can use 'scons -h' to print all the gem5-specific build
79 ###################################################
81 # Check for recent-enough Python and SCons versions.
83 # Really old versions of scons only take two options for the
84 # function, so check once without the revision and once with the
85 # revision, the first instance will fail for stuff other than
86 # 0.98, and the second will fail for 0.98.0
87 EnsureSConsVersion(0, 98)
88 EnsureSConsVersion(0, 98, 1)
91 For more details, see:
92 http://gem5.org/Dependencies
96 # pybind11 requires python 2.7
98 EnsurePythonVersion(2, 7)
101 You can use a non-default installation of the Python interpreter by
102 rearranging your PATH so that scons finds the non-default 'python' and
103 'python-config' first.
105 For more details, see:
106 http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
110 # Global Python includes
from os import environ, mkdir
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists, isdir, isfile
from os.path import join as joinpath, split as splitpath
# Directories whose Python code should be importable during the build.
# They are spliced in just after sys.path[0] so they take precedence
# over any globally installed copies.
extra_python_paths = [
    Dir('src/python').srcnode().abspath,  # gem5 includes
    Dir('ext/ply').srcnode().abspath,     # ply is used by several files
]
sys.path[1:1] = extra_python_paths
134 from m5
.util
import compareVersions
, readCommand
135 from m5
.util
.terminal
import get_termcap
146 # There's a bug in scons in that (1) by default, the help texts from
147 # AddOption() are supposed to be displayed when you type 'scons -h'
148 # and (2) you can override the help displayed by 'scons -h' using the
149 # Help() function, but these two features are incompatible: once
150 # you've overridden the help text using Help(), there's no way to get
151 # at the help texts from AddOptions. See:
152 # http://scons.tigris.org/issues/show_bug.cgi?id=2356
153 # http://scons.tigris.org/issues/show_bug.cgi?id=2611
154 # This hack lets us extract the help text from AddOptions and
155 # re-inject it via Help(). Ideally someday this bug will be fixed and
156 # we can just use AddOption directly.
157 def AddLocalOption(*args
, **kwargs
):
160 help = " " + ", ".join(args
)
163 if length
>= col_width
:
164 help += "\n" + " " * col_width
166 help += " " * (col_width
- length
)
167 help += kwargs
["help"]
168 help_texts
["options"] += help + "\n"
170 AddOption(*args
, **kwargs
)
# Command-line flags understood by this build.  NOTE(review): the
# 'action' keyword for --with-cxx-config, --without-python and
# --without-tcmalloc was reconstructed (all sibling flags use
# store_true) — confirm against upstream.
AddLocalOption('--colors', dest='use_colors', action='store_true',
               help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
               help="Don't add color to abbreviated scons output")
AddLocalOption('--with-cxx-config', dest='with_cxx_config',
               action='store_true',
               help="Build with support for C++-based configuration")
AddLocalOption('--default', dest='default', type='string', action='store',
               help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
               help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
               help='Disable Link-Time Optimization for fast')
AddLocalOption('--force-lto', dest='force_lto', action='store_true',
               help='Use Link-Time Optimization instead of partial linking' +
               ' when the compiler doesn\'t support using them together.')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
               help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
               help='Print full tool command lines')
AddLocalOption('--without-python', dest='without_python',
               action='store_true',
               help='Build without Python configuration support')
AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
               action='store_true',
               help='Disable linking against tcmalloc')
AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
               help='Build with Undefined Behavior Sanitizer if available')
AddLocalOption('--with-asan', dest='with_asan', action='store_true',
               help='Build with Address Sanitizer if available')
203 if GetOption('no_lto') and GetOption('force_lto'):
204 print '--no-lto and --force-lto are mutually exclusive'
207 termcap
= get_termcap(GetOption('use_colors'))
209 ########################################################################
211 # Set up the main build environment.
213 ########################################################################
215 # export TERM so that clang reports errors in color
# Host environment variable names that are forwarded into the build
# environment (filtered by the loop over os.environ below).
use_vars = {
    'AR', 'AS', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
    'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
    'PYTHONPATH', 'RANLIB', 'TERM',
}
# Prefixes of host environment variables forwarded into the build
# environment.  NOTE(review): the sequence's opening and closing lines
# were reconstructed — confirm against upstream SConstruct.
use_prefixes = (
    "ASAN_",            # address sanitizer symbolizer path and settings
    "CCACHE_",          # ccache (caching compiler wrapper) configuration
    "CCC_",             # clang static analyzer configuration
    "DISTCC_",          # distcc (distributed compiler wrapper) configuration
    "INCLUDE_SERVER_",  # distcc pump server settings
    "M5",               # M5 configuration (e.g., path to kernels)
)
230 for key
,val
in sorted(os
.environ
.iteritems()):
231 if key
in use_vars
or \
232 any([key
.startswith(prefix
) for prefix
in use_prefixes
]):
235 # Tell scons to avoid implicit command dependencies to avoid issues
236 # with the param wrappes being compiled twice (see
237 # http://scons.tigris.org/issues/show_bug.cgi?id=2811)
main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
main.Decider('MD5-timestamp')
main.root = Dir(".")      # The current directory (where this file lives).
main.srcdir = Dir("src")  # The source directory

main_dict_keys = main.Dictionary().keys()
245 # Check that we have a C/C++ compiler
246 if not ('CC' in main_dict_keys
and 'CXX' in main_dict_keys
):
247 print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
250 # add useful python code PYTHONPATH so it can be used by subprocesses
252 main
.AppendENVPath('PYTHONPATH', extra_python_paths
)
254 ########################################################################
258 # If the gem5 directory is a mercurial repository, we should do some
261 ########################################################################
263 hgdir
= main
.root
.Dir(".hg")
267 You're missing the gem5 style hook, which automatically checks your code
268 against the gem5 style rules on %s.
269 This script will now install the hook in your %s.
270 Press enter to continue, or ctrl-c to abort: """
272 mercurial_style_message
= """
273 You're missing the gem5 style hook, which automatically checks your code
274 against the gem5 style rules on hg commit and qrefresh commands.
275 This script will now install the hook in your .hg/hgrc file.
276 Press enter to continue, or ctrl-c to abort: """
278 git_style_message
= """
279 You're missing the gem5 style or commit message hook. These hooks help
280 to ensure that your code follows gem5's style rules on git commit.
281 This script will now install the hook in your .git/hooks/ directory.
282 Press enter to continue, or ctrl-c to abort: """
284 mercurial_style_upgrade_message
= """
285 Your Mercurial style hooks are not up-to-date. This script will now
286 try to automatically update them. A backup of your hgrc will be saved
288 Press enter to continue, or ctrl-c to abort: """
290 mercurial_style_hook
= """
291 # The following lines were automatically added by gem5/SConstruct
292 # to provide the gem5 style-checking hooks
294 hgstyle = %s/util/hgstyle.py
297 pretxncommit.style = python:hgstyle.check_style
298 pre-qrefresh.style = python:hgstyle.check_style
299 # End of SConstruct additions
301 """ % (main
.root
.abspath
)
303 mercurial_lib_not_found
= """
304 Mercurial libraries cannot be found, ignoring style hook. If
305 you are a gem5 developer, please fix this and run the style
306 hook. It is important.
309 # Check for style hook and prompt for installation if it's not there.
310 # Skip this if --ignore-style was specified, there's no interactive
311 # terminal to prompt, or no recognized revision control system can be
313 ignore_style
= GetOption('ignore_style') or not sys
.stdin
.isatty()
315 # Try wire up Mercurial to the style hooks
316 if not ignore_style
and hgdir
.exists():
318 style_hooks
= tuple()
319 hgrc
= hgdir
.File('hgrc')
320 hgrc_old
= hgdir
.File('hgrc.old')
322 from mercurial
import ui
324 ui
.readconfig(hgrc
.abspath
)
325 style_hooks
= (ui
.config('hooks', 'pretxncommit.style', None),
326 ui
.config('hooks', 'pre-qrefresh.style', None))
327 style_hook
= all(style_hooks
)
328 style_extension
= ui
.config('extensions', 'style', None)
330 print mercurial_lib_not_found
332 if "python:style.check_style" in style_hooks
:
333 # Try to upgrade the style hooks
334 print mercurial_style_upgrade_message
335 # continue unless user does ctrl-c/ctrl-d etc.
339 print "Input exception, exiting scons.\n"
341 shutil
.copyfile(hgrc
.abspath
, hgrc_old
.abspath
)
342 re_style_hook
= re
.compile(r
"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
343 re_style_extension
= re
.compile("style\s*=\s*([^#\s]+).*")
344 old
, new
= open(hgrc_old
.abspath
, 'r'), open(hgrc
.abspath
, 'w')
346 m_hook
= re_style_hook
.match(l
)
347 m_ext
= re_style_extension
.match(l
)
349 hook
, check
= m_hook
.groups()
350 if check
!= "python:style.check_style":
351 print "Warning: %s.style is using a non-default " \
352 "checker: %s" % (hook
, check
)
353 if hook
not in ("pretxncommit", "pre-qrefresh"):
354 print "Warning: Updating unknown style hook: %s" % hook
356 l
= "%s.style = python:hgstyle.check_style\n" % hook
357 elif m_ext
and m_ext
.group(1) == style_extension
:
358 l
= "hgstyle = %s/util/hgstyle.py\n" % main
.root
.abspath
362 print mercurial_style_message
,
363 # continue unless user does ctrl-c/ctrl-d etc.
367 print "Input exception, exiting scons.\n"
369 hgrc_path
= '%s/.hg/hgrc' % main
.root
.abspath
370 print "Adding style hook to", hgrc_path
, "\n"
372 with
open(hgrc_path
, 'a') as f
:
373 f
.write(mercurial_style_hook
)
375 print "Error updating", hgrc_path
378 def install_git_style_hooks():
380 gitdir
= Dir(readCommand(
381 ["git", "rev-parse", "--git-dir"]).strip("\n"))
383 print "Warning: Failed to find git repo directory: %s" % e
386 git_hooks
= gitdir
.Dir("hooks")
387 def hook_exists(hook_name
):
388 hook
= git_hooks
.File(hook_name
)
391 def hook_install(hook_name
, script
):
392 hook
= git_hooks
.File(hook_name
)
394 print "Warning: Can't install %s, hook already exists." % hook_name
398 print "Warning: Removing broken symlink for hook %s." % hook_name
399 os
.unlink(hook
.get_abspath())
401 if not git_hooks
.exists():
402 mkdir(git_hooks
.get_abspath())
405 abs_symlink_hooks
= git_hooks
.islink() and \
406 os
.path
.isabs(os
.readlink(git_hooks
.get_abspath()))
408 # Use a relative symlink if the hooks live in the source directory,
409 # and the hooks directory is not a symlink to an absolute path.
410 if hook
.is_under(main
.root
) and not abs_symlink_hooks
:
411 script_path
= os
.path
.relpath(
412 os
.path
.realpath(script
.get_abspath()),
413 os
.path
.realpath(hook
.Dir(".").get_abspath()))
415 script_path
= script
.get_abspath()
418 os
.symlink(script_path
, hook
.get_abspath())
420 print "Error updating git %s hook" % hook_name
423 if hook_exists("pre-commit") and hook_exists("commit-msg"):
426 print git_style_message
,
430 print "Input exception, exiting scons.\n"
433 git_style_script
= File("util/git-pre-commit.py")
434 git_msg_script
= File("ext/git-commit-msg")
436 hook_install("pre-commit", git_style_script
)
437 hook_install("commit-msg", git_msg_script
)
439 # Try to wire up git to the style hooks
440 if not ignore_style
and main
.root
.Entry(".git").exists():
441 install_git_style_hooks()
443 ###################################################
445 # Figure out which configurations to set up based on the path(s) of
448 ###################################################
450 # Find default configuration & binary.
451 Default(environ
.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
# helper function: find last occurrence of element in list
def rfind(l, elt, offs=-1):
    """Return the index of the last occurrence of elt in l.

    offs shifts the (inclusive) starting point of the backwards scan:
    the default of -1 starts at the last element, -2 skips the last
    element, and so on.  Raises ValueError if elt is not found.
    """
    # Scan down to and including index 0.  The previous stop value of 0
    # (exclusive) meant a match in the very first slot could never be
    # found; callers split absolute paths, whose first component is '',
    # so their behavior is unchanged by this fix.
    for i in range(len(l) + offs, -1, -1):
        if l[i] == elt:
            return i
    raise ValueError("element not found")
# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded. Paths will be interpreted
# relative to the launch directory unless a different root is provided
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    # NOTE(review): the comprehension's iteration clause was
    # reconstructed from context — confirm against upstream.
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]
467 # Each target must have 'build' in the interior of the path; the
468 # directory below this will determine the build parameters. For
469 # example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
470 # recognize that ALPHA_SE specifies the configuration because it
471 # follow 'build' in the build path.
473 # The funky assignment to "[:]" is needed to replace the list contents
474 # in place rather than reassign the symbol to a new list, which
475 # doesn't work (obviously!).
476 BUILD_TARGETS
[:] = makePathListAbsolute(BUILD_TARGETS
)
478 # Generate a list of the unique build roots and configs that the
479 # collected targets reference.
482 for t
in BUILD_TARGETS
:
483 path_dirs
= t
.split('/')
485 build_top
= rfind(path_dirs
, 'build', -2)
487 print "Error: no non-leaf 'build' dir found on target path", t
489 this_build_root
= joinpath('/',*path_dirs
[:build_top
+1])
491 build_root
= this_build_root
493 if this_build_root
!= build_root
:
494 print "Error: build targets not under same build root\n"\
495 " %s\n %s" % (build_root
, this_build_root
)
497 variant_path
= joinpath('/',*path_dirs
[:build_top
+2])
498 if variant_path
not in variant_paths
:
499 variant_paths
.append(variant_path
)
501 # Make sure build_root exists (might not if this is the first build there)
502 if not isdir(build_root
):
504 main
['BUILDROOT'] = build_root
508 main
.SConsignFile(joinpath(build_root
, "sconsign"))
510 # Default duplicate option is to use hard links, but this messes up
511 # when you use emacs to edit a file in the target dir, as emacs moves
512 # file to file~ then copies to file, breaking the link. Symbolic
513 # (soft) links work better.
514 main
.SetOption('duplicate', 'soft-copy')
# Set up global sticky variables... these are common to an entire build
# tree (not specific to a particular build like ALPHA_SE)
global_vars_file = joinpath(build_root, 'variables.global')

global_vars = Variables(global_vars_file, args=ARGUMENTS)

global_vars.AddVariables(
    ('CC', 'C compiler', environ.get('CC', main['CC'])),
    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
    ('BATCH', 'Use batch pool for build and tests', False),
    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
    ('EXTRAS', 'Add extra directories to the compilation', '')
    )
535 # Update main environment with values from ARGUMENTS & global_vars_file
536 global_vars
.Update(main
)
537 help_texts
["global_vars"] += global_vars
.GenerateHelpText(main
)
539 # Save sticky variable settings back to current variables file
540 global_vars
.Save(global_vars_file
, main
)
542 # Parse EXTRAS variable to build list of all directories where we're
543 # look for sources etc. This list is exported as extras_dir_list.
544 base_dir
= main
.srcdir
.abspath
546 extras_dir_list
= makePathListAbsolute(main
['EXTRAS'].split(':'))
551 Export('extras_dir_list')
553 # the ext directory should be on the #includes path
554 main
.Append(CPPPATH
=[Dir('ext')])
556 # Add shared top-level headers
557 main
.Prepend(CPPPATH
=Dir('include'))
def strip_build_path(path, env):
    """Strip the build root (or a leading 'build/') prefix from path.

    NOTE(review): the str() coercion and the tail of the elif branch
    were reconstructed from context — confirm against upstream.
    """
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path
568 # Generate a string of the form:
569 # common/path/prefix/src1, src2 -> tgt1, tgt2
570 # to print while building.
571 class Transform(object):
572 # all specific color settings should be here and nowhere else
573 tool_color
= termcap
.Normal
574 pfx_color
= termcap
.Yellow
575 srcs_color
= termcap
.Yellow
+ termcap
.Bold
576 arrow_color
= termcap
.Blue
+ termcap
.Bold
577 tgts_color
= termcap
.Yellow
+ termcap
.Bold
579 def __init__(self
, tool
, max_sources
=99):
580 self
.format
= self
.tool_color
+ (" [%8s] " % tool
) \
581 + self
.pfx_color
+ "%s" \
582 + self
.srcs_color
+ "%s" \
583 + self
.arrow_color
+ " -> " \
584 + self
.tgts_color
+ "%s" \
586 self
.max_sources
= max_sources
588 def __call__(self
, target
, source
, env
, for_signature
=None):
589 # truncate source list according to max_sources param
590 source
= source
[0:self
.max_sources
]
592 return strip_build_path(str(f
), env
)
594 srcs
= map(strip
, source
)
597 tgts
= map(strip
, target
)
598 # surprisingly, os.path.commonprefix is a dumb char-by-char string
599 # operation that has nothing to do with paths.
600 com_pfx
= os
.path
.commonprefix(srcs
+ tgts
)
601 com_pfx_len
= len(com_pfx
)
603 # do some cleanup and sanity checking on common prefix
604 if com_pfx
[-1] == ".":
605 # prefix matches all but file extension: ok
606 # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
607 com_pfx
= com_pfx
[0:-1]
608 elif com_pfx
[-1] == "/":
609 # common prefix is directory path: OK
612 src0_len
= len(srcs
[0])
613 tgt0_len
= len(tgts
[0])
614 if src0_len
== com_pfx_len
:
615 # source is a substring of target, OK
617 elif tgt0_len
== com_pfx_len
:
618 # target is a substring of source, need to back up to
619 # avoid empty string on RHS of arrow
620 sep_idx
= com_pfx
.rfind(".")
622 com_pfx
= com_pfx
[0:sep_idx
]
625 elif src0_len
> com_pfx_len
and srcs
[0][com_pfx_len
] == ".":
626 # still splitting at file extension: ok
629 # probably a fluke; ignore it
631 # recalculate length in case com_pfx was modified
632 com_pfx_len
= len(com_pfx
)
634 f
= map(lambda s
: s
[com_pfx_len
:], files
)
636 return self
.format
% (com_pfx
, fmt(srcs
), fmt(tgts
))
640 # enable the regression script to use the termcap
641 main
['TERMCAP'] = termcap
643 if GetOption('verbose'):
644 def MakeAction(action
, string
, *args
, **kwargs
):
645 return Action(action
, *args
, **kwargs
)
648 main
['CCCOMSTR'] = Transform("CC")
649 main
['CXXCOMSTR'] = Transform("CXX")
650 main
['ASCOMSTR'] = Transform("AS")
651 main
['ARCOMSTR'] = Transform("AR", 0)
652 main
['LINKCOMSTR'] = Transform("LINK", 0)
653 main
['SHLINKCOMSTR'] = Transform("SHLINK", 0)
654 main
['RANLIBCOMSTR'] = Transform("RANLIB", 0)
655 main
['M4COMSTR'] = Transform("M4")
656 main
['SHCCCOMSTR'] = Transform("SHCC")
657 main
['SHCXXCOMSTR'] = Transform("SHCXX")
# Initialize the Link-Time Optimization (LTO) flags
main['LTO_CCFLAGS'] = []
main['LTO_LDFLAGS'] = []

# According to the readme, tcmalloc works best if the compiler doesn't
# assume that we're using the builtin malloc and friends. These flags
# are compiler-specific, so we need to set them after we detect which
# compiler we're using.
main['TCMALLOC_CCFLAGS'] = []
# Probe the configured C++ compiler so we can tell gcc and clang apart.
CXX_version = readCommand([main['CXX'], '--version'], exception=False)
CXX_V = readCommand([main['CXX'], '-V'], exception=False)

main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
675 if main
['GCC'] + main
['CLANG'] > 1:
676 print 'Error: How can we have two at the same time?'
679 # Set up default C++ compiler flags
680 if main
['GCC'] or main
['CLANG']:
681 # As gcc and clang share many flags, do the common parts here
682 main
.Append(CCFLAGS
=['-pipe'])
683 main
.Append(CCFLAGS
=['-fno-strict-aliasing'])
684 # Enable -Wall and -Wextra and then disable the few warnings that
685 # we consistently violate
686 main
.Append(CCFLAGS
=['-Wall', '-Wundef', '-Wextra',
687 '-Wno-sign-compare', '-Wno-unused-parameter'])
688 # We always compile using C++11
689 main
.Append(CXXFLAGS
=['-std=c++11'])
690 if sys
.platform
.startswith('freebsd'):
691 main
.Append(CCFLAGS
=['-I/usr/local/include'])
692 main
.Append(CXXFLAGS
=['-I/usr/local/include'])
694 main
['FILTER_PSHLINKFLAGS'] = lambda x
: str(x
).replace(' -shared', '')
695 main
['PSHLINKFLAGS'] = main
.subst('${FILTER_PSHLINKFLAGS(SHLINKFLAGS)}')
696 main
['PLINKFLAGS'] = main
.subst('${LINKFLAGS}')
697 shared_partial_flags
= ['-r', '-nostdlib']
698 main
.Append(PSHLINKFLAGS
=shared_partial_flags
)
699 main
.Append(PLINKFLAGS
=shared_partial_flags
)
701 print termcap
.Yellow
+ termcap
.Bold
+ 'Error' + termcap
.Normal
,
702 print "Don't know what compiler options to use for your compiler."
703 print termcap
.Yellow
+ ' compiler:' + termcap
.Normal
, main
['CXX']
704 print termcap
.Yellow
+ ' version:' + termcap
.Normal
,
706 print termcap
.Yellow
+ termcap
.Bold
+ "COMMAND NOT FOUND!" +\
709 print CXX_version
.replace('\n', '<nl>')
710 print " If you're trying to use a compiler other than GCC"
711 print " or clang, there appears to be something wrong with your"
712 print " environment."
714 print " If you are trying to use a compiler other than those listed"
715 print " above you will need to ease fix SConstruct and "
716 print " src/SConscript to support that compiler."
720 # Check for a supported version of gcc. >= 4.8 is chosen for its
721 # level of c++11 support. See
722 # http://gcc.gnu.org/projects/cxx0x.html for details.
723 gcc_version
= readCommand([main
['CXX'], '-dumpversion'], exception
=False)
724 if compareVersions(gcc_version
, "4.8") < 0:
725 print 'Error: gcc version 4.8 or newer required.'
726 print ' Installed version:', gcc_version
729 main
['GCC_VERSION'] = gcc_version
731 if compareVersions(gcc_version
, '4.9') >= 0:
732 # Incremental linking with LTO is currently broken in gcc versions
733 # 4.9 and above. A version where everything works completely hasn't
734 # yet been identified.
736 # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67548
737 main
['BROKEN_INCREMENTAL_LTO'] = True
738 if compareVersions(gcc_version
, '6.0') >= 0:
739 # gcc versions 6.0 and greater accept an -flinker-output flag which
740 # selects what type of output the linker should generate. This is
741 # necessary for incremental lto to work, but is also broken in
742 # current versions of gcc. It may not be necessary in future
743 # versions. We add it here since it might be, and as a reminder that
744 # it exists. It's excluded if lto is being forced.
746 # https://gcc.gnu.org/gcc-6/changes.html
747 # https://gcc.gnu.org/ml/gcc-patches/2015-11/msg03161.html
748 # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=69866
749 if not GetOption('force_lto'):
750 main
.Append(PSHLINKFLAGS
='-flinker-output=rel')
751 main
.Append(PLINKFLAGS
='-flinker-output=rel')
753 # gcc from version 4.8 and above generates "rep; ret" instructions
754 # to avoid performance penalties on certain AMD chips. Older
755 # assemblers detect this as an error, "Error: expecting string
756 # instruction after `rep'"
757 as_version_raw
= readCommand([main
['AS'], '-v', '/dev/null',
759 exception
=False).split()
761 # version strings may contain extra distro-specific
762 # qualifiers, so play it safe and keep only what comes before
764 as_version
= as_version_raw
[-1].split('-')[0] if as_version_raw
else None
766 if not as_version
or compareVersions(as_version
, "2.23") < 0:
767 print termcap
.Yellow
+ termcap
.Bold
+ \
768 'Warning: This combination of gcc and binutils have' + \
769 ' known incompatibilities.\n' + \
770 ' If you encounter build problems, please update ' + \
771 'binutils to 2.23.' + \
774 # Make sure we warn if the user has requested to compile with the
775 # Undefined Benahvior Sanitizer and this version of gcc does not
777 if GetOption('with_ubsan') and \
778 compareVersions(gcc_version
, '4.9') < 0:
779 print termcap
.Yellow
+ termcap
.Bold
+ \
780 'Warning: UBSan is only supported using gcc 4.9 and later.' + \
783 disable_lto
= GetOption('no_lto')
784 if not disable_lto
and main
.get('BROKEN_INCREMENTAL_LTO', False) and \
785 not GetOption('force_lto'):
786 print termcap
.Yellow
+ termcap
.Bold
+ \
787 'Warning: Your compiler doesn\'t support incremental linking' + \
788 ' and lto at the same time, so lto is being disabled. To force' + \
789 ' lto on anyway, use the --force-lto option. That will disable' + \
790 ' partial linking.' + \
794 # Add the appropriate Link-Time Optimization (LTO) flags
795 # unless LTO is explicitly turned off. Note that these flags
796 # are only used by the fast target.
798 # Pass the LTO flag when compiling to produce GIMPLE
799 # output, we merely create the flags here and only append
801 main
['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
803 # Use the same amount of jobs for LTO as we are running
805 main
['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
807 main
.Append(TCMALLOC_CCFLAGS
=['-fno-builtin-malloc', '-fno-builtin-calloc',
808 '-fno-builtin-realloc', '-fno-builtin-free'])
810 # add option to check for undeclared overrides
811 if compareVersions(gcc_version
, "5.0") > 0:
812 main
.Append(CCFLAGS
=['-Wno-error=suggest-override'])
815 # Check for a supported version of clang, >= 3.1 is needed to
816 # support similar features as gcc 4.8. See
817 # http://clang.llvm.org/cxx_status.html for details
818 clang_version_re
= re
.compile(".* version (\d+\.\d+)")
819 clang_version_match
= clang_version_re
.search(CXX_version
)
820 if (clang_version_match
):
821 clang_version
= clang_version_match
.groups()[0]
822 if compareVersions(clang_version
, "3.1") < 0:
823 print 'Error: clang version 3.1 or newer required.'
824 print ' Installed version:', clang_version
827 print 'Error: Unable to determine clang version.'
830 # clang has a few additional warnings that we disable, extraneous
831 # parantheses are allowed due to Ruby's printing of the AST,
832 # finally self assignments are allowed as the generated CPU code
834 main
.Append(CCFLAGS
=['-Wno-parentheses',
836 # Some versions of libstdc++ (4.8?) seem to
837 # use struct hash and class hash
839 '-Wno-mismatched-tags',
842 main
.Append(TCMALLOC_CCFLAGS
=['-fno-builtin'])
844 # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
845 # opposed to libstdc++, as the later is dated.
846 if sys
.platform
== "darwin":
847 main
.Append(CXXFLAGS
=['-stdlib=libc++'])
848 main
.Append(LIBS
=['c++'])
850 # On FreeBSD we need libthr.
851 if sys
.platform
.startswith('freebsd'):
852 main
.Append(LIBS
=['thr'])
855 print termcap
.Yellow
+ termcap
.Bold
+ 'Error' + termcap
.Normal
,
856 print "Don't know what compiler options to use for your compiler."
857 print termcap
.Yellow
+ ' compiler:' + termcap
.Normal
, main
['CXX']
858 print termcap
.Yellow
+ ' version:' + termcap
.Normal
,
860 print termcap
.Yellow
+ termcap
.Bold
+ "COMMAND NOT FOUND!" +\
863 print CXX_version
.replace('\n', '<nl>')
864 print " If you're trying to use a compiler other than GCC"
865 print " or clang, there appears to be something wrong with your"
866 print " environment."
868 print " If you are trying to use a compiler other than those listed"
869 print " above you will need to ease fix SConstruct and "
870 print " src/SConscript to support that compiler."
# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
main['YACCHXXFILESUFFIX'] = '.hh'
877 # Do this after we save setting back, or else we'll tack on an
878 # extra 'qdo' every time we run scons.
880 main
['CC'] = main
['BATCH_CMD'] + ' ' + main
['CC']
881 main
['CXX'] = main
['BATCH_CMD'] + ' ' + main
['CXX']
882 main
['AS'] = main
['BATCH_CMD'] + ' ' + main
['AS']
883 main
['AR'] = main
['BATCH_CMD'] + ' ' + main
['AR']
884 main
['RANLIB'] = main
['BATCH_CMD'] + ' ' + main
['RANLIB']
886 if sys
.platform
== 'cygwin':
887 # cygwin has some header file issues...
888 main
.Append(CCFLAGS
=["-Wno-uninitialized"])
890 # Check for the protobuf compiler
891 protoc_version
= readCommand([main
['PROTOC'], '--version'],
892 exception
='').split()
894 # First two words should be "libprotoc x.y.z"
895 if len(protoc_version
) < 2 or protoc_version
[0] != 'libprotoc':
896 print termcap
.Yellow
+ termcap
.Bold
+ \
897 'Warning: Protocol buffer compiler (protoc) not found.\n' + \
898 ' Please install protobuf-compiler for tracing support.' + \
900 main
['PROTOC'] = False
902 # Based on the availability of the compress stream wrappers,
904 min_protoc_version
= '2.1.0'
905 if compareVersions(protoc_version
[1], min_protoc_version
) < 0:
906 print termcap
.Yellow
+ termcap
.Bold
+ \
907 'Warning: protoc version', min_protoc_version
, \
908 'or newer required.\n' + \
909 ' Installed version:', protoc_version
[1], \
911 main
['PROTOC'] = False
913 # Attempt to determine the appropriate include path and
914 # library path using pkg-config, that means we also need to
915 # check for pkg-config. Note that it is possible to use
916 # protobuf without the involvement of pkg-config. Later on we
917 # check go a library config check and at that point the test
918 # will fail if libprotobuf cannot be found.
919 if readCommand(['pkg-config', '--version'], exception
=''):
921 # Attempt to establish what linking flags to add for protobuf
923 main
.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
925 print termcap
.Yellow
+ termcap
.Bold
+ \
926 'Warning: pkg-config could not get protobuf flags.' + \
930 # Check for 'timeout' from GNU coreutils. If present, regressions will
931 # be run with a time limit. We require version 8.13 since we rely on
932 # support for the '--foreground' option.
933 if sys
.platform
.startswith('freebsd'):
934 timeout_lines
= readCommand(['gtimeout', '--version'],
935 exception
='').splitlines()
937 timeout_lines
= readCommand(['timeout', '--version'],
938 exception
='').splitlines()
939 # Get the first line and tokenize it
940 timeout_version
= timeout_lines
[0].split() if timeout_lines
else []
941 main
['TIMEOUT'] = timeout_version
and \
942 compareVersions(timeout_version
[-1], '8.13') >= 0
944 # Add a custom Check function to test for structure members.
# CheckMember(context, include, decl, member, include_quotes="<>") builds a
# tiny C++ snippet that (presumably) declares a `decl test;` variable and
# references `test.<member>`, then TryCompile()s it with a .cc extension.
# Parts of the snippet template fall on lines not visible in this chunk.
945 def CheckMember(context
, include
, decl
, member
, include_quotes
="<>"):
946 context
.Message("Checking for member %s in %s..." %
952 (void)test.%(member)s;
955 """ % { "header" : include_quotes
[0] + include
+ include_quotes
[1],
960 ret
= context
.TryCompile(text
, extension
=".cc")
964 # Platform-specific configuration. Note again that we assume that all
965 # builds under a given build root run on the same host platform.
# NOTE(review): the Configure() call registers CheckMember as a custom
# test; additional custom_tests entries may be on elided lines — confirm.
966 conf
= Configure(main
,
967 conf_dir
= joinpath(build_root
, '.scons_config'),
968 log_file
= joinpath(build_root
, 'scons_config.log'),
970 'CheckMember' : CheckMember
,
973 # Check if we should compile a 64 bit binary on Mac OS X/Darwin
# Darwin kernel >= 9.0.0 (Leopard) with a 64-bit-capable CPU gets the
# x86_64 arch flags for compile, assemble and link steps alike.
976 uname
= platform
.uname()
977 if uname
[0] == 'Darwin' and compareVersions(uname
[2], '9.0.0') >= 0:
978 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
979 main
.Append(CCFLAGS
=['-arch', 'x86_64'])
980 main
.Append(CFLAGS
=['-arch', 'x86_64'])
981 main
.Append(LINKFLAGS
=['-arch', 'x86_64'])
982 main
.Append(ASFLAGS
=['-arch', 'x86_64'])
986 # Recent versions of scons substitute a "Null" object for Configure()
987 # when configuration isn't necessary, e.g., if the "--help" option is
988 # present. Unfortunately this Null object always returns false,
989 # breaking all our configuration checks. We replace it with our own
990 # more optimistic null object that returns True instead.
# NOTE(review): the bodies of NullCheck, __init__ and __getattr__ (and the
# NullConf class statement itself) fall on lines not visible in this chunk.
992 def NullCheck(*args
, **kwargs
):
996 def __init__(self
, env
):
1000 def __getattr__(self
, mname
):
1003 conf
= NullConf(main
)
1005 # Cache build files in the supplied directory.
1006 if main
['M5_BUILD_CACHE']:
1007 print 'Using build cache located at', main
['M5_BUILD_CACHE']
1008 CacheDir(main
['M5_BUILD_CACHE'])
1010 main
['USE_PYTHON'] = not GetOption('without_python')
1011 if main
['USE_PYTHON']:
1012 # Find Python include and library directories for embedding the
1013 # interpreter. We rely on python-config to resolve the appropriate
1014 # includes and linker flags. ParseConfig does not seem to understand
1015 # the more exotic linker flags such as -Xlinker and -export-dynamic so
1016 # we add them explicitly below. If you want to link in an alternate
1017 # version of python, see above for instructions on how to invoke
1018 # scons with the appropriate PATH set.
1020 # First we check if python2-config exists, else we use python-config
1021 python_config
= readCommand(['which', 'python2-config'],
1022 exception
='').strip()
1023 if not os
.path
.exists(python_config
):
1024 python_config
= readCommand(['which', 'python-config'],
1025 exception
='').strip()
1026 py_includes
= readCommand([python_config
, '--includes'],
1027 exception
='').split()
1028 # Strip the -I from the include folders before adding them to the
1030 main
.Append(CPPPATH
=map(lambda inc
: inc
[2:], py_includes
))
1032 # Read the linker flags and split them into libraries and other link
1033 # flags. The libraries are added later through the call to CheckLib.
1034 py_ld_flags
= readCommand([python_config
, '--ldflags'],
1035 exception
='').split()
# NOTE(review): py_libs is presumably initialized and appended to on
# elided lines within this loop — confirm against the full file.
1037 for lib
in py_ld_flags
:
1038 if not lib
.startswith('-l'):
1039 main
.Append(LINKFLAGS
=[lib
])
1042 if lib
not in py_libs
:
1045 # verify that this stuff works
1046 if not conf
.CheckHeader('Python.h', '<>'):
1047 print "Error: can't find Python.h header in", py_includes
1048 print "Install Python headers (package python-dev on Ubuntu and RedHat)"
1052 if not conf
.CheckLib(lib
):
1053 print "Error: can't find library %s required by python" % lib
1056 # On Solaris you need to use libsocket for socket ops
1057 if not conf
.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1058 if not conf
.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1059 print "Can't find library with socket calls (e.g. accept())"
1062 # Check for zlib. If the check passes, libz will be automatically
1063 # added to the LIBS environment variable.
1064 if not conf
.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
1065 print 'Error: did not find needed zlib compression library '\
1066 'and/or zlib.h header file.'
1067 print ' Please install zlib and try again.'
1070 # If we have the protobuf compiler, also make sure we have the
1071 # development libraries. If the check passes, libprotobuf will be
1072 # automatically added to the LIBS environment variable. After
1073 # this, we can use the HAVE_PROTOBUF flag to determine if we have
1074 # got both protoc and libprotobuf available.
1075 main
['HAVE_PROTOBUF'] = main
['PROTOC'] and \
1076 conf
.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
1077 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
1079 # If we have the compiler but not the library, print another warning.
1080 if main
['PROTOC'] and not main
['HAVE_PROTOBUF']:
1081 print termcap
.Yellow
+ termcap
.Bold
+ \
1082 'Warning: did not find protocol buffer library and/or headers.\n' + \
1083 ' Please install libprotobuf-dev for tracing support.' + \
# clock_nanosleep may live in libc or in librt depending on the platform,
# hence the two-step check (no library first, then -lrt).
1087 have_posix_clock
= \
1088 conf
.CheckLibWithHeader(None, 'time.h', 'C',
1089 'clock_nanosleep(0,0,NULL,NULL);') or \
1090 conf
.CheckLibWithHeader('rt', 'time.h', 'C',
1091 'clock_nanosleep(0,0,NULL,NULL);')
1093 have_posix_timers
= \
1094 conf
.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
1095 'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
1097 if not GetOption('without_tcmalloc'):
1098 if conf
.CheckLib('tcmalloc'):
1099 main
.Append(CCFLAGS
=main
['TCMALLOC_CCFLAGS'])
1100 elif conf
.CheckLib('tcmalloc_minimal'):
1101 main
.Append(CCFLAGS
=main
['TCMALLOC_CCFLAGS'])
1103 print termcap
.Yellow
+ termcap
.Bold
+ \
1104 "You can get a 12% performance improvement by "\
1105 "installing tcmalloc (libgoogle-perftools-dev package "\
1106 "on Ubuntu or RedHat)." + termcap
.Normal
1109 # Detect back trace implementations. The last implementation in the
1110 # list will be used by default.
1111 backtrace_impls
= [ "none" ]
1113 if conf
.CheckLibWithHeader(None, 'execinfo.h', 'C',
1114 'backtrace_symbols_fd((void*)0, 0, 0);'):
1115 backtrace_impls
.append("glibc")
1116 elif conf
.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
1117 'backtrace_symbols_fd((void*)0, 0, 0);'):
1118 # NetBSD and FreeBSD need libexecinfo.
1119 backtrace_impls
.append("glibc")
1120 main
.Append(LIBS
=['execinfo'])
1122 if backtrace_impls
[-1] == "none":
1123 default_backtrace_impl
= "none"
1124 print termcap
.Yellow
+ termcap
.Bold
+ \
1125 "No suitable back trace implementation found." + \
1128 if not have_posix_clock
:
1129 print "Can't find library for POSIX clocks."
1131 # Check for <fenv.h> (C99 FP environment control)
1132 have_fenv
= conf
.CheckHeader('fenv.h', '<>')
1134 print "Warning: Header file <fenv.h> not found."
1135 print " This host has no IEEE FP rounding mode control."
1137 # Check if we should enable KVM-based hardware virtualization. The API
1138 # we rely on exists since version 2.6.36 of the kernel, but somehow
1139 # the KVM_API_VERSION does not reflect the change. We test for one of
1140 # the types as a fall back.
1141 have_kvm
= conf
.CheckHeader('linux/kvm.h', '<>')
1143 print "Info: Compatible header file <linux/kvm.h> not found, " \
1144 "disabling KVM support."
1146 # Check if the TUN/TAP driver is available.
1147 have_tuntap
= conf
.CheckHeader('linux/if_tun.h', '<>')
1149 print "Info: Compatible header file <linux/if_tun.h> not found."
1151 # x86 needs support for xsave. We test for the structure here since we
1152 # won't be able to run new tests by the time we know which ISA we're
1154 have_kvm_xsave
= conf
.CheckTypeSize('struct kvm_xsave',
1155 '#include <linux/kvm.h>') != 0
1157 # Check if the requested target ISA is compatible with the host
# is_isa_kvm_compatible(isa) returns True when KVM CPU models can run the
# given target ISA on this host (ARM hosts for arm targets, x86_64 hosts
# with kernel xsave support for x86 targets). Several branches of the
# decision logic fall on lines not visible in this chunk.
1158 def is_isa_kvm_compatible(isa
):
1161 host_isa
= platform
.machine()
1163 print "Warning: Failed to determine host ISA."
1166 if not have_posix_timers
:
1167 print "Warning: Can not enable KVM, host seems to lack support " \
1172 return host_isa
in ( "armv7l", "aarch64" )
1174 if host_isa
!= "x86_64":
1177 if not have_kvm_xsave
:
1178 print "KVM on x86 requires xsave support in kernel headers."
1186 # Check if the exclude_host attribute is available. We want this to
1187 # get accurate instruction counts in KVM.
1188 main
['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf
.CheckMember(
1189 'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1192 ######################################################################
1194 # Finish the configuration
# From here on, `main` is the checked environment returned by Finish().
1196 main
= conf
.Finish()
1198 ######################################################################
1200 # Collect all non-global variables
1203 # Define the universe of supported ISAs
# NOTE(review): all_isa_list is presumably initialized on an elided line
# alongside all_gpu_isa_list — confirm against the full file.
1205 all_gpu_isa_list
= [ ]
1206 Export('all_isa_list')
1207 Export('all_gpu_isa_list')
1209 class CpuModel(object):
1210 '''The CpuModel class encapsulates everything the ISA parser needs to
1211 know about a particular CPU model.'''
1213 # Dict of available CPU model objects. Accessible as CpuModel.dict.
1216 # Constructor. Automatically adds models to CpuModel.dict.
1217 def __init__(self
, name
, default
=False):
1218 self
.name
= name
# name of model
1220 # This cpu is enabled by default
1221 self
.default
= default
# Disallow registering two models under the same name.
1224 if name
in CpuModel
.dict:
1225 raise AttributeError, "CpuModel '%s' already registered" % name
1226 CpuModel
.dict[name
] = self
1230 # Sticky variables get saved in the variables file so they persist from
1231 # one invocation to the next (unless overridden, in which case the new
1232 # value becomes sticky).
1233 sticky_vars
= Variables(args
=ARGUMENTS
)
1234 Export('sticky_vars')
1236 # Sticky variables that should be exported
# NOTE(review): export_vars, all_protocols, protocol_dirs and
# slicc_includes are presumably initialized on elided lines — confirm.
1238 Export('export_vars')
1242 Export('all_protocols')
1244 Export('protocol_dirs')
1246 Export('slicc_includes')
1248 # Walk the tree and execute all SConsopts scripts that will add to the
1250 if GetOption('verbose'):
1251 print "Reading SConsopts"
1252 for bdir
in [ base_dir
] + extras_dir_list
:
1254 print "Error: directory '%s' does not exist" % bdir
1256 for root
, dirs
, files
in os
.walk(bdir
):
1257 if 'SConsopts' in files
:
1258 if GetOption('verbose'):
1259 print "Reading", joinpath(root
, 'SConsopts')
1260 SConscript(joinpath(root
, 'SConsopts'))
1263 all_gpu_isa_list
.sort()
# Declare the user-settable sticky build variables and their defaults
# (many defaults come from the feature probes performed above).
1265 sticky_vars
.AddVariables(
1266 EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list
),
1267 EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list
),
1268 ListVariable('CPU_MODELS', 'CPU models',
1269 sorted(n
for n
,m
in CpuModel
.dict.iteritems() if m
.default
),
1270 sorted(CpuModel
.dict.keys())),
1271 BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1273 BoolVariable('SS_COMPATIBLE_FP',
1274 'Make floating-point results compatible with SimpleScalar',
1276 BoolVariable('USE_SSE2',
1277 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1279 BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock
),
1280 BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv
),
1281 BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
1282 BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm
),
1283 BoolVariable('USE_TUNTAP',
1284 'Enable using a tap device to bridge to the host network',
1286 BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
1287 EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1289 EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
1290 backtrace_impls
[-1], backtrace_impls
)
1293 # These variables get exported to #defines in config/*.hh (see src/SConscript).
1294 export_vars
+= ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
1295 'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'USE_TUNTAP',
1296 'PROTOCOL', 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']
1298 ###################################################
1300 # Define a SCons builder for configuration flag headers.
1302 ###################################################
# This function generates a config header file that #defines the
# variable symbol to the current variable setting (0 or 1). The source
# operands are the name of the variable and a Value node containing the
# value of the variable.
def build_config_file(target, source, env):
    """SCons action: write '#define <VARIABLE> <value>' to target[0].

    `source` holds two Value nodes: the variable name and its value.
    `env` is unused but required by the SCons action signature.
    """
    (variable, value) = [s.get_contents() for s in source]
    # Use a context manager instead of the Python-2-only file()
    # constructor so the handle is closed even if the write fails.  The
    # explicit format string emits the same bytes as the original
    # "print >> f, '#define', variable, value" statement.
    with open(str(target[0]), 'w') as f:
        f.write('#define %s %s\n' % (variable, value))
1315 # Combine the two functions into a scons Action object.
# Transform("CONFIG H", 2) presumably supplies the abbreviated build
# output label for this action — confirm against Transform's definition.
1316 config_action
= MakeAction(build_config_file
, Transform("CONFIG H", 2))
1318 # The emitter munges the source & target node lists to reflect what
1319 # we're really doing.
1320 def config_emitter(target
, source
, env
):
1321 # extract variable name from Builder arg
1322 variable
= str(target
[0])
1323 # True target is config header file
1324 target
= joinpath('config', variable
.lower() + '.hh')
# NOTE(review): `val` is presumably read from env[variable] on an elided
# line, and forced to int on another — confirm against the full file.
1326 if isinstance(val
, bool):
1327 # Force value to 0/1
1329 elif isinstance(val
, str):
1330 val
= '"' + val
+ '"'
1332 # Sources are variable name & value (packaged in SCons Value nodes)
1333 return ([target
], [Value(variable
), Value(val
)])
1335 config_builder
= Builder(emitter
= config_emitter
, action
= config_action
)
1337 main
.Append(BUILDERS
= { 'ConfigFile' : config_builder
})
1339 ###################################################
1341 # Builders for static and shared partially linked object files.
1343 ###################################################
# Partial (incremental) static linking reuses the standard LinkAction but
# with dedicated $PLINKFLAGS; further keyword arguments are on elided lines.
1345 partial_static_builder
= Builder(action
=SCons
.Defaults
.LinkAction
,
1346 src_suffix
='$OBJSUFFIX',
1347 src_builder
=['StaticObject', 'Object'],
1348 LINKFLAGS
='$PLINKFLAGS',
1351 def partial_shared_emitter(target
, source
, env
):
# NOTE(review): `tgt` presumably comes from iterating `target` on an
# elided line; each target node is flagged as shared for the partial link.
1353 tgt
.attributes
.shared
= 1
1354 return (target
, source
)
1355 partial_shared_builder
= Builder(action
=SCons
.Defaults
.ShLinkAction
,
1356 emitter
=partial_shared_emitter
,
1357 src_suffix
='$SHOBJSUFFIX',
1358 src_builder
='SharedObject',
1359 SHLINKFLAGS
='$PSHLINKFLAGS',
1362 main
.Append(BUILDERS
= { 'PartialShared' : partial_shared_builder
,
1363 'PartialStatic' : partial_static_builder
})
1365 # builds in ext are shared across all configs in the build root.
1366 ext_dir
= abspath(joinpath(str(main
.root
), 'ext'))
# NOTE(review): ext_build_dirs is presumably initialized to [] on an
# elided line — confirm against the full file.
1368 for root
, dirs
, files
in os
.walk(ext_dir
):
1369 if 'SConscript' in files
:
1370 build_dir
= os
.path
.relpath(root
, ext_dir
)
1371 ext_build_dirs
.append(build_dir
)
1372 main
.SConscript(joinpath(root
, 'SConscript'),
1373 variant_dir
=joinpath(build_root
, build_dir
))
1375 main
.Prepend(CPPPATH
=Dir('ext/pybind11/include/'))
###################################################
#
# This builder and wrapper method are used to set up a directory with
# switching headers. Those are headers which are in a generic location and
# that include more specific headers from a directory chosen at build time
# based on the current build settings.
#
###################################################

def build_switching_header(target, source, env):
    """SCons action: generate a one-line header that #includes the real
    implementation header from the build-configured subdirectory.

    target[0] is the generic header to create; source[0] (a Value node)
    names the subdirectory holding the specific version; env supplies
    BUILDDIR, the variant build root.
    """
    path = str(target[0])
    subdir = str(source[0])
    dp, fp = os.path.split(path)
    # Express the header's directory relative to the build root so the
    # generated #include resolves from within the variant build tree.
    dp = os.path.relpath(os.path.realpath(dp),
                         os.path.realpath(env['BUILDDIR']))
    with open(path, 'w') as hdr:
        # Explicit write() with a trailing newline instead of the
        # Python-2-only 'print >>' statement; the emitted bytes are
        # identical and the code stays valid under Python 3.
        hdr.write('#include "%s/%s/%s"\n' % (dp, subdir, fp))
1395 switching_header_action
= MakeAction(build_switching_header
,
1396 Transform('GENERATE'))
# source_factory=Value lets callers pass the subdirectory as a plain
# string; further Builder keyword arguments are on elided lines.
1398 switching_header_builder
= Builder(action
=switching_header_action
,
1399 source_factory
=Value
,
1402 main
.Append(BUILDERS
= { 'SwitchingHeader': switching_header_builder
})
def switching_headers(self, headers, source):
    """Environment method: declare one SwitchingHeader target per entry
    in `headers`, all switched on the same `source` subdirectory."""
    for hdr in headers:
        self.SwitchingHeader(hdr, source)
1408 main
.AddMethod(switching_headers
, 'SwitchingHeaders')
1410 # all-isas -> all-deps -> all-environs -> all_targets
1411 main
.Alias('#all-isas', [])
1412 main
.Alias('#all-deps', '#all-isas')
1414 # Dummy target to ensure all environments are created before telling
1415 # SCons what to actually make (the command line arguments). We attach
1416 # them to the dependence graph after the environments are complete.
1417 ORIG_BUILD_TARGETS
= list(BUILD_TARGETS
) # force a copy; gets closure to work.
# (BUILD_TARGETS is overwritten further below, so a snapshot is required.)
def environsComplete(target, source, env):
    """Builder action run once every variant environment exists: attach
    each originally requested command-line target to '#all-targets'."""
    for requested in ORIG_BUILD_TARGETS:
        main.Depends('#all-targets', requested)
1422 # Each build/* switching_dir attaches its *-environs target to #all-environs.
# CompleteEnvirons wraps environsComplete (no output transform label).
1423 main
.Append(BUILDERS
= {'CompleteEnvirons' :
1424 Builder(action
=MakeAction(environsComplete
, None))})
1425 main
.CompleteEnvirons('#all-environs', [])
def doNothing(**ignored):
    """Deliberately empty builder action; accepts and discards any
    keyword arguments SCons passes it."""
    pass
1428 main
.Append(BUILDERS
= {'Dummy': Builder(action
=MakeAction(doNothing
, None))})
1430 # The final target to which all the original targets ultimately get attached.
1431 main
.Dummy('#all-targets', '#all-environs')
# Replace the user's requested targets with the single aggregate target;
# the originals were snapshotted into ORIG_BUILD_TARGETS above.
1432 BUILD_TARGETS
[:] = ['#all-targets']
1434 ###################################################
1436 # Define build environments for selected configurations.
1438 ###################################################
def variant_name(path):
    """Derive the printable variant name for a build directory: the final
    path component, lower-cased, with underscores turned into dashes
    (e.g. '.../X86_MESI' -> 'x86-mesi')."""
    leaf = os.path.basename(path)
    return leaf.replace('_', '-').lower()
# Expose the helper both as an environment function and as a substitution
# variable, so '$VARIANT_NAME' expands per-variant from $BUILDDIR.
1442 main
['variant_name'] = variant_name
1443 main
['VARIANT_NAME'] = '${variant_name(BUILDDIR)}'
# Configure one build environment per requested variant path. Several
# statements inside this loop (e.g. the env clone and some guard
# conditions) fall on lines not visible in this chunk.
1445 for variant_path
in variant_paths
:
1446 if not GetOption('silent'):
1447 print "Building in", variant_path
1449 # Make a copy of the build-root environment to use for this config.
1451 env
['BUILDDIR'] = variant_path
1453 # variant_dir is the tail component of build path, and is used to
1454 # determine the build parameters (e.g., 'ALPHA_SE')
1455 (build_root
, variant_dir
) = splitpath(variant_path
)
1457 # Set env variables according to the build directory config.
1458 sticky_vars
.files
= []
1459 # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
1460 # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
1461 # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
1462 current_vars_file
= joinpath(build_root
, 'variables', variant_dir
)
1463 if isfile(current_vars_file
):
1464 sticky_vars
.files
.append(current_vars_file
)
1465 if not GetOption('silent'):
1466 print "Using saved variables file %s" % current_vars_file
1467 elif variant_dir
in ext_build_dirs
:
1468 # Things in ext are built without a variant directory.
1471 # Build dir-specific variables file doesn't exist.
1473 # Make sure the directory is there so we can create it later
1474 opt_dir
= dirname(current_vars_file
)
1475 if not isdir(opt_dir
):
1478 # Get default build variables from source tree. Variables are
1479 # normally determined by name of $VARIANT_DIR, but can be
1480 # overridden by '--default=' arg on command line.
1481 default
= GetOption('default')
1482 opts_dir
= joinpath(main
.root
.abspath
, 'build_opts')
1484 default_vars_files
= [joinpath(build_root
, 'variables', default
),
1485 joinpath(opts_dir
, default
)]
1487 default_vars_files
= [joinpath(opts_dir
, variant_dir
)]
1488 existing_files
= filter(isfile
, default_vars_files
)
1490 default_vars_file
= existing_files
[0]
1491 sticky_vars
.files
.append(default_vars_file
)
1492 print "Variables file %s not found,\n using defaults in %s" \
1493 % (current_vars_file
, default_vars_file
)
1495 print "Error: cannot find variables file %s or " \
1496 "default file(s) %s" \
1497 % (current_vars_file
, ' or '.join(default_vars_files
))
1500 # Apply current variable settings to env
1501 sticky_vars
.Update(env
)
1503 help_texts
["local_vars"] += \
1504 "Build variables for %s:\n" % variant_dir \
1505 + sticky_vars
.GenerateHelpText(env
)
1507 # Process variable settings.
# Downgrade settings whose host prerequisites were not detected above.
1509 if not have_fenv
and env
['USE_FENV']:
1510 print "Warning: <fenv.h> not available; " \
1511 "forcing USE_FENV to False in", variant_dir
+ "."
1512 env
['USE_FENV'] = False
1514 if not env
['USE_FENV']:
1515 print "Warning: No IEEE FP rounding mode control in", variant_dir
+ "."
1516 print " FP results may deviate slightly from other platforms."
1519 env
.Append(LIBS
=['efence'])
1523 print "Warning: Can not enable KVM, host seems to lack KVM support"
1524 env
['USE_KVM'] = False
1525 elif not is_isa_kvm_compatible(env
['TARGET_ISA']):
1526 print "Info: KVM support disabled due to unsupported host and " \
1527 "target ISA combination"
1528 env
['USE_KVM'] = False
1530 if env
['USE_TUNTAP']:
1532 print "Warning: Can't connect EtherTap with a tap device."
1533 env
['USE_TUNTAP'] = False
1535 if env
['BUILD_GPU']:
1536 env
.Append(CPPDEFINES
=['BUILD_GPU'])
1538 # Warn about missing optional functionality
1540 if not main
['HAVE_PERF_ATTR_EXCLUDE_HOST']:
1541 print "Warning: perf_event headers lack support for the " \
1542 "exclude_host attribute. KVM instruction counts will " \
1545 # Save sticky variable settings back to current variables file
1546 sticky_vars
.Save(current_vars_file
, env
)
1549 env
.Append(CCFLAGS
=['-msse2'])
1551 # The src/SConscript file sets up the build rules in 'env' according
1552 # to the configured variables. It returns a list of environments,
1553 # one for each variant build (debug, opt, etc.)
1554 SConscript('src/SConscript', variant_dir
= variant_path
, exports
= 'env')
def pairwise(iterable):
    """s -> (s0,s1), (s1,s2), (s2, s3), ...

    Standard itertools recipe: yield overlapping pairs of consecutive
    elements. Used below to chain the variant builds into a serial
    dependency order.
    """
    a, b = itertools.tee(iterable)
    # Advance the second iterator by one element; without this the zip
    # pairs each element with itself instead of with its successor,
    # contradicting the contract in the docstring above.
    next(b, None)
    # zip() instead of the Python-2-only itertools.izip(); the sole
    # caller only iterates the result once, so a list (py2) or iterator
    # (py3) is equally fine.
    return zip(a, b)
1562 variant_names
= [variant_name(path
) for path
in variant_paths
]
1564 # Create false dependencies so SCons will parse ISAs, establish
1565 # dependencies, and setup the build Environments serially. Either
1566 # SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
1567 # greater than 1. It appears to be standard race condition stuff; it
1568 # doesn't always fail, but usually, and the behaviors are different.
1569 # Every time I tried to remove this, builds would fail in some
1570 # creative new way. So, don't do that. You'll want to, though, because
1571 # tests/SConscript takes a long time to make its Environments.
# Chain each variant's -deps and -environs targets onto the previous
# variant's (sorted order) so they are processed one at a time.
1572 for t1
, t2
in pairwise(sorted(variant_names
)):
1573 main
.Depends('#%s-deps' % t2
, '#%s-deps' % t1
)
1574 main
.Depends('#%s-environs' % t2
, '#%s-environs' % t1
)
1578 Usage: scons [scons options] [build variables] [target(s)]
1580 Extra scons options:
1583 Global build variables: