3 # Copyright (c) 2013, 2015, 2016 ARM Limited
6 # The license below extends only to copyright in the software and shall
7 # not be construed as granting a license to any other intellectual
8 # property including but not limited to intellectual property relating
9 # to a hardware implementation of the functionality of the software
10 # licensed hereunder. You may use the software subject to the license
11 # terms below provided that you ensure that this notice is replicated
12 # unmodified and in its entirety in all distributions of the software,
13 # modified or unmodified, in source code or in binary form.
15 # Copyright (c) 2011 Advanced Micro Devices, Inc.
16 # Copyright (c) 2009 The Hewlett-Packard Development Company
17 # Copyright (c) 2004-2005 The Regents of The University of Michigan
18 # All rights reserved.
20 # Redistribution and use in source and binary forms, with or without
21 # modification, are permitted provided that the following conditions are
22 # met: redistributions of source code must retain the above copyright
23 # notice, this list of conditions and the following disclaimer;
24 # redistributions in binary form must reproduce the above copyright
25 # notice, this list of conditions and the following disclaimer in the
26 # documentation and/or other materials provided with the distribution;
27 # neither the name of the copyright holders nor the names of its
28 # contributors may be used to endorse or promote products derived from
29 # this software without specific prior written permission.
31 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
43 # Authors: Steve Reinhardt
46 ###################################################
48 # SCons top-level build description (SConstruct) file.
50 # While in this directory ('gem5'), just type 'scons' to build the default
51 # configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52 # to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53 # the optimized full-system version).
55 # You can build gem5 in a different directory as long as there is a
56 # 'build/<CONFIG>' somewhere along the target path. The build system
57 # expects that all configs under the same build directory are being
58 # built for the same host system.
62 # The following two commands are equivalent. The '-u' option tells
63 # scons to search up the directory tree for this SConstruct file.
64 # % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65 # % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
67 # The following two commands are equivalent and demonstrate building
68 # in a directory outside of the source tree. The '-C' option tells
69 # scons to chdir to the specified directory to find this SConstruct
71 # % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72 # % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
74 # You can use 'scons -H' to print scons options. If you're in this
75 # 'gem5' directory (or use -u or -C to tell scons where to find this
76 # file), you can use 'scons -h' to print all the gem5-specific build
79 ###################################################
81 # Check for recent-enough Python and SCons versions.
83 # Really old versions of scons only take two options for the
84 # function, so check once without the revision and once with the
85 # revision, the first instance will fail for stuff other than
86 # 0.98, and the second will fail for 0.98.0
87 EnsureSConsVersion(0, 98)
88 EnsureSConsVersion(0, 98, 1)
91 For more details, see:
92 http://gem5.org/Dependencies
96 # We ensure the python version early because because python-config
99 EnsurePythonVersion(2, 5)
100 except SystemExit, e
:
102 You can use a non-default installation of the Python interpreter by
103 rearranging your PATH so that scons finds the non-default 'python' and
104 'python-config' first.
106 For more details, see:
107 http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
111 # Global Python includes
119 from os
import mkdir
, environ
120 from os
.path
import abspath
, basename
, dirname
, expanduser
, normpath
121 from os
.path
import exists
, isdir
, isfile
122 from os
.path
import join
as joinpath
, split
as splitpath
128 extra_python_paths
= [
129 Dir('src/python').srcnode().abspath
, # gem5 includes
130 Dir('ext/ply').srcnode().abspath
, # ply is used by several files
133 sys
.path
[1:1] = extra_python_paths
135 from m5
.util
import compareVersions
, readCommand
136 from m5
.util
.terminal
import get_termcap
147 # There's a bug in scons in that (1) by default, the help texts from
148 # AddOption() are supposed to be displayed when you type 'scons -h'
149 # and (2) you can override the help displayed by 'scons -h' using the
150 # Help() function, but these two features are incompatible: once
151 # you've overridden the help text using Help(), there's no way to get
152 # at the help texts from AddOptions. See:
153 # http://scons.tigris.org/issues/show_bug.cgi?id=2356
154 # http://scons.tigris.org/issues/show_bug.cgi?id=2611
155 # This hack lets us extract the help text from AddOptions and
156 # re-inject it via Help(). Ideally someday this bug will be fixed and
157 # we can just use AddOption directly.
def AddLocalOption(*args, **kwargs):
    """Wrapper around scons AddOption() that also records the option's
    help text in help_texts["options"], so it can later be re-injected
    via Help() (works around the AddOption/Help incompatibility
    described in the comment above).

    NOTE(review): the 'if "help" in kwargs' guard and the col_width /
    length bookkeeping lines were missing from this mangled chunk and
    have been reconstructed from the surviving fragments.
    """
    # Column where the help text starts, after the option names.
    col_width = 30

    help = "  " + ", ".join(args)
    if "help" in kwargs:
        length = len(help)
        if length >= col_width:
            # Option names too long: put the help text on its own line.
            help += "\n" + " " * col_width
            length = 0
        help += " " * (col_width - length)
        help += kwargs["help"]
        help_texts["options"] += help + "\n"

    AddOption(*args, **kwargs)
173 AddLocalOption('--colors', dest
='use_colors', action
='store_true',
174 help="Add color to abbreviated scons output")
175 AddLocalOption('--no-colors', dest
='use_colors', action
='store_false',
176 help="Don't add color to abbreviated scons output")
177 AddLocalOption('--with-cxx-config', dest
='with_cxx_config',
179 help="Build with support for C++-based configuration")
180 AddLocalOption('--default', dest
='default', type='string', action
='store',
181 help='Override which build_opts file to use for defaults')
182 AddLocalOption('--ignore-style', dest
='ignore_style', action
='store_true',
183 help='Disable style checking hooks')
184 AddLocalOption('--no-lto', dest
='no_lto', action
='store_true',
185 help='Disable Link-Time Optimization for fast')
186 AddLocalOption('--update-ref', dest
='update_ref', action
='store_true',
187 help='Update test reference outputs')
188 AddLocalOption('--verbose', dest
='verbose', action
='store_true',
189 help='Print full tool command lines')
190 AddLocalOption('--without-python', dest
='without_python',
192 help='Build without Python configuration support')
193 AddLocalOption('--without-tcmalloc', dest
='without_tcmalloc',
195 help='Disable linking against tcmalloc')
196 AddLocalOption('--with-ubsan', dest
='with_ubsan', action
='store_true',
197 help='Build with Undefined Behavior Sanitizer if available')
198 AddLocalOption('--with-asan', dest
='with_asan', action
='store_true',
199 help='Build with Address Sanitizer if available')
201 termcap
= get_termcap(GetOption('use_colors'))
203 ########################################################################
205 # Set up the main build environment.
207 ########################################################################
209 # export TERM so that clang reports errors in color
210 use_vars
= set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
211 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
212 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])
215 "ASAN_", # address sanitizer symbolizer path and settings
216 "CCACHE_", # ccache (caching compiler wrapper) configuration
217 "CCC_", # clang static analyzer configuration
218 "DISTCC_", # distcc (distributed compiler wrapper) configuration
219 "INCLUDE_SERVER_", # distcc pump server settings
220 "M5", # M5 configuration (e.g., path to kernels)
224 for key
,val
in sorted(os
.environ
.iteritems()):
225 if key
in use_vars
or \
226 any([key
.startswith(prefix
) for prefix
in use_prefixes
]):
229 # Tell scons to avoid implicit command dependencies to avoid issues
230 # with the param wrappers being compiled twice (see
231 # http://scons.tigris.org/issues/show_bug.cgi?id=2811)
232 main
= Environment(ENV
=use_env
, IMPLICIT_COMMAND_DEPENDENCIES
=0)
233 main
.Decider('MD5-timestamp')
234 main
.root
= Dir(".") # The current directory (where this file lives).
235 main
.srcdir
= Dir("src") # The source directory
237 main_dict_keys
= main
.Dictionary().keys()
239 # Check that we have a C/C++ compiler
240 if not ('CC' in main_dict_keys
and 'CXX' in main_dict_keys
):
241 print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
244 # Check that swig is present
245 if not 'SWIG' in main_dict_keys
:
246 print "swig is not installed (package swig on Ubuntu and RedHat)"
249 # add useful python code PYTHONPATH so it can be used by subprocesses
251 main
.AppendENVPath('PYTHONPATH', extra_python_paths
)
253 ########################################################################
257 # If the gem5 directory is a mercurial repository, we should do some
260 ########################################################################
262 hgdir
= main
.root
.Dir(".hg")
266 You're missing the gem5 style hook, which automatically checks your code
267 against the gem5 style rules on %s.
268 This script will now install the hook in your %s.
269 Press enter to continue, or ctrl-c to abort: """
271 mercurial_style_message
= style_message
% ("hg commit and qrefresh commands",
273 git_style_message
= style_message
% ("'git commit'",
274 ".git/hooks/ directory")
276 mercurial_style_upgrade_message
= """
277 Your Mercurial style hooks are not up-to-date. This script will now
278 try to automatically update them. A backup of your hgrc will be saved
280 Press enter to continue, or ctrl-c to abort: """
282 mercurial_style_hook
= """
283 # The following lines were automatically added by gem5/SConstruct
284 # to provide the gem5 style-checking hooks
286 hgstyle = %s/util/hgstyle.py
289 pretxncommit.style = python:hgstyle.check_style
290 pre-qrefresh.style = python:hgstyle.check_style
291 # End of SConstruct additions
293 """ % (main
.root
.abspath
)
295 mercurial_lib_not_found
= """
296 Mercurial libraries cannot be found, ignoring style hook. If
297 you are a gem5 developer, please fix this and run the style
298 hook. It is important.
301 # Check for style hook and prompt for installation if it's not there.
302 # Skip this if --ignore-style was specified, there's no interactive
303 # terminal to prompt, or no recognized revision control system can be
305 ignore_style
= GetOption('ignore_style') or not sys
.stdin
.isatty()
307 # Try to wire up Mercurial to the style hooks
308 if not ignore_style
and hgdir
.exists():
310 style_hooks
= tuple()
311 hgrc
= hgdir
.File('hgrc')
312 hgrc_old
= hgdir
.File('hgrc.old')
314 from mercurial
import ui
316 ui
.readconfig(hgrc
.abspath
)
317 style_hooks
= (ui
.config('hooks', 'pretxncommit.style', None),
318 ui
.config('hooks', 'pre-qrefresh.style', None))
319 style_hook
= all(style_hooks
)
320 style_extension
= ui
.config('extensions', 'style', None)
322 print mercurial_lib_not_found
324 if "python:style.check_style" in style_hooks
:
325 # Try to upgrade the style hooks
326 print mercurial_style_upgrade_message
327 # continue unless user does ctrl-c/ctrl-d etc.
331 print "Input exception, exiting scons.\n"
333 shutil
.copyfile(hgrc
.abspath
, hgrc_old
.abspath
)
334 re_style_hook
= re
.compile(r
"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
335 re_style_extension
= re
.compile("style\s*=\s*([^#\s]+).*")
336 old
, new
= open(hgrc_old
.abspath
, 'r'), open(hgrc
.abspath
, 'w')
338 m_hook
= re_style_hook
.match(l
)
339 m_ext
= re_style_extension
.match(l
)
341 hook
, check
= m_hook
.groups()
342 if check
!= "python:style.check_style":
343 print "Warning: %s.style is using a non-default " \
344 "checker: %s" % (hook
, check
)
345 if hook
not in ("pretxncommit", "pre-qrefresh"):
346 print "Warning: Updating unknown style hook: %s" % hook
348 l
= "%s.style = python:hgstyle.check_style\n" % hook
349 elif m_ext
and m_ext
.group(1) == style_extension
:
350 l
= "hgstyle = %s/util/hgstyle.py\n" % main
.root
.abspath
354 print mercurial_style_message
,
355 # continue unless user does ctrl-c/ctrl-d etc.
359 print "Input exception, exiting scons.\n"
361 hgrc_path
= '%s/.hg/hgrc' % main
.root
.abspath
362 print "Adding style hook to", hgrc_path
, "\n"
364 with
open(hgrc_path
, 'a') as f
:
365 f
.write(mercurial_style_hook
)
367 print "Error updating", hgrc_path
370 def install_git_style_hooks():
372 gitdir
= Dir(readCommand(
373 ["git", "rev-parse", "--git-dir"]).strip("\n"))
375 print "Warning: Failed to find git repo directory: %s" % e
378 git_hooks
= gitdir
.Dir("hooks")
379 git_pre_commit_hook
= git_hooks
.File("pre-commit")
380 git_style_script
= File("util/git-pre-commit.py")
382 if git_pre_commit_hook
.exists():
385 print git_style_message
,
389 print "Input exception, exiting scons.\n"
392 if not git_hooks
.exists():
393 mkdir(git_hooks
.get_abspath())
395 # Use a relative symlink if the hooks live in the source directory
396 if git_pre_commit_hook
.is_under(main
.root
):
397 script_path
= os
.path
.relpath(
398 git_style_script
.get_abspath(),
399 git_pre_commit_hook
.Dir(".").get_abspath())
401 script_path
= git_style_script
.get_abspath()
404 os
.symlink(script_path
, git_pre_commit_hook
.get_abspath())
406 print "Error updating git pre-commit hook"
409 # Try to wire up git to the style hooks
410 if not ignore_style
and main
.root
.Entry(".git").exists():
411 install_git_style_hooks()
413 ###################################################
415 # Figure out which configurations to set up based on the path(s) of
418 ###################################################
420 # Find default configuration & binary.
421 Default(environ
.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
423 # helper function: find last occurrence of element in list
def rfind(l, elt, offs = -1):
    """Return the index of the last occurrence of elt in l, scanning
    backwards from index len(l)+offs.

    Raises ValueError if elt is not found in the scanned range.
    NOTE(review): the scan range stops before index 0 (range(..., 0, -1)),
    matching the original; callers only look for interior 'build' path
    components, so index 0 is never a valid answer here.
    """
    for i in range(len(l)+offs, 0, -1):
        if l[i] == elt:
            return i
    # Use the call form of raise: identical behavior on Python 2, and
    # also valid Python 3 syntax (the old "raise E, msg" form is not).
    raise ValueError("element not found")
430 # Take a list of paths (or SCons Nodes) and return a list with all
431 # paths made absolute and ~-expanded. Paths will be interpreted
432 # relative to the launch directory unless a different root is provided
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    """Return a copy of path_list with every entry ~-expanded and made
    absolute.  Entries may be strings or SCons Nodes.  Paths are
    interpreted relative to 'root', which defaults to the directory
    scons was launched from (captured once, at definition time).

    NOTE(review): the comprehension's 'for' clause was missing from
    this mangled chunk and has been reconstructed.
    """
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]
437 # Each target must have 'build' in the interior of the path; the
438 # directory below this will determine the build parameters. For
439 # example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
440 # recognize that ALPHA_SE specifies the configuration because it
441 # follow 'build' in the build path.
443 # The funky assignment to "[:]" is needed to replace the list contents
444 # in place rather than reassign the symbol to a new list, which
445 # doesn't work (obviously!).
446 BUILD_TARGETS
[:] = makePathListAbsolute(BUILD_TARGETS
)
448 # Generate a list of the unique build roots and configs that the
449 # collected targets reference.
452 for t
in BUILD_TARGETS
:
453 path_dirs
= t
.split('/')
455 build_top
= rfind(path_dirs
, 'build', -2)
457 print "Error: no non-leaf 'build' dir found on target path", t
459 this_build_root
= joinpath('/',*path_dirs
[:build_top
+1])
461 build_root
= this_build_root
463 if this_build_root
!= build_root
:
464 print "Error: build targets not under same build root\n"\
465 " %s\n %s" % (build_root
, this_build_root
)
467 variant_path
= joinpath('/',*path_dirs
[:build_top
+2])
468 if variant_path
not in variant_paths
:
469 variant_paths
.append(variant_path
)
471 # Make sure build_root exists (might not if this is the first build there)
472 if not isdir(build_root
):
474 main
['BUILDROOT'] = build_root
478 main
.SConsignFile(joinpath(build_root
, "sconsign"))
480 # Default duplicate option is to use hard links, but this messes up
481 # when you use emacs to edit a file in the target dir, as emacs moves
482 # file to file~ then copies to file, breaking the link. Symbolic
483 # (soft) links work better.
484 main
.SetOption('duplicate', 'soft-copy')
487 # Set up global sticky variables... these are common to an entire build
488 # tree (not specific to a particular build like ALPHA_SE)
491 global_vars_file
= joinpath(build_root
, 'variables.global')
493 global_vars
= Variables(global_vars_file
, args
=ARGUMENTS
)
495 global_vars
.AddVariables(
496 ('CC', 'C compiler', environ
.get('CC', main
['CC'])),
497 ('CXX', 'C++ compiler', environ
.get('CXX', main
['CXX'])),
498 ('SWIG', 'SWIG tool', environ
.get('SWIG', main
['SWIG'])),
499 ('PROTOC', 'protoc tool', environ
.get('PROTOC', 'protoc')),
500 ('BATCH', 'Use batch pool for build and tests', False),
501 ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
502 ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
503 ('EXTRAS', 'Add extra directories to the compilation', '')
506 # Update main environment with values from ARGUMENTS & global_vars_file
507 global_vars
.Update(main
)
508 help_texts
["global_vars"] += global_vars
.GenerateHelpText(main
)
510 # Save sticky variable settings back to current variables file
511 global_vars
.Save(global_vars_file
, main
)
513 # Parse EXTRAS variable to build list of all directories where we'll
514 # look for sources etc. This list is exported as extras_dir_list.
515 base_dir
= main
.srcdir
.abspath
517 extras_dir_list
= makePathListAbsolute(main
['EXTRAS'].split(':'))
522 Export('extras_dir_list')
524 # the ext directory should be on the #includes path
525 main
.Append(CPPPATH
=[Dir('ext')])
def strip_build_path(path, env):
    """Strip the build-root prefix (env['BUILDROOT'] + separator) or a
    leading 'build/' from 'path' and return the result as a string.
    'path' may be a string or an SCons Node (it is str()-coerced).
    Used by Transform to shorten the paths printed during the build.

    NOTE(review): the str() coercion, the 'build/' strip and the return
    statement were missing from this mangled chunk and have been
    reconstructed from the surviving fragments.
    """
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path
536 # Generate a string of the form:
537 # common/path/prefix/src1, src2 -> tgt1, tgt2
538 # to print while building.
class Transform(object):
    """Generate an abbreviated build-progress string of the form:
         [ TOOL ] common/path/prefix/src1, src2 -> tgt1, tgt2
    Instances are assigned to *COMSTR construction variables below.

    NOTE(review): several interior lines (guards, else/pass arms and
    the fmt helper) were missing from this mangled chunk and have been
    reconstructed from the surviving fragments.
    """
    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        # Pre-build the colorized "[tool] prefix srcs -> tgts" template.
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]
        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
        # recalculate length in case com_pfx was modified
        com_pfx_len = len(com_pfx)
        def fmt(files):
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))
608 # enable the regression script to use the termcap
609 main
['TERMCAP'] = termcap
if GetOption('verbose'):
    # Verbose builds: MakeAction drops the abbreviated 'string' argument
    # so scons echoes the full tool command line for each build step.
    # NOTE(review): the non-verbose 'else' branch is not visible in this
    # chunk; presumably it defines a MakeAction that uses 'string' as
    # the abbreviated output — confirm against the full file.
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
616 main
['CCCOMSTR'] = Transform("CC")
617 main
['CXXCOMSTR'] = Transform("CXX")
618 main
['ASCOMSTR'] = Transform("AS")
619 main
['SWIGCOMSTR'] = Transform("SWIG")
620 main
['ARCOMSTR'] = Transform("AR", 0)
621 main
['LINKCOMSTR'] = Transform("LINK", 0)
622 main
['RANLIBCOMSTR'] = Transform("RANLIB", 0)
623 main
['M4COMSTR'] = Transform("M4")
624 main
['SHCCCOMSTR'] = Transform("SHCC")
625 main
['SHCXXCOMSTR'] = Transform("SHCXX")
628 # Initialize the Link-Time Optimization (LTO) flags
629 main
['LTO_CCFLAGS'] = []
630 main
['LTO_LDFLAGS'] = []
632 # According to the readme, tcmalloc works best if the compiler doesn't
633 # assume that we're using the builtin malloc and friends. These flags
634 # are compiler-specific, so we need to set them after we detect which
635 # compiler we're using.
636 main
['TCMALLOC_CCFLAGS'] = []
638 CXX_version
= readCommand([main
['CXX'],'--version'], exception
=False)
639 CXX_V
= readCommand([main
['CXX'],'-V'], exception
=False)
641 main
['GCC'] = CXX_version
and CXX_version
.find('g++') >= 0
642 main
['CLANG'] = CXX_version
and CXX_version
.find('clang') >= 0
643 if main
['GCC'] + main
['CLANG'] > 1:
644 print 'Error: How can we have two at the same time?'
647 # Set up default C++ compiler flags
648 if main
['GCC'] or main
['CLANG']:
649 # As gcc and clang share many flags, do the common parts here
650 main
.Append(CCFLAGS
=['-pipe'])
651 main
.Append(CCFLAGS
=['-fno-strict-aliasing'])
652 # Enable -Wall and -Wextra and then disable the few warnings that
653 # we consistently violate
654 main
.Append(CCFLAGS
=['-Wall', '-Wundef', '-Wextra',
655 '-Wno-sign-compare', '-Wno-unused-parameter'])
656 # We always compile using C++11
657 main
.Append(CXXFLAGS
=['-std=c++11'])
658 if sys
.platform
.startswith('freebsd'):
659 main
.Append(CCFLAGS
=['-I/usr/local/include'])
660 main
.Append(CXXFLAGS
=['-I/usr/local/include'])
662 print termcap
.Yellow
+ termcap
.Bold
+ 'Error' + termcap
.Normal
,
663 print "Don't know what compiler options to use for your compiler."
664 print termcap
.Yellow
+ ' compiler:' + termcap
.Normal
, main
['CXX']
665 print termcap
.Yellow
+ ' version:' + termcap
.Normal
,
667 print termcap
.Yellow
+ termcap
.Bold
+ "COMMAND NOT FOUND!" +\
670 print CXX_version
.replace('\n', '<nl>')
671 print " If you're trying to use a compiler other than GCC"
672 print " or clang, there appears to be something wrong with your"
673 print " environment."
675 print " If you are trying to use a compiler other than those listed"
676 print " above you will need to ease fix SConstruct and "
677 print " src/SConscript to support that compiler."
681 # Check for a supported version of gcc. >= 4.8 is chosen for its
682 # level of c++11 support. See
683 # http://gcc.gnu.org/projects/cxx0x.html for details.
684 gcc_version
= readCommand([main
['CXX'], '-dumpversion'], exception
=False)
685 if compareVersions(gcc_version
, "4.8") < 0:
686 print 'Error: gcc version 4.8 or newer required.'
687 print ' Installed version:', gcc_version
690 main
['GCC_VERSION'] = gcc_version
692 # gcc from version 4.8 and above generates "rep; ret" instructions
693 # to avoid performance penalties on certain AMD chips. Older
694 # assemblers detect this as an error, "Error: expecting string
695 # instruction after `rep'"
696 as_version_raw
= readCommand([main
['AS'], '-v', '/dev/null'],
697 exception
=False).split()
699 # version strings may contain extra distro-specific
700 # qualifiers, so play it safe and keep only what comes before
702 as_version
= as_version_raw
[-1].split('-')[0] if as_version_raw
else None
704 if not as_version
or compareVersions(as_version
, "2.23") < 0:
705 print termcap
.Yellow
+ termcap
.Bold
+ \
706 'Warning: This combination of gcc and binutils have' + \
707 ' known incompatibilities.\n' + \
708 ' If you encounter build problems, please update ' + \
709 'binutils to 2.23.' + \
712 # Make sure we warn if the user has requested to compile with the
713 # Undefined Behavior Sanitizer and this version of gcc does not
715 if GetOption('with_ubsan') and \
716 compareVersions(gcc_version
, '4.9') < 0:
717 print termcap
.Yellow
+ termcap
.Bold
+ \
718 'Warning: UBSan is only supported using gcc 4.9 and later.' + \
721 # Add the appropriate Link-Time Optimization (LTO) flags
722 # unless LTO is explicitly turned off. Note that these flags
723 # are only used by the fast target.
724 if not GetOption('no_lto'):
725 # Pass the LTO flag when compiling to produce GIMPLE
726 # output, we merely create the flags here and only append
728 main
['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
730 # Use the same amount of jobs for LTO as we are running
732 main
['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
734 main
.Append(TCMALLOC_CCFLAGS
=['-fno-builtin-malloc', '-fno-builtin-calloc',
735 '-fno-builtin-realloc', '-fno-builtin-free'])
737 # add option to check for undeclared overrides
738 if compareVersions(gcc_version
, "5.0") > 0:
739 main
.Append(CCFLAGS
=['-Wno-error=suggest-override'])
742 # Check for a supported version of clang, >= 3.1 is needed to
743 # support similar features as gcc 4.8. See
744 # http://clang.llvm.org/cxx_status.html for details
745 clang_version_re
= re
.compile(".* version (\d+\.\d+)")
746 clang_version_match
= clang_version_re
.search(CXX_version
)
747 if (clang_version_match
):
748 clang_version
= clang_version_match
.groups()[0]
749 if compareVersions(clang_version
, "3.1") < 0:
750 print 'Error: clang version 3.1 or newer required.'
751 print ' Installed version:', clang_version
754 print 'Error: Unable to determine clang version.'
757 # clang has a few additional warnings that we disable, extraneous
758 # parentheses are allowed due to Ruby's printing of the AST,
759 # finally self assignments are allowed as the generated CPU code
761 main
.Append(CCFLAGS
=['-Wno-parentheses',
763 # Some versions of libstdc++ (4.8?) seem to
764 # use struct hash and class hash
766 '-Wno-mismatched-tags',
769 main
.Append(TCMALLOC_CCFLAGS
=['-fno-builtin'])
771 # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
772 # opposed to libstdc++, as the later is dated.
773 if sys
.platform
== "darwin":
774 main
.Append(CXXFLAGS
=['-stdlib=libc++'])
775 main
.Append(LIBS
=['c++'])
777 # On FreeBSD we need libthr.
778 if sys
.platform
.startswith('freebsd'):
779 main
.Append(LIBS
=['thr'])
782 print termcap
.Yellow
+ termcap
.Bold
+ 'Error' + termcap
.Normal
,
783 print "Don't know what compiler options to use for your compiler."
784 print termcap
.Yellow
+ ' compiler:' + termcap
.Normal
, main
['CXX']
785 print termcap
.Yellow
+ ' version:' + termcap
.Normal
,
787 print termcap
.Yellow
+ termcap
.Bold
+ "COMMAND NOT FOUND!" +\
790 print CXX_version
.replace('\n', '<nl>')
791 print " If you're trying to use a compiler other than GCC"
792 print " or clang, there appears to be something wrong with your"
793 print " environment."
795 print " If you are trying to use a compiler other than those listed"
796 print " above you will need to ease fix SConstruct and "
797 print " src/SConscript to support that compiler."
800 # Set up common yacc/bison flags (needed for Ruby)
801 main
['YACCFLAGS'] = '-d'
802 main
['YACCHXXFILESUFFIX'] = '.hh'
804 # Do this after we save setting back, or else we'll tack on an
805 # extra 'qdo' every time we run scons.
807 main
['CC'] = main
['BATCH_CMD'] + ' ' + main
['CC']
808 main
['CXX'] = main
['BATCH_CMD'] + ' ' + main
['CXX']
809 main
['AS'] = main
['BATCH_CMD'] + ' ' + main
['AS']
810 main
['AR'] = main
['BATCH_CMD'] + ' ' + main
['AR']
811 main
['RANLIB'] = main
['BATCH_CMD'] + ' ' + main
['RANLIB']
813 if sys
.platform
== 'cygwin':
814 # cygwin has some header file issues...
815 main
.Append(CCFLAGS
=["-Wno-uninitialized"])
817 # Check for the protobuf compiler
818 protoc_version
= readCommand([main
['PROTOC'], '--version'],
819 exception
='').split()
821 # First two words should be "libprotoc x.y.z"
822 if len(protoc_version
) < 2 or protoc_version
[0] != 'libprotoc':
823 print termcap
.Yellow
+ termcap
.Bold
+ \
824 'Warning: Protocol buffer compiler (protoc) not found.\n' + \
825 ' Please install protobuf-compiler for tracing support.' + \
827 main
['PROTOC'] = False
829 # Based on the availability of the compress stream wrappers,
831 min_protoc_version
= '2.1.0'
832 if compareVersions(protoc_version
[1], min_protoc_version
) < 0:
833 print termcap
.Yellow
+ termcap
.Bold
+ \
834 'Warning: protoc version', min_protoc_version
, \
835 'or newer required.\n' + \
836 ' Installed version:', protoc_version
[1], \
838 main
['PROTOC'] = False
840 # Attempt to determine the appropriate include path and
841 # library path using pkg-config, that means we also need to
842 # check for pkg-config. Note that it is possible to use
843 # protobuf without the involvement of pkg-config. Later on we
844 # go through a library config check and at that point the test
845 # will fail if libprotobuf cannot be found.
846 if readCommand(['pkg-config', '--version'], exception
=''):
848 # Attempt to establish what linking flags to add for protobuf
850 main
.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
852 print termcap
.Yellow
+ termcap
.Bold
+ \
853 'Warning: pkg-config could not get protobuf flags.' + \
857 if not main
.has_key('SWIG'):
858 print 'Error: SWIG utility not found.'
859 print ' Please install (see http://www.swig.org) and retry.'
862 # Check for appropriate SWIG version
863 swig_version
= readCommand([main
['SWIG'], '-version'], exception
='').split()
864 # First 3 words should be "SWIG Version x.y.z"
865 if len(swig_version
) < 3 or \
866 swig_version
[0] != 'SWIG' or swig_version
[1] != 'Version':
867 print 'Error determining SWIG version.'
870 min_swig_version
= '2.0.4'
871 if compareVersions(swig_version
[2], min_swig_version
) < 0:
872 print 'Error: SWIG version', min_swig_version
, 'or newer required.'
873 print ' Installed version:', swig_version
[2]
876 # Check for known incompatibilities. The standard library shipped with
877 # gcc >= 4.9 does not play well with swig versions prior to 3.0
878 if main
['GCC'] and compareVersions(gcc_version
, '4.9') >= 0 and \
879 compareVersions(swig_version
[2], '3.0') < 0:
880 print termcap
.Yellow
+ termcap
.Bold
+ \
881 'Warning: This combination of gcc and swig have' + \
882 ' known incompatibilities.\n' + \
883 ' If you encounter build problems, please update ' + \
884 'swig to 3.0 or later.' + \
887 # Set up SWIG flags & scanner
888 swig_flags
=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
889 main
.Append(SWIGFLAGS
=swig_flags
)
891 # Check for 'timeout' from GNU coreutils. If present, regressions will
892 # be run with a time limit. We require version 8.13 since we rely on
893 # support for the '--foreground' option.
894 if sys
.platform
.startswith('freebsd'):
895 timeout_lines
= readCommand(['gtimeout', '--version'],
896 exception
='').splitlines()
898 timeout_lines
= readCommand(['timeout', '--version'],
899 exception
='').splitlines()
900 # Get the first line and tokenize it
901 timeout_version
= timeout_lines
[0].split() if timeout_lines
else []
902 main
['TIMEOUT'] = timeout_version
and \
903 compareVersions(timeout_version
[-1], '8.13') >= 0
905 # filter out all existing swig scanners, they mess up the dependency
906 # stuff for some reason
908 for scanner
in main
['SCANNERS']:
909 skeys
= scanner
.skeys
913 if isinstance(skeys
, (list, tuple)) and '.i' in skeys
:
916 scanners
.append(scanner
)
918 # add the new swig scanner that we like better
919 from SCons
.Scanner
import ClassicCPP
as CPPScanner
920 swig_inc_re
= '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
921 scanners
.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re
))
923 # replace the scanners list that has what we want
924 main
['SCANNERS'] = scanners
926 # Add a custom Check function to test for structure members.
927 def CheckMember(context
, include
, decl
, member
, include_quotes
="<>"):
928 context
.Message("Checking for member %s in %s..." %
934 (void)test.%(member)s;
937 """ % { "header" : include_quotes
[0] + include
+ include_quotes
[1],
942 ret
= context
.TryCompile(text
, extension
=".cc")
946 # Platform-specific configuration. Note again that we assume that all
947 # builds under a given build root run on the same host platform.
948 conf
= Configure(main
,
949 conf_dir
= joinpath(build_root
, '.scons_config'),
950 log_file
= joinpath(build_root
, 'scons_config.log'),
952 'CheckMember' : CheckMember
,
955 # Check if we should compile a 64 bit binary on Mac OS X/Darwin
958 uname
= platform
.uname()
959 if uname
[0] == 'Darwin' and compareVersions(uname
[2], '9.0.0') >= 0:
960 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
961 main
.Append(CCFLAGS
=['-arch', 'x86_64'])
962 main
.Append(CFLAGS
=['-arch', 'x86_64'])
963 main
.Append(LINKFLAGS
=['-arch', 'x86_64'])
964 main
.Append(ASFLAGS
=['-arch', 'x86_64'])
968 # Recent versions of scons substitute a "Null" object for Configure()
969 # when configuration isn't necessary, e.g., if the "--help" option is
970 # present. Unfortunately this Null object always returns false,
971 # breaking all our configuration checks. We replace it with our own
972 # more optimistic null object that returns True instead.
974 def NullCheck(*args
, **kwargs
):
978 def __init__(self
, env
):
982 def __getattr__(self
, mname
):
985 conf
= NullConf(main
)
987 # Cache build files in the supplied directory.
988 if main
['M5_BUILD_CACHE']:
989 print 'Using build cache located at', main
['M5_BUILD_CACHE']
990 CacheDir(main
['M5_BUILD_CACHE'])
992 if not GetOption('without_python'):
993 # Find Python include and library directories for embedding the
994 # interpreter. We rely on python-config to resolve the appropriate
995 # includes and linker flags. ParseConfig does not seem to understand
996 # the more exotic linker flags such as -Xlinker and -export-dynamic so
997 # we add them explicitly below. If you want to link in an alternate
998 # version of python, see above for instructions on how to invoke
999 # scons with the appropriate PATH set.
1001 # First we check if python2-config exists, else we use python-config
1002 python_config
= readCommand(['which', 'python2-config'],
1003 exception
='').strip()
1004 if not os
.path
.exists(python_config
):
1005 python_config
= readCommand(['which', 'python-config'],
1006 exception
='').strip()
1007 py_includes
= readCommand([python_config
, '--includes'],
1008 exception
='').split()
1009 # Strip the -I from the include folders before adding them to the
1011 main
.Append(CPPPATH
=map(lambda inc
: inc
[2:], py_includes
))
1013 # Read the linker flags and split them into libraries and other link
1014 # flags. The libraries are added later through the call the CheckLib.
1015 py_ld_flags
= readCommand([python_config
, '--ldflags'],
1016 exception
='').split()
1018 for lib
in py_ld_flags
:
1019 if not lib
.startswith('-l'):
1020 main
.Append(LINKFLAGS
=[lib
])
1023 if lib
not in py_libs
:
1026 # verify that this stuff works
1027 if not conf
.CheckHeader('Python.h', '<>'):
1028 print "Error: can't find Python.h header in", py_includes
1029 print "Install Python headers (package python-dev on Ubuntu and RedHat)"
1033 if not conf
.CheckLib(lib
):
1034 print "Error: can't find library %s required by python" % lib
1037 # On Solaris you need to use libsocket for socket ops
1038 if not conf
.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1039 if not conf
.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1040 print "Can't find library with socket calls (e.g. accept())"
1043 # Check for zlib. If the check passes, libz will be automatically
1044 # added to the LIBS environment variable.
1045 if not conf
.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
1046 print 'Error: did not find needed zlib compression library '\
1047 'and/or zlib.h header file.'
1048 print ' Please install zlib and try again.'
1051 # If we have the protobuf compiler, also make sure we have the
1052 # development libraries. If the check passes, libprotobuf will be
1053 # automatically added to the LIBS environment variable. After
1054 # this, we can use the HAVE_PROTOBUF flag to determine if we have
1055 # got both protoc and libprotobuf available.
1056 main
['HAVE_PROTOBUF'] = main
['PROTOC'] and \
1057 conf
.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
1058 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
1060 # If we have the compiler but not the library, print another warning.
1061 if main
['PROTOC'] and not main
['HAVE_PROTOBUF']:
1062 print termcap
.Yellow
+ termcap
.Bold
+ \
1063 'Warning: did not find protocol buffer library and/or headers.\n' + \
1064 ' Please install libprotobuf-dev for tracing support.' + \
1068 have_posix_clock
= \
1069 conf
.CheckLibWithHeader(None, 'time.h', 'C',
1070 'clock_nanosleep(0,0,NULL,NULL);') or \
1071 conf
.CheckLibWithHeader('rt', 'time.h', 'C',
1072 'clock_nanosleep(0,0,NULL,NULL);')
1074 have_posix_timers
= \
1075 conf
.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
1076 'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
1078 if not GetOption('without_tcmalloc'):
1079 if conf
.CheckLib('tcmalloc'):
1080 main
.Append(CCFLAGS
=main
['TCMALLOC_CCFLAGS'])
1081 elif conf
.CheckLib('tcmalloc_minimal'):
1082 main
.Append(CCFLAGS
=main
['TCMALLOC_CCFLAGS'])
1084 print termcap
.Yellow
+ termcap
.Bold
+ \
1085 "You can get a 12% performance improvement by "\
1086 "installing tcmalloc (libgoogle-perftools-dev package "\
1087 "on Ubuntu or RedHat)." + termcap
.Normal
1090 # Detect back trace implementations. The last implementation in the
1091 # list will be used by default.
1092 backtrace_impls
= [ "none" ]
1094 if conf
.CheckLibWithHeader(None, 'execinfo.h', 'C',
1095 'backtrace_symbols_fd((void*)0, 0, 0);'):
1096 backtrace_impls
.append("glibc")
1097 elif conf
.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
1098 'backtrace_symbols_fd((void*)0, 0, 0);'):
1099 # NetBSD and FreeBSD need libexecinfo.
1100 backtrace_impls
.append("glibc")
1101 main
.Append(LIBS
=['execinfo'])
1103 if backtrace_impls
[-1] == "none":
1104 default_backtrace_impl
= "none"
1105 print termcap
.Yellow
+ termcap
.Bold
+ \
1106 "No suitable back trace implementation found." + \
1109 if not have_posix_clock
:
1110 print "Can't find library for POSIX clocks."
1112 # Check for <fenv.h> (C99 FP environment control)
1113 have_fenv
= conf
.CheckHeader('fenv.h', '<>')
1115 print "Warning: Header file <fenv.h> not found."
1116 print " This host has no IEEE FP rounding mode control."
1118 # Check if we should enable KVM-based hardware virtualization. The API
1119 # we rely on exists since version 2.6.36 of the kernel, but somehow
1120 # the KVM_API_VERSION does not reflect the change. We test for one of
1121 # the types as a fall back.
1122 have_kvm
= conf
.CheckHeader('linux/kvm.h', '<>')
1124 print "Info: Compatible header file <linux/kvm.h> not found, " \
1125 "disabling KVM support."
1127 # x86 needs support for xsave. We test for the structure here since we
1128 # won't be able to run new tests by the time we know which ISA we're
1130 have_kvm_xsave
= conf
.CheckTypeSize('struct kvm_xsave',
1131 '#include <linux/kvm.h>') != 0
1133 # Check if the requested target ISA is compatible with the host
1134 def is_isa_kvm_compatible(isa
):
1137 host_isa
= platform
.machine()
1139 print "Warning: Failed to determine host ISA."
1142 if not have_posix_timers
:
1143 print "Warning: Can not enable KVM, host seems to lack support " \
1148 return host_isa
in ( "armv7l", "aarch64" )
1150 if host_isa
!= "x86_64":
1153 if not have_kvm_xsave
:
1154 print "KVM on x86 requires xsave support in kernel headers."
1162 # Check if the exclude_host attribute is available. We want this to
1163 # get accurate instruction counts in KVM.
1164 main
['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf
.CheckMember(
1165 'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1168 ######################################################################
1170 # Finish the configuration
1172 main
= conf
.Finish()
1174 ######################################################################
1176 # Collect all non-global variables
1179 # Define the universe of supported ISAs
1181 all_gpu_isa_list
= [ ]
1182 Export('all_isa_list')
1183 Export('all_gpu_isa_list')
class CpuModel(object):
    '''The CpuModel class encapsulates everything the ISA parser needs to
    know about a particular CPU model.'''

    # Dict of available CPU model objects. Accessible as CpuModel.dict.
    dict = {}

    # Constructor. Automatically adds models to CpuModel.dict.
    def __init__(self, name, default=False):
        self.name = name           # name of model

        # This cpu is enabled by default
        self.default = default

        # Add self to dict; registering the same model name twice is a
        # programming error, so fail loudly.
        if name in CpuModel.dict:
            # Call form of raise: equivalent in Python 2 and also valid
            # under Python 3 (the old "raise Exc, msg" form is 2-only).
            raise AttributeError("CpuModel '%s' already registered" % name)
        CpuModel.dict[name] = self
1206 # Sticky variables get saved in the variables file so they persist from
1207 # one invocation to the next (unless overridden, in which case the new
1208 # value becomes sticky).
1209 sticky_vars
= Variables(args
=ARGUMENTS
)
1210 Export('sticky_vars')
1212 # Sticky variables that should be exported
1214 Export('export_vars')
1218 Export('all_protocols')
1220 Export('protocol_dirs')
1222 Export('slicc_includes')
1224 # Walk the tree and execute all SConsopts scripts that will add to the
1226 if GetOption('verbose'):
1227 print "Reading SConsopts"
1228 for bdir
in [ base_dir
] + extras_dir_list
:
1230 print "Error: directory '%s' does not exist" % bdir
1232 for root
, dirs
, files
in os
.walk(bdir
):
1233 if 'SConsopts' in files
:
1234 if GetOption('verbose'):
1235 print "Reading", joinpath(root
, 'SConsopts')
1236 SConscript(joinpath(root
, 'SConsopts'))
1239 all_gpu_isa_list
.sort()
1241 sticky_vars
.AddVariables(
1242 EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list
),
1243 EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list
),
1244 ListVariable('CPU_MODELS', 'CPU models',
1245 sorted(n
for n
,m
in CpuModel
.dict.iteritems() if m
.default
),
1246 sorted(CpuModel
.dict.keys())),
1247 BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1249 BoolVariable('SS_COMPATIBLE_FP',
1250 'Make floating-point results compatible with SimpleScalar',
1252 BoolVariable('USE_SSE2',
1253 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1255 BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock
),
1256 BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv
),
1257 BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
1258 BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm
),
1259 BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
1260 EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1262 EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
1263 backtrace_impls
[-1], backtrace_impls
)
1266 # These variables get exported to #defines in config/*.hh (see src/SConscript).
1267 export_vars
+= ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
1268 'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL',
1269 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']
1271 ###################################################
1273 # Define a SCons builder for configuration flag headers.
1275 ###################################################
1277 # This function generates a config header file that #defines the
1278 # variable symbol to the current variable setting (0 or 1). The source
1279 # operands are the name of the variable and a Value node containing the
1280 # value of the variable.
def build_config_file(target, source, env):
    """Generate a config header #defining a variable to its setting.

    source[0] is a Value node carrying the variable name and source[1]
    a Value node carrying its value; target[0] is the header to write.
    Returns None so SCons treats the action as successful.
    """
    (variable, value) = [s.get_contents() for s in source]
    # Use a context manager so the header is flushed and closed even if
    # the write fails; this also avoids the deprecated file() builtin.
    with open(str(target[0]), 'w') as f:
        # Same bytes as the old "print >> f, '#define', variable, value".
        f.write('#define %s %s\n' % (variable, value))
    return None
1288 # Combine the two functions into a scons Action object.
1289 config_action
= MakeAction(build_config_file
, Transform("CONFIG H", 2))
1291 # The emitter munges the source & target node lists to reflect what
1292 # we're really doing.
def config_emitter(target, source, env):
    """Emitter for the ConfigFile builder: map the variable-name target
    onto the real header path and package the name/value pair as SCons
    Value source nodes so changes to the value trigger a rebuild."""
    # The Builder target argument carries only the variable's name.
    variable = str(target[0])
    # True target is the generated header under config/.
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        val = int(val)              # force bools to 0/1 for the #define
    elif isinstance(val, str):
        val = '"' + val + '"'       # emit strings as quoted literals

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])
1308 config_builder
= Builder(emitter
= config_emitter
, action
= config_action
)
1310 main
.Append(BUILDERS
= { 'ConfigFile' : config_builder
})
1312 # libelf build is shared across all configs in the build root.
1313 main
.SConscript('ext/libelf/SConscript',
1314 variant_dir
= joinpath(build_root
, 'libelf'))
1316 # iostream3 build is shared across all configs in the build root.
1317 main
.SConscript('ext/iostream3/SConscript',
1318 variant_dir
= joinpath(build_root
, 'iostream3'))
1320 # libfdt build is shared across all configs in the build root.
1321 main
.SConscript('ext/libfdt/SConscript',
1322 variant_dir
= joinpath(build_root
, 'libfdt'))
1324 # fputils build is shared across all configs in the build root.
1325 main
.SConscript('ext/fputils/SConscript',
1326 variant_dir
= joinpath(build_root
, 'fputils'))
1328 # DRAMSim2 build is shared across all configs in the build root.
1329 main
.SConscript('ext/dramsim2/SConscript',
1330 variant_dir
= joinpath(build_root
, 'dramsim2'))
1332 # DRAMPower build is shared across all configs in the build root.
1333 main
.SConscript('ext/drampower/SConscript',
1334 variant_dir
= joinpath(build_root
, 'drampower'))
1336 # nomali build is shared across all configs in the build root.
1337 main
.SConscript('ext/nomali/SConscript',
1338 variant_dir
= joinpath(build_root
, 'nomali'))
1340 ###################################################
1342 # This function is used to set up a directory with switching headers
1344 ###################################################
1346 main
['ALL_ISA_LIST'] = all_isa_list
1347 main
['ALL_GPU_ISA_LIST'] = all_gpu_isa_list
1349 def make_switching_dir(dname
, switch_headers
, env
):
1350 # Generate the header. target[0] is the full path of the output
1351 # header to generate. 'source' is a dummy variable, since we get the
1352 # list of ISAs from env['ALL_ISA_LIST'].
1353 def gen_switch_hdr(target
, source
, env
):
1354 fname
= str(target
[0])
1355 isa
= env
['TARGET_ISA'].lower()
1357 f
= open(fname
, 'w')
1358 print >>f
, '#include "%s/%s/%s"' % (dname
, isa
, basename(fname
))
1361 print "Failed to create %s" % fname
1364 # Build SCons Action object. 'varlist' specifies env vars that this
1365 # action depends on; when env['ALL_ISA_LIST'] changes these actions
1366 # should get re-executed.
1367 switch_hdr_action
= MakeAction(gen_switch_hdr
,
1368 Transform("GENERATE"), varlist
=['ALL_ISA_LIST'])
1370 # Instantiate actions for each header
1371 for hdr
in switch_headers
:
1372 env
.Command(hdr
, [], switch_hdr_action
)
1374 isa_target
= Dir('.').up().name
.lower().replace('_', '-')
1375 env
['PHONY_BASE'] = '#'+isa_target
1376 all_isa_deps
[isa_target
] = None
1378 Export('make_switching_dir')
1380 def make_gpu_switching_dir(dname
, switch_headers
, env
):
1381 # Generate the header. target[0] is the full path of the output
1382 # header to generate. 'source' is a dummy variable, since we get the
1383 # list of ISAs from env['ALL_ISA_LIST'].
1384 def gen_switch_hdr(target
, source
, env
):
1385 fname
= str(target
[0])
1387 isa
= env
['TARGET_GPU_ISA'].lower()
1390 f
= open(fname
, 'w')
1391 print >>f
, '#include "%s/%s/%s"' % (dname
, isa
, basename(fname
))
1394 print "Failed to create %s" % fname
1397 # Build SCons Action object. 'varlist' specifies env vars that this
1398 # action depends on; when env['ALL_ISA_LIST'] changes these actions
1399 # should get re-executed.
1400 switch_hdr_action
= MakeAction(gen_switch_hdr
,
1401 Transform("GENERATE"), varlist
=['ALL_ISA_GPU_LIST'])
1403 # Instantiate actions for each header
1404 for hdr
in switch_headers
:
1405 env
.Command(hdr
, [], switch_hdr_action
)
1407 Export('make_gpu_switching_dir')
1409 # all-isas -> all-deps -> all-environs -> all_targets
1410 main
.Alias('#all-isas', [])
1411 main
.Alias('#all-deps', '#all-isas')
1413 # Dummy target to ensure all environments are created before telling
1414 # SCons what to actually make (the command line arguments). We attach
1415 # them to the dependence graph after the environments are complete.
1416 ORIG_BUILD_TARGETS
= list(BUILD_TARGETS
) # force a copy; gets closure to work.
def environsComplete(target, source, env):
    # Runs once every variant environment has been created: attach each
    # of the originally-requested command-line targets to the phony
    # '#all-targets' node so they are built after all environs exist.
    for tgt in ORIG_BUILD_TARGETS:
        main.Depends('#all-targets', tgt)
1421 # Each build/* switching_dir attaches its *-environs target to #all-environs.
1422 main
.Append(BUILDERS
= {'CompleteEnvirons' :
1423 Builder(action
=MakeAction(environsComplete
, None))})
1424 main
.CompleteEnvirons('#all-environs', [])
def doNothing(**ignored):
    # Intentional no-op: used as the action body of the Dummy builder.
    pass
1427 main
.Append(BUILDERS
= {'Dummy': Builder(action
=MakeAction(doNothing
, None))})
1429 # The final target to which all the original targets ultimately get attached.
1430 main
.Dummy('#all-targets', '#all-environs')
1431 BUILD_TARGETS
[:] = ['#all-targets']
1433 ###################################################
1435 # Define build environments for selected configurations.
1437 ###################################################
1439 for variant_path
in variant_paths
:
1440 if not GetOption('silent'):
1441 print "Building in", variant_path
1443 # Make a copy of the build-root environment to use for this config.
1445 env
['BUILDDIR'] = variant_path
1447 # variant_dir is the tail component of build path, and is used to
1448 # determine the build parameters (e.g., 'ALPHA_SE')
1449 (build_root
, variant_dir
) = splitpath(variant_path
)
1451 # Set env variables according to the build directory config.
1452 sticky_vars
.files
= []
1453 # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
1454 # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
1455 # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
1456 current_vars_file
= joinpath(build_root
, 'variables', variant_dir
)
1457 if isfile(current_vars_file
):
1458 sticky_vars
.files
.append(current_vars_file
)
1459 if not GetOption('silent'):
1460 print "Using saved variables file %s" % current_vars_file
1462 # Build dir-specific variables file doesn't exist.
1464 # Make sure the directory is there so we can create it later
1465 opt_dir
= dirname(current_vars_file
)
1466 if not isdir(opt_dir
):
1469 # Get default build variables from source tree. Variables are
1470 # normally determined by name of $VARIANT_DIR, but can be
1471 # overridden by '--default=' arg on command line.
1472 default
= GetOption('default')
1473 opts_dir
= joinpath(main
.root
.abspath
, 'build_opts')
1475 default_vars_files
= [joinpath(build_root
, 'variables', default
),
1476 joinpath(opts_dir
, default
)]
1478 default_vars_files
= [joinpath(opts_dir
, variant_dir
)]
1479 existing_files
= filter(isfile
, default_vars_files
)
1481 default_vars_file
= existing_files
[0]
1482 sticky_vars
.files
.append(default_vars_file
)
1483 print "Variables file %s not found,\n using defaults in %s" \
1484 % (current_vars_file
, default_vars_file
)
1486 print "Error: cannot find variables file %s or " \
1487 "default file(s) %s" \
1488 % (current_vars_file
, ' or '.join(default_vars_files
))
1491 # Apply current variable settings to env
1492 sticky_vars
.Update(env
)
1494 help_texts
["local_vars"] += \
1495 "Build variables for %s:\n" % variant_dir \
1496 + sticky_vars
.GenerateHelpText(env
)
1498 # Process variable settings.
1500 if not have_fenv
and env
['USE_FENV']:
1501 print "Warning: <fenv.h> not available; " \
1502 "forcing USE_FENV to False in", variant_dir
+ "."
1503 env
['USE_FENV'] = False
1505 if not env
['USE_FENV']:
1506 print "Warning: No IEEE FP rounding mode control in", variant_dir
+ "."
1507 print " FP results may deviate slightly from other platforms."
1510 env
.Append(LIBS
=['efence'])
1514 print "Warning: Can not enable KVM, host seems to lack KVM support"
1515 env
['USE_KVM'] = False
1516 elif not is_isa_kvm_compatible(env
['TARGET_ISA']):
1517 print "Info: KVM support disabled due to unsupported host and " \
1518 "target ISA combination"
1519 env
['USE_KVM'] = False
1521 if env
['BUILD_GPU']:
1522 env
.Append(CPPDEFINES
=['BUILD_GPU'])
1524 # Warn about missing optional functionality
1526 if not main
['HAVE_PERF_ATTR_EXCLUDE_HOST']:
1527 print "Warning: perf_event headers lack support for the " \
1528 "exclude_host attribute. KVM instruction counts will " \
1531 # Save sticky variable settings back to current variables file
1532 sticky_vars
.Save(current_vars_file
, env
)
1535 env
.Append(CCFLAGS
=['-msse2'])
1537 # The src/SConscript file sets up the build rules in 'env' according
1538 # to the configured variables. It returns a list of environments,
1539 # one for each variant build (debug, opt, etc.)
1540 SConscript('src/SConscript', variant_dir
= variant_path
, exports
= 'env')
def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
    # Standard itertools recipe: tee the iterable, advance the second
    # iterator by one, then zip so consecutive overlapping pairs emerge.
    a, b = itertools.tee(iterable)
    # next(b, None) rather than b.next(): does not raise StopIteration
    # when the iterable is empty, and works on any iterator type.
    next(b, None)
    return itertools.izip(a, b)
1548 # Create false dependencies so SCons will parse ISAs, establish
1549 # dependencies, and setup the build Environments serially. Either
1550 # SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
1551 # greater than 1. It appears to be standard race condition stuff; it
1552 # doesn't always fail, but usually, and the behaviors are different.
1553 # Every time I tried to remove this, builds would fail in some
1554 # creative new way. So, don't do that. You'll want to, though, because
1555 # tests/SConscript takes a long time to make its Environments.
1556 for t1
, t2
in pairwise(sorted(all_isa_deps
.iterkeys())):
1557 main
.Depends('#%s-deps' % t2
, '#%s-deps' % t1
)
1558 main
.Depends('#%s-environs' % t2
, '#%s-environs' % t1
)
1562 Usage: scons [scons options] [build variables] [target(s)]
1564 Extra scons options:
1567 Global build variables: