3 # Copyright (c) 2013, 2015-2017 ARM Limited
6 # The license below extends only to copyright in the software and shall
7 # not be construed as granting a license to any other intellectual
8 # property including but not limited to intellectual property relating
9 # to a hardware implementation of the functionality of the software
10 # licensed hereunder. You may use the software subject to the license
11 # terms below provided that you ensure that this notice is replicated
12 # unmodified and in its entirety in all distributions of the software,
13 # modified or unmodified, in source code or in binary form.
15 # Copyright (c) 2011 Advanced Micro Devices, Inc.
16 # Copyright (c) 2009 The Hewlett-Packard Development Company
17 # Copyright (c) 2004-2005 The Regents of The University of Michigan
18 # All rights reserved.
20 # Redistribution and use in source and binary forms, with or without
21 # modification, are permitted provided that the following conditions are
22 # met: redistributions of source code must retain the above copyright
23 # notice, this list of conditions and the following disclaimer;
24 # redistributions in binary form must reproduce the above copyright
25 # notice, this list of conditions and the following disclaimer in the
26 # documentation and/or other materials provided with the distribution;
27 # neither the name of the copyright holders nor the names of its
28 # contributors may be used to endorse or promote products derived from
29 # this software without specific prior written permission.
31 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
43 # Authors: Steve Reinhardt
46 ###################################################
48 # SCons top-level build description (SConstruct) file.
50 # While in this directory ('gem5'), just type 'scons' to build the default
51 # configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52 # to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53 # the optimized full-system version).
55 # You can build gem5 in a different directory as long as there is a
56 # 'build/<CONFIG>' somewhere along the target path. The build system
57 # expects that all configs under the same build directory are being
58 # built for the same host system.
62 # The following two commands are equivalent. The '-u' option tells
63 # scons to search up the directory tree for this SConstruct file.
64 # % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65 # % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
67 # The following two commands are equivalent and demonstrate building
68 # in a directory outside of the source tree. The '-C' option tells
69 # scons to chdir to the specified directory to find this SConstruct
71 # % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72 # % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
74 # You can use 'scons -H' to print scons options. If you're in this
75 # 'gem5' directory (or use -u or -C to tell scons where to find this
76 # file), you can use 'scons -h' to print all the gem5-specific build
79 ###################################################
81 # Global Python includes
89 from os
import mkdir
, environ
90 from os
.path
import abspath
, basename
, dirname
, expanduser
, normpath
91 from os
.path
import exists
, isdir
, isfile
92 from os
.path
import join
as joinpath
, split
as splitpath
# Directories inside the gem5 tree that must be importable both by this
# SConstruct and by any Python subprocesses it spawns later (they are
# also appended to PYTHONPATH further down).
extra_python_paths = [
    Dir('src/python').srcnode().abspath, # gem5 includes
    Dir('ext/ply').srcnode().abspath, # ply is used by several files
    ]

# Splice the gem5 paths in at index 1 so they take precedence over
# site-packages but not over the script directory itself.
sys.path[1:1] = extra_python_paths

# These modules live under src/python, so they can only be imported
# after the sys.path update above.
from m5.util import compareVersions, readCommand
from m5.util.terminal import get_termcap
117 # There's a bug in scons in that (1) by default, the help texts from
118 # AddOption() are supposed to be displayed when you type 'scons -h'
119 # and (2) you can override the help displayed by 'scons -h' using the
120 # Help() function, but these two features are incompatible: once
121 # you've overridden the help text using Help(), there's no way to get
122 # at the help texts from AddOptions. See:
123 # http://scons.tigris.org/issues/show_bug.cgi?id=2356
124 # http://scons.tigris.org/issues/show_bug.cgi?id=2611
125 # This hack lets us extract the help text from AddOptions and
126 # re-inject it via Help(). Ideally someday this bug will be fixed and
127 # we can just use AddOption directly.
def AddLocalOption(*args, **kwargs):
    """Wrap SCons AddOption() while accumulating its help text.

    SCons' Help() overrides the text AddOption() would show for
    'scons -h' (see the bug links in the comment above), so the help
    line for every option is built by hand here and appended to
    help_texts["options"] for later re-injection via Help().
    """
    # Column at which the help string starts when the option names fit.
    # NOTE(review): this constant and the "help" guard were lost in the
    # mangled source and restored from upstream gem5 -- confirm.
    col_width = 30

    help = " " + ", ".join(args)
    if "help" in kwargs:
        length = len(help)
        if length >= col_width:
            # Option names overflow the column: put the help text on
            # its own, indented line.
            help += "\n" + " " * col_width
        else:
            # Pad out to the help column.
            help += " " * (col_width - length)
        help += kwargs["help"]
        help_texts["options"] += help + "\n"

    AddOption(*args, **kwargs)
143 AddLocalOption('--colors', dest
='use_colors', action
='store_true',
144 help="Add color to abbreviated scons output")
145 AddLocalOption('--no-colors', dest
='use_colors', action
='store_false',
146 help="Don't add color to abbreviated scons output")
147 AddLocalOption('--with-cxx-config', dest
='with_cxx_config',
149 help="Build with support for C++-based configuration")
150 AddLocalOption('--default', dest
='default', type='string', action
='store',
151 help='Override which build_opts file to use for defaults')
152 AddLocalOption('--ignore-style', dest
='ignore_style', action
='store_true',
153 help='Disable style checking hooks')
154 AddLocalOption('--no-lto', dest
='no_lto', action
='store_true',
155 help='Disable Link-Time Optimization for fast')
156 AddLocalOption('--force-lto', dest
='force_lto', action
='store_true',
157 help='Use Link-Time Optimization instead of partial linking' +
158 ' when the compiler doesn\'t support using them together.')
159 AddLocalOption('--update-ref', dest
='update_ref', action
='store_true',
160 help='Update test reference outputs')
161 AddLocalOption('--verbose', dest
='verbose', action
='store_true',
162 help='Print full tool command lines')
163 AddLocalOption('--without-python', dest
='without_python',
165 help='Build without Python configuration support')
166 AddLocalOption('--without-tcmalloc', dest
='without_tcmalloc',
168 help='Disable linking against tcmalloc')
169 AddLocalOption('--with-ubsan', dest
='with_ubsan', action
='store_true',
170 help='Build with Undefined Behavior Sanitizer if available')
171 AddLocalOption('--with-asan', dest
='with_asan', action
='store_true',
172 help='Build with Address Sanitizer if available')
174 if GetOption('no_lto') and GetOption('force_lto'):
175 print '--no-lto and --force-lto are mutually exclusive'
178 termcap
= get_termcap(GetOption('use_colors'))
180 ########################################################################
182 # Set up the main build environment.
184 ########################################################################
188 main_dict_keys
= main
.Dictionary().keys()
190 # Check that we have a C/C++ compiler
191 if not ('CC' in main_dict_keys
and 'CXX' in main_dict_keys
):
192 print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
195 # add useful python code PYTHONPATH so it can be used by subprocesses
197 main
.AppendENVPath('PYTHONPATH', extra_python_paths
)
199 ###################################################
201 # Figure out which configurations to set up based on the path(s) of
204 ###################################################
206 # Find default configuration & binary.
207 Default(environ
.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
209 # helper function: find last occurrence of element in list
# helper function: find last occurrence of element in list
def rfind(l, elt, offs=-1):
    """Return the index of the last occurrence of elt in l.

    offs shifts the starting point of the backwards scan: the search
    begins at index len(l) + offs (so the default -1 starts at the
    last element).  Raises ValueError if elt is not present.
    """
    # Stop value is -1 so that index 0 is examined too; the original
    # stopped at 0 and silently never checked the first element.
    for i in range(len(l) + offs, -1, -1):
        if l[i] == elt:
            return i
    # Parenthesized raise form works under both Python 2 and 3.
    raise ValueError("element not found")
216 # Take a list of paths (or SCons Nodes) and return a list with all
217 # paths made absolute and ~-expanded. Paths will be interpreted
218 # relative to the launch directory unless a different root is provided
# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded. Paths will be interpreted
# relative to the launch directory unless a different root is provided
def makePathListAbsolute(path_list, root=None):
    """Return path_list with every entry ~-expanded and made absolute.

    Entries are interpreted relative to root, which defaults to the
    SCons launch directory.  Using a None sentinel (instead of calling
    GetLaunchDir() in the default) avoids evaluating an SCons call at
    module-load time and freezing its value at def time.
    """
    if root is None:
        root = GetLaunchDir()
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]
223 # Each target must have 'build' in the interior of the path; the
224 # directory below this will determine the build parameters. For
225 # example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
226 # recognize that ALPHA_SE specifies the configuration because it
227 # follows 'build' in the build path.
229 # The funky assignment to "[:]" is needed to replace the list contents
230 # in place rather than reassign the symbol to a new list, which
231 # doesn't work (obviously!).
232 BUILD_TARGETS
[:] = makePathListAbsolute(BUILD_TARGETS
)
234 # Generate a list of the unique build roots and configs that the
235 # collected targets reference.
238 for t
in BUILD_TARGETS
:
239 path_dirs
= t
.split('/')
241 build_top
= rfind(path_dirs
, 'build', -2)
243 print "Error: no non-leaf 'build' dir found on target path", t
245 this_build_root
= joinpath('/',*path_dirs
[:build_top
+1])
247 build_root
= this_build_root
249 if this_build_root
!= build_root
:
250 print "Error: build targets not under same build root\n"\
251 " %s\n %s" % (build_root
, this_build_root
)
253 variant_path
= joinpath('/',*path_dirs
[:build_top
+2])
254 if variant_path
not in variant_paths
:
255 variant_paths
.append(variant_path
)
257 # Make sure build_root exists (might not if this is the first build there)
258 if not isdir(build_root
):
260 main
['BUILDROOT'] = build_root
264 main
.SConsignFile(joinpath(build_root
, "sconsign"))
266 # Default duplicate option is to use hard links, but this messes up
267 # when you use emacs to edit a file in the target dir, as emacs moves
268 # file to file~ then copies to file, breaking the link. Symbolic
269 # (soft) links work better.
270 main
.SetOption('duplicate', 'soft-copy')
273 # Set up global sticky variables... these are common to an entire build
274 # tree (not specific to a particular build like ALPHA_SE)
277 global_vars_file
= joinpath(build_root
, 'variables.global')
279 global_vars
= Variables(global_vars_file
, args
=ARGUMENTS
)
281 global_vars
.AddVariables(
282 ('CC', 'C compiler', environ
.get('CC', main
['CC'])),
283 ('CXX', 'C++ compiler', environ
.get('CXX', main
['CXX'])),
284 ('PROTOC', 'protoc tool', environ
.get('PROTOC', 'protoc')),
285 ('BATCH', 'Use batch pool for build and tests', False),
286 ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
287 ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
288 ('EXTRAS', 'Add extra directories to the compilation', '')
291 # Update main environment with values from ARGUMENTS & global_vars_file
292 global_vars
.Update(main
)
293 help_texts
["global_vars"] += global_vars
.GenerateHelpText(main
)
295 # Save sticky variable settings back to current variables file
296 global_vars
.Save(global_vars_file
, main
)
298 # Parse EXTRAS variable to build list of all directories where we'll
299 # look for sources etc. This list is exported as extras_dir_list.
300 base_dir
= main
.srcdir
.abspath
302 extras_dir_list
= makePathListAbsolute(main
['EXTRAS'].split(':'))
307 Export('extras_dir_list')
309 # the ext directory should be on the #includes path
310 main
.Append(CPPPATH
=[Dir('ext')])
312 # Add shared top-level headers
313 main
.Prepend(CPPPATH
=Dir('include'))
def strip_build_path(path, env):
    """Strip the build-directory prefix from path for display purposes.

    path may be a string or an SCons Node (hence the str() coercion).
    If it lies under env['BUILDROOT'] that prefix is removed; otherwise
    a literal leading 'build/' is removed.  Anything else is returned
    unchanged.
    NOTE(review): the str() coercion, the 'build/' slice and the return
    were missing from the mangled source and restored from upstream
    gem5 -- confirm.
    """
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]  # 6 == len('build/')
    return path
324 # Generate a string of the form:
325 # common/path/prefix/src1, src2 -> tgt1, tgt2
326 # to print while building.
# Generate a string of the form:
#   common/path/prefix/src1, src2 -> tgt1, tgt2
# to print while building.
# NOTE(review): several branch bodies and the strip/fmt helpers were
# missing from the mangled source and restored from upstream gem5 --
# confirm against repository history.
class Transform(object):
    """Callable used as an SCons *COMSTR: renders an abbreviated,
    colorized "[ TOOL ] prefix srcs -> tgts" build line."""

    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        # Pre-build the %-format template; tool name is padded into an
        # 8-character field.
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]

        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
        # recalculate length in case com_pfx was modified
        com_pfx_len = len(com_pfx)

        def fmt(files):
            # Drop the common prefix from each name and comma-join.
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))
396 # enable the regression script to use the termcap
397 main
['TERMCAP'] = termcap
# With --verbose, show the full tool command lines; otherwise install
# the abbreviated Transform strings for each tool.
# NOTE(review): the else branch ('MakeAction = Action') was missing
# from the mangled source and restored from upstream gem5 -- confirm.
if GetOption('verbose'):
    def MakeAction(action, string, *args, **kwargs):
        # Ignore the pretty-print string entirely in verbose mode.
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR']        = Transform("CC")
    main['CXXCOMSTR']       = Transform("CXX")
    main['ASCOMSTR']        = Transform("AS")
    main['ARCOMSTR']        = Transform("AR", 0)
    main['LINKCOMSTR']      = Transform("LINK", 0)
    main['SHLINKCOMSTR']    = Transform("SHLINK", 0)
    main['RANLIBCOMSTR']    = Transform("RANLIB", 0)
    main['M4COMSTR']        = Transform("M4")
    main['SHCCCOMSTR']      = Transform("SHCC")
    main['SHCXXCOMSTR']     = Transform("SHCXX")
416 # Initialize the Link-Time Optimization (LTO) flags
417 main
['LTO_CCFLAGS'] = []
418 main
['LTO_LDFLAGS'] = []
420 # According to the readme, tcmalloc works best if the compiler doesn't
421 # assume that we're using the builtin malloc and friends. These flags
422 # are compiler-specific, so we need to set them after we detect which
423 # compiler we're using.
424 main
['TCMALLOC_CCFLAGS'] = []
426 CXX_version
= readCommand([main
['CXX'],'--version'], exception
=False)
427 CXX_V
= readCommand([main
['CXX'],'-V'], exception
=False)
429 main
['GCC'] = CXX_version
and CXX_version
.find('g++') >= 0
430 main
['CLANG'] = CXX_version
and CXX_version
.find('clang') >= 0
431 if main
['GCC'] + main
['CLANG'] > 1:
432 print 'Error: How can we have two at the same time?'
435 # Set up default C++ compiler flags
436 if main
['GCC'] or main
['CLANG']:
437 # As gcc and clang share many flags, do the common parts here
438 main
.Append(CCFLAGS
=['-pipe'])
439 main
.Append(CCFLAGS
=['-fno-strict-aliasing'])
440 # Enable -Wall and -Wextra and then disable the few warnings that
441 # we consistently violate
442 main
.Append(CCFLAGS
=['-Wall', '-Wundef', '-Wextra',
443 '-Wno-sign-compare', '-Wno-unused-parameter'])
444 # We always compile using C++11
445 main
.Append(CXXFLAGS
=['-std=c++11'])
446 if sys
.platform
.startswith('freebsd'):
447 main
.Append(CCFLAGS
=['-I/usr/local/include'])
448 main
.Append(CXXFLAGS
=['-I/usr/local/include'])
450 main
['FILTER_PSHLINKFLAGS'] = lambda x
: str(x
).replace(' -shared', '')
451 main
['PSHLINKFLAGS'] = main
.subst('${FILTER_PSHLINKFLAGS(SHLINKFLAGS)}')
452 main
['PLINKFLAGS'] = main
.subst('${LINKFLAGS}')
453 shared_partial_flags
= ['-r', '-nostdlib']
454 main
.Append(PSHLINKFLAGS
=shared_partial_flags
)
455 main
.Append(PLINKFLAGS
=shared_partial_flags
)
457 print termcap
.Yellow
+ termcap
.Bold
+ 'Error' + termcap
.Normal
,
458 print "Don't know what compiler options to use for your compiler."
459 print termcap
.Yellow
+ ' compiler:' + termcap
.Normal
, main
['CXX']
460 print termcap
.Yellow
+ ' version:' + termcap
.Normal
,
462 print termcap
.Yellow
+ termcap
.Bold
+ "COMMAND NOT FOUND!" +\
465 print CXX_version
.replace('\n', '<nl>')
466 print " If you're trying to use a compiler other than GCC"
467 print " or clang, there appears to be something wrong with your"
468 print " environment."
470 print " If you are trying to use a compiler other than those listed"
471 print " above you will need to ease fix SConstruct and "
472 print " src/SConscript to support that compiler."
476 # Check for a supported version of gcc. >= 4.8 is chosen for its
477 # level of c++11 support. See
478 # http://gcc.gnu.org/projects/cxx0x.html for details.
479 gcc_version
= readCommand([main
['CXX'], '-dumpversion'], exception
=False)
480 if compareVersions(gcc_version
, "4.8") < 0:
481 print 'Error: gcc version 4.8 or newer required.'
482 print ' Installed version:', gcc_version
485 main
['GCC_VERSION'] = gcc_version
487 if compareVersions(gcc_version
, '4.9') >= 0:
488 # Incremental linking with LTO is currently broken in gcc versions
489 # 4.9 and above. A version where everything works completely hasn't
490 # yet been identified.
492 # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67548
493 main
['BROKEN_INCREMENTAL_LTO'] = True
494 if compareVersions(gcc_version
, '6.0') >= 0:
495 # gcc versions 6.0 and greater accept an -flinker-output flag which
496 # selects what type of output the linker should generate. This is
497 # necessary for incremental lto to work, but is also broken in
498 # current versions of gcc. It may not be necessary in future
499 # versions. We add it here since it might be, and as a reminder that
500 # it exists. It's excluded if lto is being forced.
502 # https://gcc.gnu.org/gcc-6/changes.html
503 # https://gcc.gnu.org/ml/gcc-patches/2015-11/msg03161.html
504 # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=69866
505 if not GetOption('force_lto'):
506 main
.Append(PSHLINKFLAGS
='-flinker-output=rel')
507 main
.Append(PLINKFLAGS
='-flinker-output=rel')
509 # gcc from version 4.8 and above generates "rep; ret" instructions
510 # to avoid performance penalties on certain AMD chips. Older
511 # assemblers detect this as an error, "Error: expecting string
512 # instruction after `rep'"
513 as_version_raw
= readCommand([main
['AS'], '-v', '/dev/null',
515 exception
=False).split()
517 # version strings may contain extra distro-specific
518 # qualifiers, so play it safe and keep only what comes before
520 as_version
= as_version_raw
[-1].split('-')[0] if as_version_raw
else None
522 if not as_version
or compareVersions(as_version
, "2.23") < 0:
523 print termcap
.Yellow
+ termcap
.Bold
+ \
524 'Warning: This combination of gcc and binutils have' + \
525 ' known incompatibilities.\n' + \
526 ' If you encounter build problems, please update ' + \
527 'binutils to 2.23.' + \
530 # Make sure we warn if the user has requested to compile with the
531 # Undefined Behavior Sanitizer and this version of gcc does not
533 if GetOption('with_ubsan') and \
534 compareVersions(gcc_version
, '4.9') < 0:
535 print termcap
.Yellow
+ termcap
.Bold
+ \
536 'Warning: UBSan is only supported using gcc 4.9 and later.' + \
539 disable_lto
= GetOption('no_lto')
540 if not disable_lto
and main
.get('BROKEN_INCREMENTAL_LTO', False) and \
541 not GetOption('force_lto'):
542 print termcap
.Yellow
+ termcap
.Bold
+ \
543 'Warning: Your compiler doesn\'t support incremental linking' + \
544 ' and lto at the same time, so lto is being disabled. To force' + \
545 ' lto on anyway, use the --force-lto option. That will disable' + \
546 ' partial linking.' + \
550 # Add the appropriate Link-Time Optimization (LTO) flags
551 # unless LTO is explicitly turned off. Note that these flags
552 # are only used by the fast target.
554 # Pass the LTO flag when compiling to produce GIMPLE
555 # output, we merely create the flags here and only append
557 main
['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
559 # Use the same amount of jobs for LTO as we are running
561 main
['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
563 main
.Append(TCMALLOC_CCFLAGS
=['-fno-builtin-malloc', '-fno-builtin-calloc',
564 '-fno-builtin-realloc', '-fno-builtin-free'])
566 # add option to check for undeclared overrides
567 if compareVersions(gcc_version
, "5.0") > 0:
568 main
.Append(CCFLAGS
=['-Wno-error=suggest-override'])
571 # Check for a supported version of clang, >= 3.1 is needed to
572 # support similar features as gcc 4.8. See
573 # http://clang.llvm.org/cxx_status.html for details
574 clang_version_re
= re
.compile(".* version (\d+\.\d+)")
575 clang_version_match
= clang_version_re
.search(CXX_version
)
576 if (clang_version_match
):
577 clang_version
= clang_version_match
.groups()[0]
578 if compareVersions(clang_version
, "3.1") < 0:
579 print 'Error: clang version 3.1 or newer required.'
580 print ' Installed version:', clang_version
583 print 'Error: Unable to determine clang version.'
586 # clang has a few additional warnings that we disable, extraneous
587 # parentheses are allowed due to Ruby's printing of the AST,
588 # finally self assignments are allowed as the generated CPU code
590 main
.Append(CCFLAGS
=['-Wno-parentheses',
592 # Some versions of libstdc++ (4.8?) seem to
593 # use struct hash and class hash
595 '-Wno-mismatched-tags',
598 main
.Append(TCMALLOC_CCFLAGS
=['-fno-builtin'])
600 # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
601 # opposed to libstdc++, as the later is dated.
602 if sys
.platform
== "darwin":
603 main
.Append(CXXFLAGS
=['-stdlib=libc++'])
604 main
.Append(LIBS
=['c++'])
606 # On FreeBSD we need libthr.
607 if sys
.platform
.startswith('freebsd'):
608 main
.Append(LIBS
=['thr'])
611 print termcap
.Yellow
+ termcap
.Bold
+ 'Error' + termcap
.Normal
,
612 print "Don't know what compiler options to use for your compiler."
613 print termcap
.Yellow
+ ' compiler:' + termcap
.Normal
, main
['CXX']
614 print termcap
.Yellow
+ ' version:' + termcap
.Normal
,
616 print termcap
.Yellow
+ termcap
.Bold
+ "COMMAND NOT FOUND!" +\
619 print CXX_version
.replace('\n', '<nl>')
620 print " If you're trying to use a compiler other than GCC"
621 print " or clang, there appears to be something wrong with your"
622 print " environment."
624 print " If you are trying to use a compiler other than those listed"
625 print " above you will need to ease fix SConstruct and "
626 print " src/SConscript to support that compiler."
629 # Set up common yacc/bison flags (needed for Ruby)
630 main
['YACCFLAGS'] = '-d'
631 main
['YACCHXXFILESUFFIX'] = '.hh'
633 # Do this after we save setting back, or else we'll tack on an
634 # extra 'qdo' every time we run scons.
636 main
['CC'] = main
['BATCH_CMD'] + ' ' + main
['CC']
637 main
['CXX'] = main
['BATCH_CMD'] + ' ' + main
['CXX']
638 main
['AS'] = main
['BATCH_CMD'] + ' ' + main
['AS']
639 main
['AR'] = main
['BATCH_CMD'] + ' ' + main
['AR']
640 main
['RANLIB'] = main
['BATCH_CMD'] + ' ' + main
['RANLIB']
642 if sys
.platform
== 'cygwin':
643 # cygwin has some header file issues...
644 main
.Append(CCFLAGS
=["-Wno-uninitialized"])
646 # Check for the protobuf compiler
647 protoc_version
= readCommand([main
['PROTOC'], '--version'],
648 exception
='').split()
650 # First two words should be "libprotoc x.y.z"
651 if len(protoc_version
) < 2 or protoc_version
[0] != 'libprotoc':
652 print termcap
.Yellow
+ termcap
.Bold
+ \
653 'Warning: Protocol buffer compiler (protoc) not found.\n' + \
654 ' Please install protobuf-compiler for tracing support.' + \
656 main
['PROTOC'] = False
658 # Based on the availability of the compress stream wrappers,
660 min_protoc_version
= '2.1.0'
661 if compareVersions(protoc_version
[1], min_protoc_version
) < 0:
662 print termcap
.Yellow
+ termcap
.Bold
+ \
663 'Warning: protoc version', min_protoc_version
, \
664 'or newer required.\n' + \
665 ' Installed version:', protoc_version
[1], \
667 main
['PROTOC'] = False
669 # Attempt to determine the appropriate include path and
670 # library path using pkg-config, that means we also need to
671 # check for pkg-config. Note that it is possible to use
672 # protobuf without the involvement of pkg-config. Later on we
673 # check go a library config check and at that point the test
674 # will fail if libprotobuf cannot be found.
675 if readCommand(['pkg-config', '--version'], exception
=''):
677 # Attempt to establish what linking flags to add for protobuf
679 main
.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
681 print termcap
.Yellow
+ termcap
.Bold
+ \
682 'Warning: pkg-config could not get protobuf flags.' + \
686 # Check for 'timeout' from GNU coreutils. If present, regressions will
687 # be run with a time limit. We require version 8.13 since we rely on
688 # support for the '--foreground' option.
689 if sys
.platform
.startswith('freebsd'):
690 timeout_lines
= readCommand(['gtimeout', '--version'],
691 exception
='').splitlines()
693 timeout_lines
= readCommand(['timeout', '--version'],
694 exception
='').splitlines()
695 # Get the first line and tokenize it
696 timeout_version
= timeout_lines
[0].split() if timeout_lines
else []
697 main
['TIMEOUT'] = timeout_version
and \
698 compareVersions(timeout_version
[-1], '8.13') >= 0
700 # Add a custom Check function to test for structure members.
701 def CheckMember(context
, include
, decl
, member
, include_quotes
="<>"):
702 context
.Message("Checking for member %s in %s..." %
708 (void)test.%(member)s;
711 """ % { "header" : include_quotes
[0] + include
+ include_quotes
[1],
716 ret
= context
.TryCompile(text
, extension
=".cc")
720 # Platform-specific configuration. Note again that we assume that all
721 # builds under a given build root run on the same host platform.
722 conf
= Configure(main
,
723 conf_dir
= joinpath(build_root
, '.scons_config'),
724 log_file
= joinpath(build_root
, 'scons_config.log'),
726 'CheckMember' : CheckMember
,
729 # Check if we should compile a 64 bit binary on Mac OS X/Darwin
732 uname
= platform
.uname()
733 if uname
[0] == 'Darwin' and compareVersions(uname
[2], '9.0.0') >= 0:
734 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
735 main
.Append(CCFLAGS
=['-arch', 'x86_64'])
736 main
.Append(CFLAGS
=['-arch', 'x86_64'])
737 main
.Append(LINKFLAGS
=['-arch', 'x86_64'])
738 main
.Append(ASFLAGS
=['-arch', 'x86_64'])
742 # Recent versions of scons substitute a "Null" object for Configure()
743 # when configuration isn't necessary, e.g., if the "--help" option is
744 # present. Unfortunately this Null object always returns false,
745 # breaking all our configuration checks. We replace it with our own
746 # more optimistic null object that returns True instead.
748 def NullCheck(*args
, **kwargs
):
752 def __init__(self
, env
):
756 def __getattr__(self
, mname
):
759 conf
= NullConf(main
)
761 # Cache build files in the supplied directory.
762 if main
['M5_BUILD_CACHE']:
763 print 'Using build cache located at', main
['M5_BUILD_CACHE']
764 CacheDir(main
['M5_BUILD_CACHE'])
766 main
['USE_PYTHON'] = not GetOption('without_python')
767 if main
['USE_PYTHON']:
768 # Find Python include and library directories for embedding the
769 # interpreter. We rely on python-config to resolve the appropriate
770 # includes and linker flags. ParseConfig does not seem to understand
771 # the more exotic linker flags such as -Xlinker and -export-dynamic so
772 # we add them explicitly below. If you want to link in an alternate
773 # version of python, see above for instructions on how to invoke
774 # scons with the appropriate PATH set.
776 # First we check if python2-config exists, else we use python-config
777 python_config
= readCommand(['which', 'python2-config'],
778 exception
='').strip()
779 if not os
.path
.exists(python_config
):
780 python_config
= readCommand(['which', 'python-config'],
781 exception
='').strip()
782 py_includes
= readCommand([python_config
, '--includes'],
783 exception
='').split()
784 # Strip the -I from the include folders before adding them to the
786 main
.Append(CPPPATH
=map(lambda inc
: inc
[2:], py_includes
))
788 # Read the linker flags and split them into libraries and other link
789 # flags. The libraries are added later through the call the CheckLib.
790 py_ld_flags
= readCommand([python_config
, '--ldflags'],
791 exception
='').split()
793 for lib
in py_ld_flags
:
794 if not lib
.startswith('-l'):
795 main
.Append(LINKFLAGS
=[lib
])
798 if lib
not in py_libs
:
801 # verify that this stuff works
802 if not conf
.CheckHeader('Python.h', '<>'):
803 print "Error: can't find Python.h header in", py_includes
804 print "Install Python headers (package python-dev on Ubuntu and RedHat)"
808 if not conf
.CheckLib(lib
):
809 print "Error: can't find library %s required by python" % lib
812 # On Solaris you need to use libsocket for socket ops
813 if not conf
.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
814 if not conf
.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
815 print "Can't find library with socket calls (e.g. accept())"
818 # Check for zlib. If the check passes, libz will be automatically
819 # added to the LIBS environment variable.
820 if not conf
.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
821 print 'Error: did not find needed zlib compression library '\
822 'and/or zlib.h header file.'
823 print ' Please install zlib and try again.'
826 # If we have the protobuf compiler, also make sure we have the
827 # development libraries. If the check passes, libprotobuf will be
828 # automatically added to the LIBS environment variable. After
829 # this, we can use the HAVE_PROTOBUF flag to determine if we have
830 # got both protoc and libprotobuf available.
831 main
['HAVE_PROTOBUF'] = main
['PROTOC'] and \
832 conf
.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
833 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
835 # If we have the compiler but not the library, print another warning.
836 if main
['PROTOC'] and not main
['HAVE_PROTOBUF']:
837 print termcap
.Yellow
+ termcap
.Bold
+ \
838 'Warning: did not find protocol buffer library and/or headers.\n' + \
839 ' Please install libprotobuf-dev for tracing support.' + \
844 conf
.CheckLibWithHeader(None, 'time.h', 'C',
845 'clock_nanosleep(0,0,NULL,NULL);') or \
846 conf
.CheckLibWithHeader('rt', 'time.h', 'C',
847 'clock_nanosleep(0,0,NULL,NULL);')
849 have_posix_timers
= \
850 conf
.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
851 'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
853 if not GetOption('without_tcmalloc'):
854 if conf
.CheckLib('tcmalloc'):
855 main
.Append(CCFLAGS
=main
['TCMALLOC_CCFLAGS'])
856 elif conf
.CheckLib('tcmalloc_minimal'):
857 main
.Append(CCFLAGS
=main
['TCMALLOC_CCFLAGS'])
859 print termcap
.Yellow
+ termcap
.Bold
+ \
860 "You can get a 12% performance improvement by "\
861 "installing tcmalloc (libgoogle-perftools-dev package "\
862 "on Ubuntu or RedHat)." + termcap
.Normal
# Detect back trace implementations. The last implementation in the
# list will be used by default.
backtrace_impls = [ "none" ]

if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
                           'backtrace_symbols_fd((void*)0, 0, 0);'):
    # backtrace support is available directly in libc.
    backtrace_impls.append("glibc")
elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
                             'backtrace_symbols_fd((void*)0, 0, 0);'):
    # NetBSD and FreeBSD need libexecinfo.
    backtrace_impls.append("glibc")
    main.Append(LIBS=['execinfo'])
878 if backtrace_impls
[-1] == "none":
879 default_backtrace_impl
= "none"
880 print termcap
.Yellow
+ termcap
.Bold
+ \
881 "No suitable back trace implementation found." + \
884 if not have_posix_clock
:
885 print "Can't find library for POSIX clocks."
887 # Check for <fenv.h> (C99 FP environment control)
888 have_fenv
= conf
.CheckHeader('fenv.h', '<>')
890 print "Warning: Header file <fenv.h> not found."
891 print " This host has no IEEE FP rounding mode control."
893 # Check for <png.h> (libpng library needed if wanting to dump
894 # frame buffer image in png format)
895 have_png
= conf
.CheckHeader('png.h', '<>')
897 print "Warning: Header file <png.h> not found."
898 print " This host has no libpng library."
899 print " Disabling support for PNG framebuffers."
901 # Check if we should enable KVM-based hardware virtualization. The API
902 # we rely on exists since version 2.6.36 of the kernel, but somehow
903 # the KVM_API_VERSION does not reflect the change. We test for one of
904 # the types as a fall back.
905 have_kvm
= conf
.CheckHeader('linux/kvm.h', '<>')
907 print "Info: Compatible header file <linux/kvm.h> not found, " \
908 "disabling KVM support."
910 # Check if the TUN/TAP driver is available.
911 have_tuntap
= conf
.CheckHeader('linux/if_tun.h', '<>')
913 print "Info: Compatible header file <linux/if_tun.h> not found."
# x86 needs support for xsave. We test for the structure here since we
# won't be able to run new tests by the time we know which ISA we're
# building for.
have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
                                    '#include <linux/kvm.h>') != 0
921 # Check if the requested target ISA is compatible with the host
922 def is_isa_kvm_compatible(isa
):
925 host_isa
= platform
.machine()
927 print "Warning: Failed to determine host ISA."
930 if not have_posix_timers
:
931 print "Warning: Can not enable KVM, host seems to lack support " \
936 return host_isa
in ( "armv7l", "aarch64" )
938 if host_isa
!= "x86_64":
941 if not have_kvm_xsave
:
942 print "KVM on x86 requires xsave support in kernel headers."
# Check if the exclude_host attribute is available. We want this to
# get accurate instruction counts in KVM.
main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
    'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
956 ######################################################################
958 # Finish the configuration
962 ######################################################################
964 # Collect all non-global variables
967 # Define the universe of supported ISAs
969 all_gpu_isa_list
= [ ]
970 Export('all_isa_list')
971 Export('all_gpu_isa_list')
class CpuModel(object):
    '''The CpuModel class encapsulates everything the ISA parser needs to
    know about a particular CPU model.'''

    # Registry of available CPU model objects, keyed by model name.
    # Accessible as CpuModel.dict.
    dict = {}

    # Constructor. Automatically adds models to CpuModel.dict.
    def __init__(self, name, default=False):
        # Name of the model.
        self.name = name

        # Whether this CPU model is enabled by default.
        self.default = default

        # Register the model, refusing duplicate names.
        if name in CpuModel.dict:
            raise AttributeError("CpuModel '%s' already registered" % name)
        CpuModel.dict[name] = self
# Sticky variables get saved in the variables file so they persist from
# one invocation to the next (unless overridden, in which case the new
# value becomes sticky).
sticky_vars = Variables(args=ARGUMENTS)
Export('sticky_vars')
1000 # Sticky variables that should be exported
1002 Export('export_vars')
1006 Export('all_protocols')
1008 Export('protocol_dirs')
1010 Export('slicc_includes')
1012 # Walk the tree and execute all SConsopts scripts that wil add to the
1014 if GetOption('verbose'):
1015 print "Reading SConsopts"
1016 for bdir
in [ base_dir
] + extras_dir_list
:
1018 print "Error: directory '%s' does not exist" % bdir
1020 for root
, dirs
, files
in os
.walk(bdir
):
1021 if 'SConsopts' in files
:
1022 if GetOption('verbose'):
1023 print "Reading", joinpath(root
, 'SConsopts')
1024 SConscript(joinpath(root
, 'SConsopts'))
1027 all_gpu_isa_list
.sort()
1029 sticky_vars
.AddVariables(
1030 EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list
),
1031 EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list
),
1032 ListVariable('CPU_MODELS', 'CPU models',
1033 sorted(n
for n
,m
in CpuModel
.dict.iteritems() if m
.default
),
1034 sorted(CpuModel
.dict.keys())),
1035 BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1037 BoolVariable('SS_COMPATIBLE_FP',
1038 'Make floating-point results compatible with SimpleScalar',
1040 BoolVariable('USE_SSE2',
1041 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1043 BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock
),
1044 BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv
),
1045 BoolVariable('USE_PNG', 'Enable support for PNG images', have_png
),
1046 BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability',
1048 BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models',
1050 BoolVariable('USE_TUNTAP',
1051 'Enable using a tap device to bridge to the host network',
1053 BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
1054 EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1056 EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
1057 backtrace_impls
[-1], backtrace_impls
)
1060 # These variables get exported to #defines in config/*.hh (see src/SConscript).
1061 export_vars
+= ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
1062 'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'USE_TUNTAP',
1063 'PROTOCOL', 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST',
1066 ###################################################
1068 # Define a SCons builder for configuration flag headers.
1070 ###################################################
# This function generates a config header file that #defines the
# variable symbol to the current variable setting (0 or 1). The source
# operands are the name of the variable and a Value node containing the
# value of the variable.
def build_config_file(target, source, env):
    # source carries exactly two Value nodes: the variable name and its
    # rendered value (see config_emitter).
    (variable, value) = [s.get_contents() for s in source]
    # Use a context manager so the header is always closed even if the
    # write fails; the visible original opened the file via the
    # deprecated file() builtin with no guaranteed close.
    with open(str(target[0]), 'w') as f:
        # Equivalent output to: print >> f, '#define', variable, value
        f.write('#define %s %s\n' % (variable, value))
    return None
# Combine the two functions into a scons Action object.
config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
# The emitter munges the source & target node lists to reflect what
# we're really doing.
def config_emitter(target, source, env):
    # extract variable name from Builder arg
    variable = str(target[0])
    # True target is config header file
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        # Force value to 0/1
        val = int(val)
    elif isinstance(val, str):
        val = '"' + val + '"'

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])
# Wire the emitter and action together and expose the result on the
# build environment as env.ConfigFile(...).
config_builder = Builder(emitter=config_emitter, action=config_action)

main.Append(BUILDERS={ 'ConfigFile' : config_builder })
1107 ###################################################
1109 # Builders for static and shared partially linked object files.
1111 ###################################################
1113 partial_static_builder
= Builder(action
=SCons
.Defaults
.LinkAction
,
1114 src_suffix
='$OBJSUFFIX',
1115 src_builder
=['StaticObject', 'Object'],
1116 LINKFLAGS
='$PLINKFLAGS',
def partial_shared_emitter(target, source, env):
    # Mark every target node as a shared object so downstream link steps
    # treat the partially linked result correctly.
    for tgt in target:
        tgt.attributes.shared = 1
    return (target, source)
1123 partial_shared_builder
= Builder(action
=SCons
.Defaults
.ShLinkAction
,
1124 emitter
=partial_shared_emitter
,
1125 src_suffix
='$SHOBJSUFFIX',
1126 src_builder
='SharedObject',
1127 SHLINKFLAGS
='$PSHLINKFLAGS',
# Register both partial-link builders on the build environment.
main.Append(BUILDERS={ 'PartialShared' : partial_shared_builder,
                       'PartialStatic' : partial_static_builder })
1133 # builds in ext are shared across all configs in the build root.
1134 ext_dir
= abspath(joinpath(str(main
.root
), 'ext'))
1136 for root
, dirs
, files
in os
.walk(ext_dir
):
1137 if 'SConscript' in files
:
1138 build_dir
= os
.path
.relpath(root
, ext_dir
)
1139 ext_build_dirs
.append(build_dir
)
1140 main
.SConscript(joinpath(root
, 'SConscript'),
1141 variant_dir
=joinpath(build_root
, build_dir
))
# Make the bundled pybind11 headers visible to every compilation unit.
main.Prepend(CPPPATH=Dir('ext/pybind11/include/'))
1145 ###################################################
1147 # This builder and wrapper method are used to set up a directory with
1148 # switching headers. Those are headers which are in a generic location and
1149 # that include more specific headers from a directory chosen at build time
1150 # based on the current build settings.
1152 ###################################################
def build_switching_header(target, source, env):
    """Write a one-line header that includes the same-named header from a
    build-configuration-specific subdirectory (given by source[0])."""
    hdr_path = str(target[0])
    subdir = str(source[0])
    directory, fname = os.path.split(hdr_path)
    # Express the header's directory relative to the build root so the
    # generated #include works from within the build tree.
    directory = os.path.relpath(os.path.realpath(directory),
                                os.path.realpath(env['BUILDDIR']))
    with open(hdr_path, 'w') as hdr:
        # Same output as: print >>hdr, '#include "%s/%s/%s"' % (...)
        hdr.write('#include "%s/%s/%s"\n' % (directory, subdir, fname))
# Wrap the generator function in a scons Action with a GENERATE label.
switching_header_action = MakeAction(build_switching_header,
                                     Transform('GENERATE'))
1166 switching_header_builder
= Builder(action
=switching_header_action
,
1167 source_factory
=Value
,
1170 main
.Append(BUILDERS
= { 'SwitchingHeader': switching_header_builder
})
def switching_headers(self, headers, source):
    # Convenience wrapper: invoke the SwitchingHeader builder once per
    # header in the list, all switched on the same source value.
    for hdr in headers:
        self.SwitchingHeader(hdr, source)
# Expose the helper on the environment as env.SwitchingHeaders(...).
main.AddMethod(switching_headers, 'SwitchingHeaders')
1178 ###################################################
1180 # Define build environments for selected configurations.
1182 ###################################################
1184 for variant_path
in variant_paths
:
1185 if not GetOption('silent'):
1186 print "Building in", variant_path
1188 # Make a copy of the build-root environment to use for this config.
1190 env
['BUILDDIR'] = variant_path
1192 # variant_dir is the tail component of build path, and is used to
1193 # determine the build parameters (e.g., 'ALPHA_SE')
1194 (build_root
, variant_dir
) = splitpath(variant_path
)
1196 # Set env variables according to the build directory config.
1197 sticky_vars
.files
= []
1198 # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
1199 # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
1200 # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
1201 current_vars_file
= joinpath(build_root
, 'variables', variant_dir
)
1202 if isfile(current_vars_file
):
1203 sticky_vars
.files
.append(current_vars_file
)
1204 if not GetOption('silent'):
1205 print "Using saved variables file %s" % current_vars_file
1206 elif variant_dir
in ext_build_dirs
:
1207 # Things in ext are built without a variant directory.
1210 # Build dir-specific variables file doesn't exist.
1212 # Make sure the directory is there so we can create it later
1213 opt_dir
= dirname(current_vars_file
)
1214 if not isdir(opt_dir
):
1217 # Get default build variables from source tree. Variables are
1218 # normally determined by name of $VARIANT_DIR, but can be
1219 # overridden by '--default=' arg on command line.
1220 default
= GetOption('default')
1221 opts_dir
= joinpath(main
.root
.abspath
, 'build_opts')
1223 default_vars_files
= [joinpath(build_root
, 'variables', default
),
1224 joinpath(opts_dir
, default
)]
1226 default_vars_files
= [joinpath(opts_dir
, variant_dir
)]
1227 existing_files
= filter(isfile
, default_vars_files
)
1229 default_vars_file
= existing_files
[0]
1230 sticky_vars
.files
.append(default_vars_file
)
1231 print "Variables file %s not found,\n using defaults in %s" \
1232 % (current_vars_file
, default_vars_file
)
1234 print "Error: cannot find variables file %s or " \
1235 "default file(s) %s" \
1236 % (current_vars_file
, ' or '.join(default_vars_files
))
1239 # Apply current variable settings to env
1240 sticky_vars
.Update(env
)
1242 help_texts
["local_vars"] += \
1243 "Build variables for %s:\n" % variant_dir \
1244 + sticky_vars
.GenerateHelpText(env
)
1246 # Process variable settings.
1248 if not have_fenv
and env
['USE_FENV']:
1249 print "Warning: <fenv.h> not available; " \
1250 "forcing USE_FENV to False in", variant_dir
+ "."
1251 env
['USE_FENV'] = False
1253 if not env
['USE_FENV']:
1254 print "Warning: No IEEE FP rounding mode control in", variant_dir
+ "."
1255 print " FP results may deviate slightly from other platforms."
1257 if not have_png
and env
['USE_PNG']:
1258 print "Warning: <png.h> not available; " \
1259 "forcing USE_PNG to False in", variant_dir
+ "."
1260 env
['USE_PNG'] = False
1263 env
.Append(LIBS
=['png'])
1266 env
.Append(LIBS
=['efence'])
1270 print "Warning: Can not enable KVM, host seems to lack KVM support"
1271 env
['USE_KVM'] = False
1272 elif not is_isa_kvm_compatible(env
['TARGET_ISA']):
1273 print "Info: KVM support disabled due to unsupported host and " \
1274 "target ISA combination"
1275 env
['USE_KVM'] = False
1277 if env
['USE_TUNTAP']:
1279 print "Warning: Can't connect EtherTap with a tap device."
1280 env
['USE_TUNTAP'] = False
1282 if env
['BUILD_GPU']:
1283 env
.Append(CPPDEFINES
=['BUILD_GPU'])
1285 # Warn about missing optional functionality
1287 if not main
['HAVE_PERF_ATTR_EXCLUDE_HOST']:
1288 print "Warning: perf_event headers lack support for the " \
1289 "exclude_host attribute. KVM instruction counts will " \
1292 # Save sticky variable settings back to current variables file
1293 sticky_vars
.Save(current_vars_file
, env
)
1296 env
.Append(CCFLAGS
=['-msse2'])
1298 # The src/SConscript file sets up the build rules in 'env' according
1299 # to the configured variables. It returns a list of environments,
1300 # one for each variant build (debug, opt, etc.)
1301 SConscript('src/SConscript', variant_dir
= variant_path
, exports
= 'env')
1305 Usage: scons [scons options] [build variables] [target(s)]
1307 Extra scons options:
1310 Global build variables: