1 # -*- mode:python -*-
2
3 # Copyright (c) 2013, 2015, 2016 ARM Limited
4 # All rights reserved.
5 #
6 # The license below extends only to copyright in the software and shall
7 # not be construed as granting a license to any other intellectual
8 # property including but not limited to intellectual property relating
9 # to a hardware implementation of the functionality of the software
10 # licensed hereunder. You may use the software subject to the license
11 # terms below provided that you ensure that this notice is replicated
12 # unmodified and in its entirety in all distributions of the software,
13 # modified or unmodified, in source code or in binary form.
14 #
15 # Copyright (c) 2011 Advanced Micro Devices, Inc.
16 # Copyright (c) 2009 The Hewlett-Packard Development Company
17 # Copyright (c) 2004-2005 The Regents of The University of Michigan
18 # All rights reserved.
19 #
20 # Redistribution and use in source and binary forms, with or without
21 # modification, are permitted provided that the following conditions are
22 # met: redistributions of source code must retain the above copyright
23 # notice, this list of conditions and the following disclaimer;
24 # redistributions in binary form must reproduce the above copyright
25 # notice, this list of conditions and the following disclaimer in the
26 # documentation and/or other materials provided with the distribution;
27 # neither the name of the copyright holders nor the names of its
28 # contributors may be used to endorse or promote products derived from
29 # this software without specific prior written permission.
30 #
31 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
42 #
43 # Authors: Steve Reinhardt
44 # Nathan Binkert
45
46 ###################################################
47 #
48 # SCons top-level build description (SConstruct) file.
49 #
50 # While in this directory ('gem5'), just type 'scons' to build the default
51 # configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52 # to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53 # the optimized full-system version).
54 #
55 # You can build gem5 in a different directory as long as there is a
56 # 'build/<CONFIG>' somewhere along the target path. The build system
57 # expects that all configs under the same build directory are being
58 # built for the same host system.
59 #
60 # Examples:
61 #
62 # The following two commands are equivalent. The '-u' option tells
63 # scons to search up the directory tree for this SConstruct file.
64 # % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65 # % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
66 #
67 # The following two commands are equivalent and demonstrate building
68 # in a directory outside of the source tree. The '-C' option tells
69 # scons to chdir to the specified directory to find this SConstruct
70 # file.
71 # % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72 # % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
73 #
74 # You can use 'scons -H' to print scons options. If you're in this
75 # 'gem5' directory (or use -u or -C to tell scons where to find this
76 # file), you can use 'scons -h' to print all the gem5-specific build
77 # options as well.
78 #
79 ###################################################
80
81 # Check for recent-enough Python and SCons versions.
82 try:
83 # Really old versions of scons only take two options for the
84 # function, so check once without the revision and once with the
85 # revision; the first instance will fail for stuff other than
86 # 0.98, and the second will fail for 0.98.0
87 EnsureSConsVersion(0, 98)
88 EnsureSConsVersion(0, 98, 1)
89 except SystemExit, e:
90 print """
91 For more details, see:
92 http://gem5.org/Dependencies
93 """
94 raise
95
96 # pybind11 requires python 2.7
97 try:
98 EnsurePythonVersion(2, 7)
99 except SystemExit, e:
100 print """
101 You can use a non-default installation of the Python interpreter by
102 rearranging your PATH so that scons finds the non-default 'python' and
103 'python-config' first.
104
105 For more details, see:
106 http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
107 """
108 raise
109
110 # Global Python includes
111 import itertools
112 import os
113 import re
114 import shutil
115 import subprocess
116 import sys
117
118 from os import mkdir, environ
119 from os.path import abspath, basename, dirname, expanduser, normpath
120 from os.path import exists, isdir, isfile
121 from os.path import join as joinpath, split as splitpath
122
123 # SCons includes
124 import SCons
125 import SCons.Node
126
127 extra_python_paths = [
128 Dir('src/python').srcnode().abspath, # gem5 includes
129 Dir('ext/ply').srcnode().abspath, # ply is used by several files
130 ]
131
132 sys.path[1:1] = extra_python_paths
133
134 from m5.util import compareVersions, readCommand
135 from m5.util.terminal import get_termcap
136
137 help_texts = {
138 "options" : "",
139 "global_vars" : "",
140 "local_vars" : ""
141 }
142
143 Export("help_texts")
144
145
146 # There's a bug in scons in that (1) by default, the help texts from
147 # AddOption() are supposed to be displayed when you type 'scons -h'
148 # and (2) you can override the help displayed by 'scons -h' using the
149 # Help() function, but these two features are incompatible: once
150 # you've overridden the help text using Help(), there's no way to get
151 # at the help texts from AddOption. See:
152 # http://scons.tigris.org/issues/show_bug.cgi?id=2356
153 # http://scons.tigris.org/issues/show_bug.cgi?id=2611
154 # This hack lets us extract the help text from AddOption and
155 # re-inject it via Help(). Ideally someday this bug will be fixed and
156 # we can just use AddOption directly.
157 def AddLocalOption(*args, **kwargs):
158 col_width = 30
159
160 help = " " + ", ".join(args)
161 if "help" in kwargs:
162 length = len(help)
163 if length >= col_width:
164 help += "\n" + " " * col_width
165 else:
166 help += " " * (col_width - length)
167 help += kwargs["help"]
168 help_texts["options"] += help + "\n"
169
170 AddOption(*args, **kwargs)
171
172 AddLocalOption('--colors', dest='use_colors', action='store_true',
173 help="Add color to abbreviated scons output")
174 AddLocalOption('--no-colors', dest='use_colors', action='store_false',
175 help="Don't add color to abbreviated scons output")
176 AddLocalOption('--with-cxx-config', dest='with_cxx_config',
177 action='store_true',
178 help="Build with support for C++-based configuration")
179 AddLocalOption('--default', dest='default', type='string', action='store',
180 help='Override which build_opts file to use for defaults')
181 AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
182 help='Disable style checking hooks')
183 AddLocalOption('--no-lto', dest='no_lto', action='store_true',
184 help='Disable Link-Time Optimization for fast')
185 AddLocalOption('--force-lto', dest='force_lto', action='store_true',
186 help='Use Link-Time Optimization instead of partial linking' +
187 ' when the compiler doesn\'t support using them together.')
188 AddLocalOption('--update-ref', dest='update_ref', action='store_true',
189 help='Update test reference outputs')
190 AddLocalOption('--verbose', dest='verbose', action='store_true',
191 help='Print full tool command lines')
192 AddLocalOption('--without-python', dest='without_python',
193 action='store_true',
194 help='Build without Python configuration support')
195 AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
196 action='store_true',
197 help='Disable linking against tcmalloc')
198 AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
199 help='Build with Undefined Behavior Sanitizer if available')
200 AddLocalOption('--with-asan', dest='with_asan', action='store_true',
201 help='Build with Address Sanitizer if available')
202
203 if GetOption('no_lto') and GetOption('force_lto'):
204 print '--no-lto and --force-lto are mutually exclusive'
205 Exit(1)
206
207 termcap = get_termcap(GetOption('use_colors'))
208
209 ########################################################################
210 #
211 # Set up the main build environment.
212 #
213 ########################################################################
214
215 # export TERM so that clang reports errors in color
216 use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
217 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
218 'PYTHONPATH', 'RANLIB', 'TERM' ])
219
220 use_prefixes = [
221 "ASAN_", # address sanitizer symbolizer path and settings
222 "CCACHE_", # ccache (caching compiler wrapper) configuration
223 "CCC_", # clang static analyzer configuration
224 "DISTCC_", # distcc (distributed compiler wrapper) configuration
225 "INCLUDE_SERVER_", # distcc pump server settings
226 "M5", # M5 configuration (e.g., path to kernels)
227 ]
228
229 use_env = {}
230 for key,val in sorted(os.environ.iteritems()):
231 if key in use_vars or \
232 any([key.startswith(prefix) for prefix in use_prefixes]):
233 use_env[key] = val
234
235 # Tell scons to avoid implicit command dependencies to avoid issues
236 # with the param wrappers being compiled twice (see
237 # http://scons.tigris.org/issues/show_bug.cgi?id=2811)
238 main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
239 main.Decider('MD5-timestamp')
240 main.root = Dir(".") # The current directory (where this file lives).
241 main.srcdir = Dir("src") # The source directory
242
243 main_dict_keys = main.Dictionary().keys()
244
245 # Check that we have a C/C++ compiler
246 if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
247 print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
248 Exit(1)
249
250 # add useful python code to PYTHONPATH so it can be used by subprocesses
251 # as well
252 main.AppendENVPath('PYTHONPATH', extra_python_paths)
253
254 ########################################################################
255 #
256 # Mercurial Stuff.
257 #
258 # If the gem5 directory is a mercurial repository, we should do some
259 # extra things.
260 #
261 ########################################################################
262
263 hgdir = main.root.Dir(".hg")
264
265
266 style_message = """
267 You're missing the gem5 style hook, which automatically checks your code
268 against the gem5 style rules on %s.
269 This script will now install the hook in your %s.
270 Press enter to continue, or ctrl-c to abort: """
271
272 mercurial_style_message = """
273 You're missing the gem5 style hook, which automatically checks your code
274 against the gem5 style rules on hg commit and qrefresh commands.
275 This script will now install the hook in your .hg/hgrc file.
276 Press enter to continue, or ctrl-c to abort: """
277
278 git_style_message = """
279 You're missing the gem5 style or commit message hook. These hooks help
280 to ensure that your code follows gem5's style rules on git commit.
281 This script will now install the hook in your .git/hooks/ directory.
282 Press enter to continue, or ctrl-c to abort: """
283
284 mercurial_style_upgrade_message = """
285 Your Mercurial style hooks are not up-to-date. This script will now
286 try to automatically update them. A backup of your hgrc will be saved
287 in .hg/hgrc.old.
288 Press enter to continue, or ctrl-c to abort: """
289
290 mercurial_style_hook = """
291 # The following lines were automatically added by gem5/SConstruct
292 # to provide the gem5 style-checking hooks
293 [extensions]
294 hgstyle = %s/util/hgstyle.py
295
296 [hooks]
297 pretxncommit.style = python:hgstyle.check_style
298 pre-qrefresh.style = python:hgstyle.check_style
299 # End of SConstruct additions
300
301 """ % (main.root.abspath)
302
303 mercurial_lib_not_found = """
304 Mercurial libraries cannot be found, ignoring style hook. If
305 you are a gem5 developer, please fix this and run the style
306 hook. It is important.
307 """
308
309 # Check for style hook and prompt for installation if it's not there.
310 # Skip this if --ignore-style was specified, there's no interactive
311 # terminal to prompt, or no recognized revision control system can be
312 # found.
313 ignore_style = GetOption('ignore_style') or not sys.stdin.isatty()
314
315 # Try to wire up Mercurial to the style hooks
316 if not ignore_style and hgdir.exists():
317 style_hook = True
318 style_hooks = tuple()
319 hgrc = hgdir.File('hgrc')
320 hgrc_old = hgdir.File('hgrc.old')
321 try:
322 from mercurial import ui
323 ui = ui.ui()
324 ui.readconfig(hgrc.abspath)
325 style_hooks = (ui.config('hooks', 'pretxncommit.style', None),
326 ui.config('hooks', 'pre-qrefresh.style', None))
327 style_hook = all(style_hooks)
328 style_extension = ui.config('extensions', 'style', None)
329 except ImportError:
330 print mercurial_lib_not_found
331
332 if "python:style.check_style" in style_hooks:
333 # Try to upgrade the style hooks
334 print mercurial_style_upgrade_message
335 # continue unless user does ctrl-c/ctrl-d etc.
336 try:
337 raw_input()
338 except:
339 print "Input exception, exiting scons.\n"
340 sys.exit(1)
341 shutil.copyfile(hgrc.abspath, hgrc_old.abspath)
342 re_style_hook = re.compile(r"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
343 re_style_extension = re.compile("style\s*=\s*([^#\s]+).*")
344 old, new = open(hgrc_old.abspath, 'r'), open(hgrc.abspath, 'w')
345 for l in old:
346 m_hook = re_style_hook.match(l)
347 m_ext = re_style_extension.match(l)
348 if m_hook:
349 hook, check = m_hook.groups()
350 if check != "python:style.check_style":
351 print "Warning: %s.style is using a non-default " \
352 "checker: %s" % (hook, check)
353 if hook not in ("pretxncommit", "pre-qrefresh"):
354 print "Warning: Updating unknown style hook: %s" % hook
355
356 l = "%s.style = python:hgstyle.check_style\n" % hook
357 elif m_ext and m_ext.group(1) == style_extension:
358 l = "hgstyle = %s/util/hgstyle.py\n" % main.root.abspath
359
360 new.write(l)
361 elif not style_hook:
362 print mercurial_style_message,
363 # continue unless user does ctrl-c/ctrl-d etc.
364 try:
365 raw_input()
366 except:
367 print "Input exception, exiting scons.\n"
368 sys.exit(1)
369 hgrc_path = '%s/.hg/hgrc' % main.root.abspath
370 print "Adding style hook to", hgrc_path, "\n"
371 try:
372 with open(hgrc_path, 'a') as f:
373 f.write(mercurial_style_hook)
374 except:
375 print "Error updating", hgrc_path
376 sys.exit(1)
377
378 def install_git_style_hooks():
379 try:
380 gitdir = Dir(readCommand(
381 ["git", "rev-parse", "--git-dir"]).strip("\n"))
382 except Exception, e:
383 print "Warning: Failed to find git repo directory: %s" % e
384 return
385
386 git_hooks = gitdir.Dir("hooks")
387 def hook_exists(hook_name):
388 hook = git_hooks.File(hook_name)
389 return hook.exists()
390
391 def hook_install(hook_name, script):
392 hook = git_hooks.File(hook_name)
393 if hook.exists():
394 print "Warning: Can't install %s, hook already exists." % hook_name
395 return
396
397 if hook.islink():
398 print "Warning: Removing broken symlink for hook %s." % hook_name
399 os.unlink(hook.get_abspath())
400
401 if not git_hooks.exists():
402 mkdir(git_hooks.get_abspath())
403 git_hooks.clear()
404
405 abs_symlink_hooks = git_hooks.islink() and \
406 os.path.isabs(os.readlink(git_hooks.get_abspath()))
407
408 # Use a relative symlink if the hooks live in the source directory,
409 # and the hooks directory is not a symlink to an absolute path.
410 if hook.is_under(main.root) and not abs_symlink_hooks:
411 script_path = os.path.relpath(
412 os.path.realpath(script.get_abspath()),
413 os.path.realpath(hook.Dir(".").get_abspath()))
414 else:
415 script_path = script.get_abspath()
416
417 try:
418 os.symlink(script_path, hook.get_abspath())
419 except:
420 print "Error updating git %s hook" % hook_name
421 raise
422
423 if hook_exists("pre-commit") and hook_exists("commit-msg"):
424 return
425
426 print git_style_message,
427 try:
428 raw_input()
429 except:
430 print "Input exception, exiting scons.\n"
431 sys.exit(1)
432
433 git_style_script = File("util/git-pre-commit.py")
434 git_msg_script = File("ext/git-commit-msg")
435
436 hook_install("pre-commit", git_style_script)
437 hook_install("commit-msg", git_msg_script)
438
439 # Try to wire up git to the style hooks
440 if not ignore_style and main.root.Entry(".git").exists():
441 install_git_style_hooks()
442
443 ###################################################
444 #
445 # Figure out which configurations to set up based on the path(s) of
446 # the target(s).
447 #
448 ###################################################
449
450 # Find default configuration & binary.
451 Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
452
453 # helper function: find last occurrence of element in list
454 def rfind(l, elt, offs = -1):
455 for i in range(len(l)+offs, 0, -1):
456 if l[i] == elt:
457 return i
458 raise ValueError, "element not found"
459
460 # Take a list of paths (or SCons Nodes) and return a list with all
461 # paths made absolute and ~-expanded. Paths will be interpreted
462 # relative to the launch directory unless a different root is provided
463 def makePathListAbsolute(path_list, root=GetLaunchDir()):
464 return [abspath(joinpath(root, expanduser(str(p))))
465 for p in path_list]
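# For illustration only (paths hypothetical): if scons was launched from
# /work/gem5, makePathListAbsolute(['build/X86/gem5.opt', '~/extras'])
# would return ['/work/gem5/build/X86/gem5.opt', '/home/<user>/extras'].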
466
467 # Each target must have 'build' in the interior of the path; the
468 # directory below this will determine the build parameters. For
469 # example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
470 # recognize that ALPHA_SE specifies the configuration because it
471 # follows 'build' in the build path.
472
473 # The funky assignment to "[:]" is needed to replace the list contents
474 # in place rather than reassign the symbol to a new list, which
475 # doesn't work (obviously!).
476 BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
477
478 # Generate a list of the unique build roots and configs that the
479 # collected targets reference.
480 variant_paths = []
481 build_root = None
482 for t in BUILD_TARGETS:
483 path_dirs = t.split('/')
484 try:
485 build_top = rfind(path_dirs, 'build', -2)
486 except:
487 print "Error: no non-leaf 'build' dir found on target path", t
488 Exit(1)
489 this_build_root = joinpath('/',*path_dirs[:build_top+1])
490 if not build_root:
491 build_root = this_build_root
492 else:
493 if this_build_root != build_root:
494 print "Error: build targets not under same build root\n"\
495 " %s\n %s" % (build_root, this_build_root)
496 Exit(1)
497 variant_path = joinpath('/',*path_dirs[:build_top+2])
498 if variant_path not in variant_paths:
499 variant_paths.append(variant_path)
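# For example (paths illustrative): a target of /local/foo/build/ALPHA/gem5.opt
# yields build_root = /local/foo/build and variant_path =
# /local/foo/build/ALPHA; further targets under the same ALPHA directory add
# no new variant_path.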
500
501 # Make sure build_root exists (might not if this is the first build there)
502 if not isdir(build_root):
503 mkdir(build_root)
504 main['BUILDROOT'] = build_root
505
506 Export('main')
507
508 main.SConsignFile(joinpath(build_root, "sconsign"))
509
510 # Default duplicate option is to use hard links, but this messes up
511 # when you use emacs to edit a file in the target dir, as emacs moves
512 # file to file~ then copies to file, breaking the link. Symbolic
513 # (soft) links work better.
514 main.SetOption('duplicate', 'soft-copy')
515
516 #
517 # Set up global sticky variables... these are common to an entire build
518 # tree (not specific to a particular build like ALPHA_SE)
519 #
520
521 global_vars_file = joinpath(build_root, 'variables.global')
522
523 global_vars = Variables(global_vars_file, args=ARGUMENTS)
524
525 global_vars.AddVariables(
526 ('CC', 'C compiler', environ.get('CC', main['CC'])),
527 ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
528 ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
529 ('BATCH', 'Use batch pool for build and tests', False),
530 ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
531 ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
532 ('EXTRAS', 'Add extra directories to the compilation', '')
533 )
534
535 # Update main environment with values from ARGUMENTS & global_vars_file
536 global_vars.Update(main)
537 help_texts["global_vars"] += global_vars.GenerateHelpText(main)
538
539 # Save sticky variable settings back to current variables file
540 global_vars.Save(global_vars_file, main)
541
542 # Parse EXTRAS variable to build a list of all directories where we'll
543 # look for sources etc. This list is exported as extras_dir_list.
544 base_dir = main.srcdir.abspath
545 if main['EXTRAS']:
546 extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
547 else:
548 extras_dir_list = []
549
550 Export('base_dir')
551 Export('extras_dir_list')
552
553 # the ext directory should be on the #includes path
554 main.Append(CPPPATH=[Dir('ext')])
555
556 # Add shared top-level headers
557 main.Prepend(CPPPATH=Dir('include'))
558
559 def strip_build_path(path, env):
560 path = str(path)
561 variant_base = env['BUILDROOT'] + os.path.sep
562 if path.startswith(variant_base):
563 path = path[len(variant_base):]
564 elif path.startswith('build/'):
565 path = path[6:]
566 return path
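# Illustrative behavior (paths hypothetical): with BUILDROOT set to
# /local/foo/build, both '/local/foo/build/X86/sim/main.cc' and
# 'build/X86/sim/main.cc' would be stripped to 'X86/sim/main.cc'.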
567
568 # Generate a string of the form:
569 # common/path/prefix/src1, src2 -> tgt1, tgt2
570 # to print while building.
571 class Transform(object):
572 # all specific color settings should be here and nowhere else
573 tool_color = termcap.Normal
574 pfx_color = termcap.Yellow
575 srcs_color = termcap.Yellow + termcap.Bold
576 arrow_color = termcap.Blue + termcap.Bold
577 tgts_color = termcap.Yellow + termcap.Bold
578
579 def __init__(self, tool, max_sources=99):
580 self.format = self.tool_color + (" [%8s] " % tool) \
581 + self.pfx_color + "%s" \
582 + self.srcs_color + "%s" \
583 + self.arrow_color + " -> " \
584 + self.tgts_color + "%s" \
585 + termcap.Normal
586 self.max_sources = max_sources
587
588 def __call__(self, target, source, env, for_signature=None):
589 # truncate source list according to max_sources param
590 source = source[0:self.max_sources]
591 def strip(f):
592 return strip_build_path(str(f), env)
593 if len(source) > 0:
594 srcs = map(strip, source)
595 else:
596 srcs = ['']
597 tgts = map(strip, target)
598 # surprisingly, os.path.commonprefix is a dumb char-by-char string
599 # operation that has nothing to do with paths.
600 com_pfx = os.path.commonprefix(srcs + tgts)
601 com_pfx_len = len(com_pfx)
602 if com_pfx:
603 # do some cleanup and sanity checking on common prefix
604 if com_pfx[-1] == ".":
605 # prefix matches all but file extension: ok
606 # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
607 com_pfx = com_pfx[0:-1]
608 elif com_pfx[-1] == "/":
609 # common prefix is directory path: OK
610 pass
611 else:
612 src0_len = len(srcs[0])
613 tgt0_len = len(tgts[0])
614 if src0_len == com_pfx_len:
615 # source is a substring of target, OK
616 pass
617 elif tgt0_len == com_pfx_len:
618 # target is a substring of source, need to back up to
619 # avoid empty string on RHS of arrow
620 sep_idx = com_pfx.rfind(".")
621 if sep_idx != -1:
622 com_pfx = com_pfx[0:sep_idx]
623 else:
624 com_pfx = ''
625 elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
626 # still splitting at file extension: ok
627 pass
628 else:
629 # probably a fluke; ignore it
630 com_pfx = ''
631 # recalculate length in case com_pfx was modified
632 com_pfx_len = len(com_pfx)
633 def fmt(files):
634 f = map(lambda s: s[com_pfx_len:], files)
635 return ', '.join(f)
636 return self.format % (com_pfx, fmt(srcs), fmt(tgts))
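# Illustrative output (colors omitted, paths hypothetical): compiling
# build/X86/sim/main.cc into main.o with Transform("CXX") would print
# something like:
#  [     CXX] X86/sim/main.cc -> .o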
637
638 Export('Transform')
639
640 # enable the regression script to use the termcap
641 main['TERMCAP'] = termcap
642
643 if GetOption('verbose'):
644 def MakeAction(action, string, *args, **kwargs):
645 return Action(action, *args, **kwargs)
646 else:
647 MakeAction = Action
648 main['CCCOMSTR'] = Transform("CC")
649 main['CXXCOMSTR'] = Transform("CXX")
650 main['ASCOMSTR'] = Transform("AS")
651 main['ARCOMSTR'] = Transform("AR", 0)
652 main['LINKCOMSTR'] = Transform("LINK", 0)
653 main['SHLINKCOMSTR'] = Transform("SHLINK", 0)
654 main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
655 main['M4COMSTR'] = Transform("M4")
656 main['SHCCCOMSTR'] = Transform("SHCC")
657 main['SHCXXCOMSTR'] = Transform("SHCXX")
658 Export('MakeAction')
659
660 # Initialize the Link-Time Optimization (LTO) flags
661 main['LTO_CCFLAGS'] = []
662 main['LTO_LDFLAGS'] = []
663
664 # According to the readme, tcmalloc works best if the compiler doesn't
665 # assume that we're using the builtin malloc and friends. These flags
666 # are compiler-specific, so we need to set them after we detect which
667 # compiler we're using.
668 main['TCMALLOC_CCFLAGS'] = []
669
670 CXX_version = readCommand([main['CXX'],'--version'], exception=False)
671 CXX_V = readCommand([main['CXX'],'-V'], exception=False)
672
673 main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
674 main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
675 if main['GCC'] + main['CLANG'] > 1:
676 print 'Error: How can we have two at the same time?'
677 Exit(1)
678
679 # Set up default C++ compiler flags
680 if main['GCC'] or main['CLANG']:
681 # As gcc and clang share many flags, do the common parts here
682 main.Append(CCFLAGS=['-pipe'])
683 main.Append(CCFLAGS=['-fno-strict-aliasing'])
684 # Enable -Wall and -Wextra and then disable the few warnings that
685 # we consistently violate
686 main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
687 '-Wno-sign-compare', '-Wno-unused-parameter'])
688 # We always compile using C++11
689 main.Append(CXXFLAGS=['-std=c++11'])
690 if sys.platform.startswith('freebsd'):
691 main.Append(CCFLAGS=['-I/usr/local/include'])
692 main.Append(CXXFLAGS=['-I/usr/local/include'])
693
694 main['FILTER_PSHLINKFLAGS'] = lambda x: str(x).replace(' -shared', '')
695 main['PSHLINKFLAGS'] = main.subst('${FILTER_PSHLINKFLAGS(SHLINKFLAGS)}')
696 main['PLINKFLAGS'] = main.subst('${LINKFLAGS}')
697 shared_partial_flags = ['-r', '-nostdlib']
698 main.Append(PSHLINKFLAGS=shared_partial_flags)
699 main.Append(PLINKFLAGS=shared_partial_flags)
700 else:
701 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
702 print "Don't know what compiler options to use for your compiler."
703 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
704 print termcap.Yellow + ' version:' + termcap.Normal,
705 if not CXX_version:
706 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
707 termcap.Normal
708 else:
709 print CXX_version.replace('\n', '<nl>')
710 print " If you're trying to use a compiler other than GCC"
711 print " or clang, there appears to be something wrong with your"
712 print " environment."
713 print " "
714 print " If you are trying to use a compiler other than those listed"
715 print " above you will need to ease fix SConstruct and "
716 print " src/SConscript to support that compiler."
717 Exit(1)
718
719 if main['GCC']:
720 # Check for a supported version of gcc. >= 4.8 is chosen for its
721 # level of c++11 support. See
722 # http://gcc.gnu.org/projects/cxx0x.html for details.
723 gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
724 if compareVersions(gcc_version, "4.8") < 0:
725 print 'Error: gcc version 4.8 or newer required.'
726 print ' Installed version:', gcc_version
727 Exit(1)
728
729 main['GCC_VERSION'] = gcc_version
730
731 if compareVersions(gcc_version, '4.9') >= 0:
732 # Incremental linking with LTO is currently broken in gcc versions
733 # 4.9 and above. A version where everything works completely hasn't
734 # yet been identified.
735 #
736 # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67548
737 main['BROKEN_INCREMENTAL_LTO'] = True
738 if compareVersions(gcc_version, '6.0') >= 0:
739 # gcc versions 6.0 and greater accept an -flinker-output flag which
740 # selects what type of output the linker should generate. This is
741 # necessary for incremental lto to work, but is also broken in
742 # current versions of gcc. It may not be necessary in future
743 # versions. We add it here since it might be, and as a reminder that
744 # it exists. It's excluded if lto is being forced.
745 #
746 # https://gcc.gnu.org/gcc-6/changes.html
747 # https://gcc.gnu.org/ml/gcc-patches/2015-11/msg03161.html
748 # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=69866
749 if not GetOption('force_lto'):
750 main.Append(PSHLINKFLAGS='-flinker-output=rel')
751 main.Append(PLINKFLAGS='-flinker-output=rel')
752
753 # gcc from version 4.8 and above generates "rep; ret" instructions
754 # to avoid performance penalties on certain AMD chips. Older
755 # assemblers detect this as an error, "Error: expecting string
756 # instruction after `rep'"
757 as_version_raw = readCommand([main['AS'], '-v', '/dev/null',
758 '-o', '/dev/null'],
759 exception=False).split()
760
761 # version strings may contain extra distro-specific
762 # qualifiers, so play it safe and keep only what comes before
763 # the first hyphen
764 as_version = as_version_raw[-1].split('-')[0] if as_version_raw else None
765
766 if not as_version or compareVersions(as_version, "2.23") < 0:
767 print termcap.Yellow + termcap.Bold + \
768 'Warning: This combination of gcc and binutils has' + \
769 ' known incompatibilities.\n' + \
770 ' If you encounter build problems, please update ' + \
771 'binutils to 2.23.' + \
772 termcap.Normal
773
774 # Make sure we warn if the user has requested to compile with the
775 # Undefined Behavior Sanitizer and this version of gcc does not
776 # support it.
777 if GetOption('with_ubsan') and \
778 compareVersions(gcc_version, '4.9') < 0:
779 print termcap.Yellow + termcap.Bold + \
780 'Warning: UBSan is only supported using gcc 4.9 and later.' + \
781 termcap.Normal
782
783 disable_lto = GetOption('no_lto')
784 if not disable_lto and main.get('BROKEN_INCREMENTAL_LTO', False) and \
785 not GetOption('force_lto'):
786 print termcap.Yellow + termcap.Bold + \
787 'Warning: Your compiler doesn\'t support incremental linking' + \
788 ' and lto at the same time, so lto is being disabled. To force' + \
789 ' lto on anyway, use the --force-lto option. That will disable' + \
790 ' partial linking.' + \
791 termcap.Normal
792 disable_lto = True
793
794 # Add the appropriate Link-Time Optimization (LTO) flags
795 # unless LTO is explicitly turned off. Note that these flags
796 # are only used by the fast target.
797 if not disable_lto:
798 # Pass the LTO flag when compiling to produce GIMPLE
799 # output; we merely create the flags here and only append
800 # them later
801 main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
802
803 # Use the same amount of jobs for LTO as we are running
804 # scons with
805 main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
806
807 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
808 '-fno-builtin-realloc', '-fno-builtin-free'])
809
810 # add option to check for undeclared overrides
811 if compareVersions(gcc_version, "5.0") > 0:
812 main.Append(CCFLAGS=['-Wno-error=suggest-override'])
813
814 elif main['CLANG']:
815 # Check for a supported version of clang; >= 3.1 is needed to
816 # support features similar to gcc 4.8. See
817 # http://clang.llvm.org/cxx_status.html for details
818 clang_version_re = re.compile(".* version (\d+\.\d+)")
819 clang_version_match = clang_version_re.search(CXX_version)
820 if (clang_version_match):
821 clang_version = clang_version_match.groups()[0]
822 if compareVersions(clang_version, "3.1") < 0:
823 print 'Error: clang version 3.1 or newer required.'
824 print ' Installed version:', clang_version
825 Exit(1)
826 else:
827 print 'Error: Unable to determine clang version.'
828 Exit(1)
829
830 # clang has a few additional warnings that we disable: extraneous
831 # parentheses are allowed due to Ruby's printing of the AST, and
832 # self-assignments are allowed as the generated CPU code relies
833 # on this
834 main.Append(CCFLAGS=['-Wno-parentheses',
835 '-Wno-self-assign',
836 # Some versions of libstdc++ (4.8?) seem to
837 # use struct hash and class hash
838 # interchangeably.
839 '-Wno-mismatched-tags',
840 ])
841
842 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
843
844 # On Mac OS X/Darwin we also need to use libc++ (part of Xcode) as
845 # opposed to libstdc++, as the latter is dated.
846 if sys.platform == "darwin":
847 main.Append(CXXFLAGS=['-stdlib=libc++'])
848 main.Append(LIBS=['c++'])
849
850 # On FreeBSD we need libthr.
851 if sys.platform.startswith('freebsd'):
852 main.Append(LIBS=['thr'])
853
854 else:
855 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
856 print "Don't know what compiler options to use for your compiler."
857 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
858 print termcap.Yellow + ' version:' + termcap.Normal,
859 if not CXX_version:
860 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
861 termcap.Normal
862 else:
863 print CXX_version.replace('\n', '<nl>')
864 print " If you're trying to use a compiler other than GCC"
865 print " or clang, there appears to be something wrong with your"
866 print " environment."
867 print " "
868 print " If you are trying to use a compiler other than those listed"
869 print " above you will need to ease fix SConstruct and "
870 print " src/SConscript to support that compiler."
871 Exit(1)
872
873 # Set up common yacc/bison flags (needed for Ruby)
874 main['YACCFLAGS'] = '-d'
875 main['YACCHXXFILESUFFIX'] = '.hh'
876
877 # Do this after we save settings back, or else we'll tack on an
878 # extra 'qdo' every time we run scons.
879 if main['BATCH']:
880 main['CC'] = main['BATCH_CMD'] + ' ' + main['CC']
881 main['CXX'] = main['BATCH_CMD'] + ' ' + main['CXX']
882 main['AS'] = main['BATCH_CMD'] + ' ' + main['AS']
883 main['AR'] = main['BATCH_CMD'] + ' ' + main['AR']
884 main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
885
886 if sys.platform == 'cygwin':
887 # cygwin has some header file issues...
888 main.Append(CCFLAGS=["-Wno-uninitialized"])
889
890 # Check for the protobuf compiler
891 protoc_version = readCommand([main['PROTOC'], '--version'],
892 exception='').split()
893
894 # First two words should be "libprotoc x.y.z"
895 if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
896 print termcap.Yellow + termcap.Bold + \
897 'Warning: Protocol buffer compiler (protoc) not found.\n' + \
898 ' Please install protobuf-compiler for tracing support.' + \
899 termcap.Normal
900 main['PROTOC'] = False
901 else:
902 # Based on the availability of the compress stream wrappers,
903 # require 2.1.0
904 min_protoc_version = '2.1.0'
905 if compareVersions(protoc_version[1], min_protoc_version) < 0:
906 print termcap.Yellow + termcap.Bold + \
907 'Warning: protoc version', min_protoc_version, \
908 'or newer required.\n' + \
909 ' Installed version:', protoc_version[1], \
910 termcap.Normal
911 main['PROTOC'] = False
912 else:
913 # Attempt to determine the appropriate include path and
914 # library path using pkg-config, which means we also need to
915 # check for pkg-config. Note that it is possible to use
916 # protobuf without the involvement of pkg-config. Later on we
917 # do a library config check, and at that point the test
918 # will fail if libprotobuf cannot be found.
919 if readCommand(['pkg-config', '--version'], exception=''):
920 try:
921 # Attempt to establish what linking flags to add for protobuf
922 # using pkg-config
923 main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
924 except:
925 print termcap.Yellow + termcap.Bold + \
926 'Warning: pkg-config could not get protobuf flags.' + \
927 termcap.Normal
928
929
930 # Check for 'timeout' from GNU coreutils. If present, regressions will
931 # be run with a time limit. We require version 8.13 since we rely on
932 # support for the '--foreground' option.
933 if sys.platform.startswith('freebsd'):
934 timeout_lines = readCommand(['gtimeout', '--version'],
935 exception='').splitlines()
936 else:
937 timeout_lines = readCommand(['timeout', '--version'],
938 exception='').splitlines()
939 # Get the first line and tokenize it
940 timeout_version = timeout_lines[0].split() if timeout_lines else []
941 main['TIMEOUT'] = timeout_version and \
942 compareVersions(timeout_version[-1], '8.13') >= 0
943
944 # Add a custom Check function to test for structure members.
945 def CheckMember(context, include, decl, member, include_quotes="<>"):
946 context.Message("Checking for member %s in %s..." %
947 (member, decl))
948 text = """
949 #include %(header)s
950 int main(){
951 %(decl)s test;
952 (void)test.%(member)s;
953 return 0;
954 };
955 """ % { "header" : include_quotes[0] + include + include_quotes[1],
956 "decl" : decl,
957 "member" : member,
958 }
959
960 ret = context.TryCompile(text, extension=".cc")
961 context.Result(ret)
962 return ret
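# CheckMember is registered as a custom test on the Configure context below
# and is used further down in this file, e.g.:
#   conf.CheckMember('linux/perf_event.h', 'struct perf_event_attr',
#                    'exclude_host')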
963
964 # Platform-specific configuration. Note again that we assume that all
965 # builds under a given build root run on the same host platform.
966 conf = Configure(main,
967 conf_dir = joinpath(build_root, '.scons_config'),
968 log_file = joinpath(build_root, 'scons_config.log'),
969 custom_tests = {
970 'CheckMember' : CheckMember,
971 })
972
973 # Check if we should compile a 64 bit binary on Mac OS X/Darwin
974 try:
975 import platform
976 uname = platform.uname()
977 if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
978 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
979 main.Append(CCFLAGS=['-arch', 'x86_64'])
980 main.Append(CFLAGS=['-arch', 'x86_64'])
981 main.Append(LINKFLAGS=['-arch', 'x86_64'])
982 main.Append(ASFLAGS=['-arch', 'x86_64'])
983 except:
984 pass
985
986 # Recent versions of scons substitute a "Null" object for Configure()
987 # when configuration isn't necessary, e.g., if the "--help" option is
988 # present. Unfortunately this Null object always returns false,
989 # breaking all our configuration checks. We replace it with our own
990 # more optimistic null object that returns True instead.
991 if not conf:
992 def NullCheck(*args, **kwargs):
993 return True
994
995 class NullConf:
996 def __init__(self, env):
997 self.env = env
998 def Finish(self):
999 return self.env
1000 def __getattr__(self, mname):
1001 return NullCheck
1002
1003 conf = NullConf(main)
1004
1005 # Cache build files in the supplied directory.
1006 if main['M5_BUILD_CACHE']:
1007 print 'Using build cache located at', main['M5_BUILD_CACHE']
1008 CacheDir(main['M5_BUILD_CACHE'])
1009
1010 main['USE_PYTHON'] = not GetOption('without_python')
1011 if main['USE_PYTHON']:
1012 # Find Python include and library directories for embedding the
1013 # interpreter. We rely on python-config to resolve the appropriate
1014 # includes and linker flags. ParseConfig does not seem to understand
1015 # the more exotic linker flags such as -Xlinker and -export-dynamic so
1016 # we add them explicitly below. If you want to link in an alternate
1017 # version of python, see above for instructions on how to invoke
1018 # scons with the appropriate PATH set.
1019 #
1020 # First we check if python2-config exists, else we use python-config
1021 python_config = readCommand(['which', 'python2-config'],
1022 exception='').strip()
1023 if not os.path.exists(python_config):
1024 python_config = readCommand(['which', 'python-config'],
1025 exception='').strip()
1026 py_includes = readCommand([python_config, '--includes'],
1027 exception='').split()
1028 # Strip the -I from the include folders before adding them to the
1029 # CPPPATH
1030 main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
1031
1032 # Read the linker flags and split them into libraries and other link
1033 # flags. The libraries are added later through the call to CheckLib.
1034 py_ld_flags = readCommand([python_config, '--ldflags'],
1035 exception='').split()
1036 py_libs = []
1037 for lib in py_ld_flags:
1038 if not lib.startswith('-l'):
1039 main.Append(LINKFLAGS=[lib])
1040 else:
1041 lib = lib[2:]
1042 if lib not in py_libs:
1043 py_libs.append(lib)
1044
1045 # verify that this stuff works
1046 if not conf.CheckHeader('Python.h', '<>'):
1047 print "Error: can't find Python.h header in", py_includes
1048 print "Install Python headers (package python-dev on Ubuntu and RedHat)"
1049 Exit(1)
1050
1051 for lib in py_libs:
1052 if not conf.CheckLib(lib):
1053 print "Error: can't find library %s required by python" % lib
1054 Exit(1)
1055
1056 # On Solaris you need to use libsocket for socket ops
1057 if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1058 if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1059 print "Can't find library with socket calls (e.g. accept())"
1060 Exit(1)
1061
1062 # Check for zlib. If the check passes, libz will be automatically
1063 # added to the LIBS environment variable.
1064 if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
1065 print 'Error: did not find needed zlib compression library '\
1066 'and/or zlib.h header file.'
1067 print ' Please install zlib and try again.'
1068 Exit(1)
1069
1070 # If we have the protobuf compiler, also make sure we have the
1071 # development libraries. If the check passes, libprotobuf will be
1072 # automatically added to the LIBS environment variable. After
1073 # this, we can use the HAVE_PROTOBUF flag to determine if we have
1074 # got both protoc and libprotobuf available.
1075 main['HAVE_PROTOBUF'] = main['PROTOC'] and \
1076 conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
1077 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
1078
1079 # If we have the compiler but not the library, print another warning.
1080 if main['PROTOC'] and not main['HAVE_PROTOBUF']:
1081 print termcap.Yellow + termcap.Bold + \
1082 'Warning: did not find protocol buffer library and/or headers.\n' + \
1083 ' Please install libprotobuf-dev for tracing support.' + \
1084 termcap.Normal
1085
1086 # Check for librt.
1087 have_posix_clock = \
1088 conf.CheckLibWithHeader(None, 'time.h', 'C',
1089 'clock_nanosleep(0,0,NULL,NULL);') or \
1090 conf.CheckLibWithHeader('rt', 'time.h', 'C',
1091 'clock_nanosleep(0,0,NULL,NULL);')
1092
1093 have_posix_timers = \
1094 conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
1095 'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
1096
1097 if not GetOption('without_tcmalloc'):
1098 if conf.CheckLib('tcmalloc'):
1099 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1100 elif conf.CheckLib('tcmalloc_minimal'):
1101 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1102 else:
1103 print termcap.Yellow + termcap.Bold + \
1104 "You can get a 12% performance improvement by "\
1105 "installing tcmalloc (libgoogle-perftools-dev package "\
1106 "on Ubuntu or RedHat)." + termcap.Normal
1107
1108
1109 # Detect back trace implementations. The last implementation in the
1110 # list will be used by default.
1111 backtrace_impls = [ "none" ]
1112
1113 if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
1114 'backtrace_symbols_fd((void*)0, 0, 0);'):
1115 backtrace_impls.append("glibc")
1116 elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
1117 'backtrace_symbols_fd((void*)0, 0, 0);'):
1118 # NetBSD and FreeBSD need libexecinfo.
1119 backtrace_impls.append("glibc")
1120 main.Append(LIBS=['execinfo'])
1121
1122 if backtrace_impls[-1] == "none":
1123 default_backtrace_impl = "none"
1124 print termcap.Yellow + termcap.Bold + \
1125 "No suitable back trace implementation found." + \
1126 termcap.Normal
1127
1128 if not have_posix_clock:
1129 print "Can't find library for POSIX clocks."
1130
1131 # Check for <fenv.h> (C99 FP environment control)
1132 have_fenv = conf.CheckHeader('fenv.h', '<>')
1133 if not have_fenv:
1134 print "Warning: Header file <fenv.h> not found."
1135 print " This host has no IEEE FP rounding mode control."
1136
1137 # Check if we should enable KVM-based hardware virtualization. The API
1138 # we rely on has existed since version 2.6.36 of the kernel, but somehow
1139 # the KVM_API_VERSION does not reflect the change. We test for one of
1140 # the types as a fall back.
1141 have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
1142 if not have_kvm:
1143 print "Info: Compatible header file <linux/kvm.h> not found, " \
1144 "disabling KVM support."
1145
1146 # Check if the TUN/TAP driver is available.
1147 have_tuntap = conf.CheckHeader('linux/if_tun.h', '<>')
1148 if not have_tuntap:
1149 print "Info: Compatible header file <linux/if_tun.h> not found."
1150
1151 # x86 needs support for xsave. We test for the structure here since we
1152 # won't be able to run new tests by the time we know which ISA we're
1153 # targeting.
1154 have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
1155 '#include <linux/kvm.h>') != 0
1156
1157 # Check if the requested target ISA is compatible with the host
1158 def is_isa_kvm_compatible(isa):
1159 try:
1160 import platform
1161 host_isa = platform.machine()
1162 except:
1163 print "Warning: Failed to determine host ISA."
1164 return False
1165
1166 if not have_posix_timers:
1167 print "Warning: Can not enable KVM, host seems to lack support " \
1168 "for POSIX timers"
1169 return False
1170
1171 if isa == "arm":
1172 return host_isa in ( "armv7l", "aarch64" )
1173 elif isa == "x86":
1174 if host_isa != "x86_64":
1175 return False
1176
1177 if not have_kvm_xsave:
1178 print "KVM on x86 requires xsave support in kernel headers."
1179 return False
1180
1181 return True
1182 else:
1183 return False
1184
1185
1186 # Check if the exclude_host attribute is available. We want this in
1187 # order to get accurate instruction counts in KVM.
1188 main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
1189 'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1190
1191
1192 ######################################################################
1193 #
1194 # Finish the configuration
1195 #
1196 main = conf.Finish()
1197
1198 ######################################################################
1199 #
1200 # Collect all non-global variables
1201 #
1202
1203 # Define the universe of supported ISAs
1204 all_isa_list = [ ]
1205 all_gpu_isa_list = [ ]
1206 Export('all_isa_list')
1207 Export('all_gpu_isa_list')
1208
1209 class CpuModel(object):
1210 '''The CpuModel class encapsulates everything the ISA parser needs to
1211 know about a particular CPU model.'''
1212
1213 # Dict of available CPU model objects. Accessible as CpuModel.dict.
1214 dict = {}
1215
1216 # Constructor. Automatically adds models to CpuModel.dict.
1217 def __init__(self, name, default=False):
1218 self.name = name # name of model
1219
1220 # This cpu is enabled by default
1221 self.default = default
1222
1223 # Add self to dict
1224 if name in CpuModel.dict:
1225 raise AttributeError, "CpuModel '%s' already registered" % name
1226 CpuModel.dict[name] = self
1227
1228 Export('CpuModel')
1229
1230 # Sticky variables get saved in the variables file so they persist from
1231 # one invocation to the next (unless overridden, in which case the new
1232 # value becomes sticky).
1233 sticky_vars = Variables(args=ARGUMENTS)
1234 Export('sticky_vars')
1235
1236 # Sticky variables that should be exported
1237 export_vars = []
1238 Export('export_vars')
1239
1240 # For Ruby
1241 all_protocols = []
1242 Export('all_protocols')
1243 protocol_dirs = []
1244 Export('protocol_dirs')
1245 slicc_includes = []
1246 Export('slicc_includes')
1247
1248 # Walk the tree and execute all SConsopts scripts that will add to the
1249 # above variables
1250 if GetOption('verbose'):
1251 print "Reading SConsopts"
1252 for bdir in [ base_dir ] + extras_dir_list:
1253 if not isdir(bdir):
1254 print "Error: directory '%s' does not exist" % bdir
1255 Exit(1)
1256 for root, dirs, files in os.walk(bdir):
1257 if 'SConsopts' in files:
1258 if GetOption('verbose'):
1259 print "Reading", joinpath(root, 'SConsopts')
1260 SConscript(joinpath(root, 'SConsopts'))
1261
1262 all_isa_list.sort()
1263 all_gpu_isa_list.sort()
1264
1265 sticky_vars.AddVariables(
1266 EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
1267 EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
1268 ListVariable('CPU_MODELS', 'CPU models',
1269 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
1270 sorted(CpuModel.dict.keys())),
1271 BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1272 False),
1273 BoolVariable('SS_COMPATIBLE_FP',
1274 'Make floating-point results compatible with SimpleScalar',
1275 False),
1276 BoolVariable('USE_SSE2',
1277 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1278 False),
1279 BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
1280 BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
1281 BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
1282 BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
1283 BoolVariable('USE_TUNTAP',
1284 'Enable using a tap device to bridge to the host network',
1285 have_tuntap),
1286 BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
1287 EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1288 all_protocols),
1289 EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
1290 backtrace_impls[-1], backtrace_impls)
1291 )
1292
1293 # These variables get exported to #defines in config/*.hh (see src/SConscript).
1294 export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
1295 'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'USE_TUNTAP',
1296 'PROTOCOL', 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']
1297
1298 ###################################################
1299 #
1300 # Define a SCons builder for configuration flag headers.
1301 #
1302 ###################################################
1303
1304 # This function generates a config header file that #defines the
1305 # variable symbol to the current variable setting (0 or 1). The source
1306 # operands are the name of the variable and a Value node containing the
1307 # value of the variable.
1308 def build_config_file(target, source, env):
1309 (variable, value) = [s.get_contents() for s in source]
1310 f = file(str(target[0]), 'w')
1311 print >> f, '#define', variable, value
1312 f.close()
1313 return None
1314
1315 # Combine the two functions into a scons Action object.
1316 config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1317
1318 # The emitter munges the source & target node lists to reflect what
1319 # we're really doing.
1320 def config_emitter(target, source, env):
1321 # extract variable name from Builder arg
1322 variable = str(target[0])
1323 # True target is config header file
1324 target = joinpath('config', variable.lower() + '.hh')
1325 val = env[variable]
1326 if isinstance(val, bool):
1327 # Force value to 0/1
1328 val = int(val)
1329 elif isinstance(val, str):
1330 val = '"' + val + '"'
1331
1332 # Sources are variable name & value (packaged in SCons Value nodes)
1333 return ([target], [Value(variable), Value(val)])
1334
1335 config_builder = Builder(emitter = config_emitter, action = config_action)
1336
1337 main.Append(BUILDERS = { 'ConfigFile' : config_builder })
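# Illustrative use (the real calls live in the SConscripts): with USE_KVM
# set to True, env.ConfigFile('USE_KVM') would generate config/use_kvm.hh
# containing the single line:
#   #define USE_KVM 1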
1338
1339 ###################################################
1340 #
1341 # Builders for static and shared partially linked object files.
1342 #
1343 ###################################################
1344
1345 partial_static_builder = Builder(action=SCons.Defaults.LinkAction,
1346 src_suffix='$OBJSUFFIX',
1347 src_builder=['StaticObject', 'Object'],
1348 LINKFLAGS='$PLINKFLAGS',
1349 LIBS='')
1350
1351 def partial_shared_emitter(target, source, env):
1352 for tgt in target:
1353 tgt.attributes.shared = 1
1354 return (target, source)
1355 partial_shared_builder = Builder(action=SCons.Defaults.ShLinkAction,
1356 emitter=partial_shared_emitter,
1357 src_suffix='$SHOBJSUFFIX',
1358 src_builder='SharedObject',
1359 SHLINKFLAGS='$PSHLINKFLAGS',
1360 LIBS='')
1361
1362 main.Append(BUILDERS = { 'PartialShared' : partial_shared_builder,
1363 'PartialStatic' : partial_static_builder })
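# Illustrative invocation (names hypothetical): env.PartialStatic('partial.o',
# objects) would combine 'objects' into one relocatable object file using
# $PLINKFLAGS (on gcc/clang, the normal link flags plus -r -nostdlib), and
# env.PartialShared does the same for shared objects using $PSHLINKFLAGS.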
1364
1365 # builds in ext are shared across all configs in the build root.
1366 ext_dir = abspath(joinpath(str(main.root), 'ext'))
1367 ext_build_dirs = []
1368 for root, dirs, files in os.walk(ext_dir):
1369 if 'SConscript' in files:
1370 build_dir = os.path.relpath(root, ext_dir)
1371 ext_build_dirs.append(build_dir)
1372 main.SConscript(joinpath(root, 'SConscript'),
1373 variant_dir=joinpath(build_root, build_dir))
1374
1375 main.Prepend(CPPPATH=Dir('ext/pybind11/include/'))
1376
1377 ###################################################
1378 #
1379 # This builder and wrapper method are used to set up a directory with
1380 # switching headers. Those are headers that live in a generic location and
1381 # include more specific headers from a directory chosen at build time
1382 # based on the current build settings.
1383 #
1384 ###################################################
1385
1386 def build_switching_header(target, source, env):
1387 path = str(target[0])
1388 subdir = str(source[0])
1389 dp, fp = os.path.split(path)
1390 dp = os.path.relpath(os.path.realpath(dp),
1391 os.path.realpath(env['BUILDDIR']))
1392 with open(path, 'w') as hdr:
1393 print >>hdr, '#include "%s/%s/%s"' % (dp, subdir, fp)
1394
1395 switching_header_action = MakeAction(build_switching_header,
1396 Transform('GENERATE'))
1397
1398 switching_header_builder = Builder(action=switching_header_action,
1399 source_factory=Value,
1400 single_source=True)
1401
1402 main.Append(BUILDERS = { 'SwitchingHeader': switching_header_builder })
1403
1404 def switching_headers(self, headers, source):
1405 for header in headers:
1406 self.SwitchingHeader(header, source)
1407
1408 main.AddMethod(switching_headers, 'SwitchingHeaders')
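# Illustrative example (file names hypothetical): for a switching header
# generated at <BUILDDIR>/arch/foo.hh from the source value 'x86', the
# emitted file would contain a single line such as:
#   #include "arch/x86/foo.hh"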
1409
1410 # all-isas -> all-deps -> all-environs -> all_targets
1411 main.Alias('#all-isas', [])
1412 main.Alias('#all-deps', '#all-isas')
1413
1414 # Dummy target to ensure all environments are created before telling
1415 # SCons what to actually make (the command line arguments). We attach
1416 # them to the dependence graph after the environments are complete.
1417 ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
1418 def environsComplete(target, source, env):
1419 for t in ORIG_BUILD_TARGETS:
1420 main.Depends('#all-targets', t)
1421
1422 # Each build/* switching_dir attaches its *-environs target to #all-environs.
1423 main.Append(BUILDERS = {'CompleteEnvirons' :
1424 Builder(action=MakeAction(environsComplete, None))})
1425 main.CompleteEnvirons('#all-environs', [])
1426
1427 def doNothing(**ignored): pass
1428 main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})
1429
1430 # The final target to which all the original targets ultimately get attached.
1431 main.Dummy('#all-targets', '#all-environs')
1432 BUILD_TARGETS[:] = ['#all-targets']
1433
1434 ###################################################
1435 #
1436 # Define build environments for selected configurations.
1437 #
1438 ###################################################
1439
1440 def variant_name(path):
1441 return os.path.basename(path).lower().replace('_', '-')
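# For illustration: variant_name('build/X86_MESI_Two_Level') returns
# 'x86-mesi-two-level'.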
1442 main['variant_name'] = variant_name
1443 main['VARIANT_NAME'] = '${variant_name(BUILDDIR)}'
1444
1445 for variant_path in variant_paths:
1446 if not GetOption('silent'):
1447 print "Building in", variant_path
1448
1449 # Make a copy of the build-root environment to use for this config.
1450 env = main.Clone()
1451 env['BUILDDIR'] = variant_path
1452
1453 # variant_dir is the tail component of build path, and is used to
1454 # determine the build parameters (e.g., 'ALPHA_SE')
1455 (build_root, variant_dir) = splitpath(variant_path)
1456
1457 # Set env variables according to the build directory config.
1458 sticky_vars.files = []
1459 # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
1460 # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
1461 # $BUILD_ROOT/$VARIANT_DIR without losing your variable settings.
1462 current_vars_file = joinpath(build_root, 'variables', variant_dir)
1463 if isfile(current_vars_file):
1464 sticky_vars.files.append(current_vars_file)
1465 if not GetOption('silent'):
1466 print "Using saved variables file %s" % current_vars_file
1467 elif variant_dir in ext_build_dirs:
1468 # Things in ext are built without a variant directory.
1469 continue
1470 else:
1471 # Build dir-specific variables file doesn't exist.
1472
1473 # Make sure the directory is there so we can create it later
1474 opt_dir = dirname(current_vars_file)
1475 if not isdir(opt_dir):
1476 mkdir(opt_dir)
1477
1478 # Get default build variables from source tree. Variables are
1479 # normally determined by name of $VARIANT_DIR, but can be
1480 # overridden by '--default=' arg on command line.
1481 default = GetOption('default')
1482 opts_dir = joinpath(main.root.abspath, 'build_opts')
1483 if default:
1484 default_vars_files = [joinpath(build_root, 'variables', default),
1485 joinpath(opts_dir, default)]
1486 else:
1487 default_vars_files = [joinpath(opts_dir, variant_dir)]
1488 existing_files = filter(isfile, default_vars_files)
1489 if existing_files:
1490 default_vars_file = existing_files[0]
1491 sticky_vars.files.append(default_vars_file)
1492 print "Variables file %s not found,\n using defaults in %s" \
1493 % (current_vars_file, default_vars_file)
1494 else:
1495 print "Error: cannot find variables file %s or " \
1496 "default file(s) %s" \
1497 % (current_vars_file, ' or '.join(default_vars_files))
1498 Exit(1)
1499
1500 # Apply current variable settings to env
1501 sticky_vars.Update(env)
1502
1503 help_texts["local_vars"] += \
1504 "Build variables for %s:\n" % variant_dir \
1505 + sticky_vars.GenerateHelpText(env)
1506
1507 # Process variable settings.
1508
1509 if not have_fenv and env['USE_FENV']:
1510 print "Warning: <fenv.h> not available; " \
1511 "forcing USE_FENV to False in", variant_dir + "."
1512 env['USE_FENV'] = False
1513
1514 if not env['USE_FENV']:
1515 print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
1516 print " FP results may deviate slightly from other platforms."
1517
1518 if env['EFENCE']:
1519 env.Append(LIBS=['efence'])
1520
1521 if env['USE_KVM']:
1522 if not have_kvm:
1523 print "Warning: Can not enable KVM, host seems to lack KVM support"
1524 env['USE_KVM'] = False
1525 elif not is_isa_kvm_compatible(env['TARGET_ISA']):
1526 print "Info: KVM support disabled due to unsupported host and " \
1527 "target ISA combination"
1528 env['USE_KVM'] = False
1529
1530 if env['USE_TUNTAP']:
1531 if not have_tuntap:
1532 print "Warning: Can't connect EtherTap with a tap device."
1533 env['USE_TUNTAP'] = False
1534
1535 if env['BUILD_GPU']:
1536 env.Append(CPPDEFINES=['BUILD_GPU'])
1537
1538 # Warn about missing optional functionality
1539 if env['USE_KVM']:
1540 if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
1541 print "Warning: perf_event headers lack support for the " \
1542 "exclude_host attribute. KVM instruction counts will " \
1543 "be inaccurate."
1544
1545 # Save sticky variable settings back to current variables file
1546 sticky_vars.Save(current_vars_file, env)
1547
1548 if env['USE_SSE2']:
1549 env.Append(CCFLAGS=['-msse2'])
1550
1551 # The src/SConscript file sets up the build rules in 'env' according
1552 # to the configured variables. It returns a list of environments,
1553 # one for each variant build (debug, opt, etc.)
1554 SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
1555
1556 def pairwise(iterable):
1557 "s -> (s0,s1), (s1,s2), (s2, s3), ..."
1558 a, b = itertools.tee(iterable)
1559 b.next()
1560 return itertools.izip(a, b)
1561
1562 variant_names = [variant_name(path) for path in variant_paths]
1563
1564 # Create false dependencies so SCons will parse ISAs, establish
1565 # dependencies, and set up the build Environments serially. Either
1566 # SCons (likely) or our SConscripts (possibly) cannot cope with -j
1567 # greater than 1. It appears to be standard race condition stuff; it
1568 # doesn't always fail, but usually, and the behaviors are different.
1569 # Every time I tried to remove this, builds would fail in some
1570 # creative new way. So, don't do that. You'll want to, though, because
1571 # tests/SConscript takes a long time to make its Environments.
1572 for t1, t2 in pairwise(sorted(variant_names)):
1573 main.Depends('#%s-deps' % t2, '#%s-deps' % t1)
1574 main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
1575
1576 # base help text
1577 Help('''
1578 Usage: scons [scons options] [build variables] [target(s)]
1579
1580 Extra scons options:
1581 %(options)s
1582
1583 Global build variables:
1584 %(global_vars)s
1585
1586 %(local_vars)s
1587 ''' % help_texts)