1 # -*- mode:python -*-
2
3 # Copyright (c) 2013, 2015 ARM Limited
4 # All rights reserved.
5 #
6 # The license below extends only to copyright in the software and shall
7 # not be construed as granting a license to any other intellectual
8 # property including but not limited to intellectual property relating
9 # to a hardware implementation of the functionality of the software
10 # licensed hereunder. You may use the software subject to the license
11 # terms below provided that you ensure that this notice is replicated
12 # unmodified and in its entirety in all distributions of the software,
13 # modified or unmodified, in source code or in binary form.
14 #
15 # Copyright (c) 2011 Advanced Micro Devices, Inc.
16 # Copyright (c) 2009 The Hewlett-Packard Development Company
17 # Copyright (c) 2004-2005 The Regents of The University of Michigan
18 # All rights reserved.
19 #
20 # Redistribution and use in source and binary forms, with or without
21 # modification, are permitted provided that the following conditions are
22 # met: redistributions of source code must retain the above copyright
23 # notice, this list of conditions and the following disclaimer;
24 # redistributions in binary form must reproduce the above copyright
25 # notice, this list of conditions and the following disclaimer in the
26 # documentation and/or other materials provided with the distribution;
27 # neither the name of the copyright holders nor the names of its
28 # contributors may be used to endorse or promote products derived from
29 # this software without specific prior written permission.
30 #
31 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
42 #
43 # Authors: Steve Reinhardt
44 # Nathan Binkert
45
46 ###################################################
47 #
48 # SCons top-level build description (SConstruct) file.
49 #
50 # While in this directory ('gem5'), just type 'scons' to build the default
51 # configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52 # to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53 # the optimized version).
54 #
55 # You can build gem5 in a different directory as long as there is a
56 # 'build/<CONFIG>' somewhere along the target path. The build system
57 # expects that all configs under the same build directory are being
58 # built for the same host system.
59 #
60 # Examples:
61 #
62 # The following two commands are equivalent. The '-u' option tells
63 # scons to search up the directory tree for this SConstruct file.
64 # % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65 # % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
66 #
67 # The following two commands are equivalent and demonstrate building
68 # in a directory outside of the source tree. The '-C' option tells
69 # scons to chdir to the specified directory to find this SConstruct
70 # file.
71 # % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72 # % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
73 #
74 # You can use 'scons -H' to print scons options. If you're in this
75 # 'gem5' directory (or use -u or -C to tell scons where to find this
76 # file), you can use 'scons -h' to print all the gem5-specific build
77 # options as well.
78 #
79 ###################################################
80
81 # Check for recent-enough Python and SCons versions.
82 try:
83 # Really old versions of scons only accept two arguments to this
84 # function, so check once without the revision and once with it.
85 # The first check will fail for versions older than 0.98, and the
86 # second will fail for 0.98.0.
87 EnsureSConsVersion(0, 98)
88 EnsureSConsVersion(0, 98, 1)
89 except SystemExit, e:
90 print """
91 For more details, see:
92 http://gem5.org/Dependencies
93 """
94 raise
95
96 # We check the python version early because python-config
97 # requires python 2.5
98 try:
99 EnsurePythonVersion(2, 5)
100 except SystemExit, e:
101 print """
102 You can use a non-default installation of the Python interpreter by
103 rearranging your PATH so that scons finds the non-default 'python' and
104 'python-config' first.
105
106 For more details, see:
107 http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
108 """
109 raise
110
111 # Global Python includes
112 import itertools
113 import os
114 import re
115 import subprocess
116 import sys
117
118 from os import mkdir, environ
119 from os.path import abspath, basename, dirname, expanduser, normpath
120 from os.path import exists, isdir, isfile
121 from os.path import join as joinpath, split as splitpath
122
123 # SCons includes
124 import SCons
125 import SCons.Node
126
127 extra_python_paths = [
128 Dir('src/python').srcnode().abspath, # gem5 includes
129 Dir('ext/ply').srcnode().abspath, # ply is used by several files
130 ]
131
132 sys.path[1:1] = extra_python_paths
133
134 from m5.util import compareVersions, readCommand
135 from m5.util.terminal import get_termcap
136
137 help_texts = {
138 "options" : "",
139 "global_vars" : "",
140 "local_vars" : ""
141 }
142
143 Export("help_texts")
144
145
146 # There's a bug in scons in that (1) by default, the help texts from
147 # AddOption() are supposed to be displayed when you type 'scons -h'
148 # and (2) you can override the help displayed by 'scons -h' using the
149 # Help() function, but these two features are incompatible: once
150 # you've overridden the help text using Help(), there's no way to get
151 # at the help texts from AddOption. See:
152 # http://scons.tigris.org/issues/show_bug.cgi?id=2356
153 # http://scons.tigris.org/issues/show_bug.cgi?id=2611
154 # This hack lets us extract the help text from AddOption and
155 # re-inject it via Help(). Ideally someday this bug will be fixed and
156 # we can just use AddOption directly.
157 def AddLocalOption(*args, **kwargs):
158 col_width = 30
159
160 help = " " + ", ".join(args)
161 if "help" in kwargs:
162 length = len(help)
163 if length >= col_width:
164 help += "\n" + " " * col_width
165 else:
166 help += " " * (col_width - length)
167 help += kwargs["help"]
168 help_texts["options"] += help + "\n"
169
170 AddOption(*args, **kwargs)
171
172 AddLocalOption('--colors', dest='use_colors', action='store_true',
173 help="Add color to abbreviated scons output")
174 AddLocalOption('--no-colors', dest='use_colors', action='store_false',
175 help="Don't add color to abbreviated scons output")
176 AddLocalOption('--with-cxx-config', dest='with_cxx_config',
177 action='store_true',
178 help="Build with support for C++-based configuration")
179 AddLocalOption('--default', dest='default', type='string', action='store',
180 help='Override which build_opts file to use for defaults')
181 AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
182 help='Disable style checking hooks')
183 AddLocalOption('--no-lto', dest='no_lto', action='store_true',
184 help='Disable Link-Time Optimization for fast')
185 AddLocalOption('--update-ref', dest='update_ref', action='store_true',
186 help='Update test reference outputs')
187 AddLocalOption('--verbose', dest='verbose', action='store_true',
188 help='Print full tool command lines')
189 AddLocalOption('--without-python', dest='without_python',
190 action='store_true',
191 help='Build without Python configuration support')
192 AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
193 action='store_true',
194 help='Disable linking against tcmalloc')
195 AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
196 help='Build with Undefined Behavior Sanitizer if available')
197
198 termcap = get_termcap(GetOption('use_colors'))
199
200 ########################################################################
201 #
202 # Set up the main build environment.
203 #
204 ########################################################################
205
206 # export TERM so that clang reports errors in color
207 use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
208 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
209 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])
210
211 use_prefixes = [
212 "CCACHE_", # ccache (caching compiler wrapper) configuration
213 "CCC_", # clang static analyzer configuration
214 "DISTCC_", # distcc (distributed compiler wrapper) configuration
215 "INCLUDE_SERVER_", # distcc pump server settings
216 "M5", # M5 configuration (e.g., path to kernels)
217 ]
218
219 use_env = {}
220 for key,val in sorted(os.environ.iteritems()):
221 if key in use_vars or \
222 any([key.startswith(prefix) for prefix in use_prefixes]):
223 use_env[key] = val
224
225 # Tell scons to avoid implicit command dependencies to work around
226 # issues with the param wrappers being compiled twice (see
227 # http://scons.tigris.org/issues/show_bug.cgi?id=2811)
228 main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
229 main.Decider('MD5-timestamp')
230 main.root = Dir(".") # The current directory (where this file lives).
231 main.srcdir = Dir("src") # The source directory
232
233 main_dict_keys = main.Dictionary().keys()
234
235 # Check that we have a C/C++ compiler
236 if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
237 print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
238 Exit(1)
239
240 # Check that swig is present
241 if not 'SWIG' in main_dict_keys:
242 print "swig is not installed (package swig on Ubuntu and RedHat)"
243 Exit(1)
244
245 # Add the useful python code to PYTHONPATH so it can be used by
246 # subprocesses as well
247 main.AppendENVPath('PYTHONPATH', extra_python_paths)
248
249 ########################################################################
250 #
251 # Mercurial Stuff.
252 #
253 # If the gem5 directory is a mercurial repository, we should do some
254 # extra things.
255 #
256 ########################################################################
257
258 hgdir = main.root.Dir(".hg")
259
260 mercurial_style_message = """
261 You're missing the gem5 style hook, which automatically checks your code
262 against the gem5 style rules on hg commit and qrefresh commands. This
263 script will now install the hook in your .hg/hgrc file.
264 Press enter to continue, or ctrl-c to abort: """
265
266 mercurial_style_hook = """
267 # The following lines were automatically added by gem5/SConstruct
268 # to provide the gem5 style-checking hooks
269 [extensions]
270 style = %s/util/style.py
271
272 [hooks]
273 pretxncommit.style = python:style.check_style
274 pre-qrefresh.style = python:style.check_style
275 # End of SConstruct additions
276
277 """ % (main.root.abspath)
278
279 mercurial_lib_not_found = """
280 Mercurial libraries cannot be found, ignoring style hook. If
281 you are a gem5 developer, please fix this and run the style
282 hook. It is important.
283 """
284
285 # Check for style hook and prompt for installation if it's not there.
286 # Skip this if --ignore-style was specified, there's no .hg dir to
287 # install a hook in, or there's no interactive terminal to prompt.
288 if not GetOption('ignore_style') and hgdir.exists() and sys.stdin.isatty():
289 style_hook = True
290 try:
291 from mercurial import ui
292 ui = ui.ui()
293 ui.readconfig(hgdir.File('hgrc').abspath)
294 style_hook = ui.config('hooks', 'pretxncommit.style', None) and \
295 ui.config('hooks', 'pre-qrefresh.style', None)
296 except ImportError:
297 print mercurial_lib_not_found
298
299 if not style_hook:
300 print mercurial_style_message,
301 # continue unless user does ctrl-c/ctrl-d etc.
302 try:
303 raw_input()
304 except:
305 print "Input exception, exiting scons.\n"
306 sys.exit(1)
307 hgrc_path = '%s/.hg/hgrc' % main.root.abspath
308 print "Adding style hook to", hgrc_path, "\n"
309 try:
310 hgrc = open(hgrc_path, 'a')
311 hgrc.write(mercurial_style_hook)
312 hgrc.close()
313 except:
314 print "Error updating", hgrc_path
315 sys.exit(1)
316
317
318 ###################################################
319 #
320 # Figure out which configurations to set up based on the path(s) of
321 # the target(s).
322 #
323 ###################################################
324
325 # Find default configuration & binary.
326 Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
327
328 # helper function: find last occurrence of element in list
329 def rfind(l, elt, offs = -1):
330 for i in range(len(l)+offs, 0, -1):
331 if l[i] == elt:
332 return i
333 raise ValueError, "element not found"
334
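# Illustration only (hypothetical call, not executed by the build): splitting
# the example target '/local/foo/build/ALPHA/gem5.debug' on '/' gives
# ['', 'local', 'foo', 'build', 'ALPHA', 'gem5.debug'], and
#   rfind(['', 'local', 'foo', 'build', 'ALPHA', 'gem5.debug'], 'build', -2)
# returns 3, the index of the last 'build' component that is not the leaf
# (offs=-2 keeps the final path element out of the search).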
335 # Take a list of paths (or SCons Nodes) and return a list with all
336 # paths made absolute and ~-expanded. Paths will be interpreted
337 # relative to the launch directory unless a different root is provided
338 def makePathListAbsolute(path_list, root=GetLaunchDir()):
339 return [abspath(joinpath(root, expanduser(str(p))))
340 for p in path_list]
341
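# Illustration only (hypothetical paths): if scons is launched from
# /home/user/gem5, then
#   makePathListAbsolute(['build/ALPHA/gem5.debug', '~/out/build/X86/gem5.opt'])
# would return
#   ['/home/user/gem5/build/ALPHA/gem5.debug', '/home/user/out/build/X86/gem5.opt']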
342 # Each target must have 'build' in the interior of the path; the
343 # directory below this will determine the build parameters. For
344 # example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
345 # recognize that ALPHA_SE specifies the configuration because it
346 # follows 'build' in the build path.
347
348 # The funky assignment to "[:]" is needed to replace the list contents
349 # in place rather than reassign the symbol to a new list, which
350 # doesn't work (obviously!).
351 BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
352
353 # Generate a list of the unique build roots and configs that the
354 # collected targets reference.
355 variant_paths = []
356 build_root = None
357 for t in BUILD_TARGETS:
358 path_dirs = t.split('/')
359 try:
360 build_top = rfind(path_dirs, 'build', -2)
361 except:
362 print "Error: no non-leaf 'build' dir found on target path", t
363 Exit(1)
364 this_build_root = joinpath('/',*path_dirs[:build_top+1])
365 if not build_root:
366 build_root = this_build_root
367 else:
368 if this_build_root != build_root:
369 print "Error: build targets not under same build root\n"\
370 " %s\n %s" % (build_root, this_build_root)
371 Exit(1)
372 variant_path = joinpath('/',*path_dirs[:build_top+2])
373 if variant_path not in variant_paths:
374 variant_paths.append(variant_path)
375
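# Illustration only: for the example target '/local/foo/build/ALPHA/gem5.debug'
# used in the header comment, the loop above would set
#   build_root   = '/local/foo/build'
#   variant_path = '/local/foo/build/ALPHA'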
376 # Make sure build_root exists (might not if this is the first build there)
377 if not isdir(build_root):
378 mkdir(build_root)
379 main['BUILDROOT'] = build_root
380
381 Export('main')
382
383 main.SConsignFile(joinpath(build_root, "sconsign"))
384
385 # Default duplicate option is to use hard links, but this messes up
386 # when you use emacs to edit a file in the target dir, as emacs moves
387 # file to file~ then copies to file, breaking the link. Symbolic
388 # (soft) links work better.
389 main.SetOption('duplicate', 'soft-copy')
390
391 #
392 # Set up global sticky variables... these are common to an entire build
393 # tree (not specific to a particular build like ALPHA_SE)
394 #
395
396 global_vars_file = joinpath(build_root, 'variables.global')
397
398 global_vars = Variables(global_vars_file, args=ARGUMENTS)
399
400 global_vars.AddVariables(
401 ('CC', 'C compiler', environ.get('CC', main['CC'])),
402 ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
403 ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
404 ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
405 ('BATCH', 'Use batch pool for build and tests', False),
406 ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
407 ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
408 ('EXTRAS', 'Add extra directories to the compilation', '')
409 )
410
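# Illustration only (hypothetical invocation): these global sticky variables
# can be set as key=value arguments on the scons command line and are then
# remembered in <build_root>/variables.global on subsequent runs, e.g.
#   % scons CC=clang CXX=clang++ EXTRAS=/path/to/extra/src build/ALPHA/gem5.opt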
411 # Update main environment with values from ARGUMENTS & global_vars_file
412 global_vars.Update(main)
413 help_texts["global_vars"] += global_vars.GenerateHelpText(main)
414
415 # Save sticky variable settings back to current variables file
416 global_vars.Save(global_vars_file, main)
417
418 # Parse the EXTRAS variable to build a list of all directories where we
419 # look for sources etc. This list is exported as extras_dir_list.
420 base_dir = main.srcdir.abspath
421 if main['EXTRAS']:
422 extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
423 else:
424 extras_dir_list = []
425
426 Export('base_dir')
427 Export('extras_dir_list')
428
429 # the ext directory should be on the #include path
430 main.Append(CPPPATH=[Dir('ext')])
431
432 def strip_build_path(path, env):
433 path = str(path)
434 variant_base = env['BUILDROOT'] + os.path.sep
435 if path.startswith(variant_base):
436 path = path[len(variant_base):]
437 elif path.startswith('build/'):
438 path = path[6:]
439 return path
440
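# Illustration only (hypothetical call): with BUILDROOT set to
# '/local/foo/build', strip_build_path('/local/foo/build/ALPHA/base/misc.cc', env)
# would return 'ALPHA/base/misc.cc'; a relative path starting with 'build/'
# would be shortened the same way.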
441 # Generate a string of the form:
442 # common/path/prefix/src1, src2 -> tgt1, tgt2
443 # to print while building.
444 class Transform(object):
445 # all specific color settings should be here and nowhere else
446 tool_color = termcap.Normal
447 pfx_color = termcap.Yellow
448 srcs_color = termcap.Yellow + termcap.Bold
449 arrow_color = termcap.Blue + termcap.Bold
450 tgts_color = termcap.Yellow + termcap.Bold
451
452 def __init__(self, tool, max_sources=99):
453 self.format = self.tool_color + (" [%8s] " % tool) \
454 + self.pfx_color + "%s" \
455 + self.srcs_color + "%s" \
456 + self.arrow_color + " -> " \
457 + self.tgts_color + "%s" \
458 + termcap.Normal
459 self.max_sources = max_sources
460
461 def __call__(self, target, source, env, for_signature=None):
462 # truncate source list according to max_sources param
463 source = source[0:self.max_sources]
464 def strip(f):
465 return strip_build_path(str(f), env)
466 if len(source) > 0:
467 srcs = map(strip, source)
468 else:
469 srcs = ['']
470 tgts = map(strip, target)
471 # surprisingly, os.path.commonprefix is a dumb char-by-char string
472 # operation that has nothing to do with paths.
473 com_pfx = os.path.commonprefix(srcs + tgts)
474 com_pfx_len = len(com_pfx)
475 if com_pfx:
476 # do some cleanup and sanity checking on common prefix
477 if com_pfx[-1] == ".":
478 # prefix matches all but file extension: ok
479 # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
480 com_pfx = com_pfx[0:-1]
481 elif com_pfx[-1] == "/":
482 # common prefix is directory path: OK
483 pass
484 else:
485 src0_len = len(srcs[0])
486 tgt0_len = len(tgts[0])
487 if src0_len == com_pfx_len:
488 # source is a substring of target, OK
489 pass
490 elif tgt0_len == com_pfx_len:
491 # target is a substring of source, need to back up to
492 # avoid empty string on RHS of arrow
493 sep_idx = com_pfx.rfind(".")
494 if sep_idx != -1:
495 com_pfx = com_pfx[0:sep_idx]
496 else:
497 com_pfx = ''
498 elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
499 # still splitting at file extension: ok
500 pass
501 else:
502 # probably a fluke; ignore it
503 com_pfx = ''
504 # recalculate length in case com_pfx was modified
505 com_pfx_len = len(com_pfx)
506 def fmt(files):
507 f = map(lambda s: s[com_pfx_len:], files)
508 return ', '.join(f)
509 return self.format % (com_pfx, fmt(srcs), fmt(tgts))
510
511 Export('Transform')
512
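# Illustration only: with the abbreviated output enabled below, a compile step
# might be echoed roughly as
#   [     CXX] ALPHA/base/misc.cc -> .o
# i.e. the common path prefix is printed once, followed by the shortened
# source list, an arrow, and the shortened target list.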
513 # enable the regression script to use the termcap
514 main['TERMCAP'] = termcap
515
516 if GetOption('verbose'):
517 def MakeAction(action, string, *args, **kwargs):
518 return Action(action, *args, **kwargs)
519 else:
520 MakeAction = Action
521 main['CCCOMSTR'] = Transform("CC")
522 main['CXXCOMSTR'] = Transform("CXX")
523 main['ASCOMSTR'] = Transform("AS")
524 main['SWIGCOMSTR'] = Transform("SWIG")
525 main['ARCOMSTR'] = Transform("AR", 0)
526 main['LINKCOMSTR'] = Transform("LINK", 0)
527 main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
528 main['M4COMSTR'] = Transform("M4")
529 main['SHCCCOMSTR'] = Transform("SHCC")
530 main['SHCXXCOMSTR'] = Transform("SHCXX")
531 Export('MakeAction')
532
533 # Initialize the Link-Time Optimization (LTO) flags
534 main['LTO_CCFLAGS'] = []
535 main['LTO_LDFLAGS'] = []
536
537 # According to the readme, tcmalloc works best if the compiler doesn't
538 # assume that we're using the builtin malloc and friends. These flags
539 # are compiler-specific, so we need to set them after we detect which
540 # compiler we're using.
541 main['TCMALLOC_CCFLAGS'] = []
542
543 CXX_version = readCommand([main['CXX'],'--version'], exception=False)
544 CXX_V = readCommand([main['CXX'],'-V'], exception=False)
545
546 main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
547 main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
548 if main['GCC'] + main['CLANG'] > 1:
549 print 'Error: How can we have two at the same time?'
550 Exit(1)
551
552 # Set up default C++ compiler flags
553 if main['GCC'] or main['CLANG']:
554 # As gcc and clang share many flags, do the common parts here
555 main.Append(CCFLAGS=['-pipe'])
556 main.Append(CCFLAGS=['-fno-strict-aliasing'])
557 # Enable -Wall and -Wextra and then disable the few warnings that
558 # we consistently violate
559 main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
560 '-Wno-sign-compare', '-Wno-unused-parameter'])
561 # We always compile using C++11
562 main.Append(CXXFLAGS=['-std=c++11'])
563 else:
564 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
565 print "Don't know what compiler options to use for your compiler."
566 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
567 print termcap.Yellow + ' version:' + termcap.Normal,
568 if not CXX_version:
569 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
570 termcap.Normal
571 else:
572 print CXX_version.replace('\n', '<nl>')
573 print " If you're trying to use a compiler other than GCC"
574 print " or clang, there appears to be something wrong with your"
575 print " environment."
576 print " "
577 print " If you are trying to use a compiler other than those listed"
578 print " above you will need to fix SConstruct and "
579 print " src/SConscript to support that compiler."
580 Exit(1)
581
582 if main['GCC']:
583 # Check for a supported version of gcc. >= 4.7 is chosen for its
584 # level of c++11 support. See
585 # http://gcc.gnu.org/projects/cxx0x.html for details.
586 gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
587 if compareVersions(gcc_version, "4.7") < 0:
588 print 'Error: gcc version 4.7 or newer required.'
589 print ' Installed version:', gcc_version
590 Exit(1)
591
592 main['GCC_VERSION'] = gcc_version
593
594 # gcc from version 4.8 and above generates "rep; ret" instructions
595 # to avoid performance penalties on certain AMD chips. Older
596 # assemblers detect this as an error, "Error: expecting string
597 # instruction after `rep'"
598 if compareVersions(gcc_version, "4.8") > 0:
599 as_version_raw = readCommand([main['AS'], '-v', '/dev/null'],
600 exception=False).split()
601
602 # version strings may contain extra distro-specific
603 # qualifiers, so play it safe and keep only what comes before
604 # the first hyphen
605 as_version = as_version_raw[-1].split('-')[0] if as_version_raw \
606 else None
607
608 if not as_version or compareVersions(as_version, "2.23") < 0:
609 print termcap.Yellow + termcap.Bold + \
610 'Warning: This combination of gcc and binutils has' + \
611 ' known incompatibilities.\n' + \
612 ' If you encounter build problems, please update ' + \
613 'binutils to 2.23.' + \
614 termcap.Normal
615
616 # Make sure we warn if the user has requested to compile with the
617 # Undefined Behavior Sanitizer and this version of gcc does not
618 # support it.
619 if GetOption('with_ubsan') and \
620 compareVersions(gcc_version, '4.9') < 0:
621 print termcap.Yellow + termcap.Bold + \
622 'Warning: UBSan is only supported using gcc 4.9 and later.' + \
623 termcap.Normal
624
625 # Add the appropriate Link-Time Optimization (LTO) flags
626 # unless LTO is explicitly turned off. Note that these flags
627 # are only used by the fast target.
628 if not GetOption('no_lto'):
629 # Pass the LTO flag when compiling to produce GIMPLE
630 # output, we merely create the flags here and only append
631 # them later
632 main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
633
634 # Use the same amount of jobs for LTO as we are running
635 # scons with
636 main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
637
638 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
639 '-fno-builtin-realloc', '-fno-builtin-free'])
640
641 elif main['CLANG']:
642 # Check for a supported version of clang, >= 3.1 is needed to
643 # support similar features as gcc 4.7. See
644 # http://clang.llvm.org/cxx_status.html for details
645 clang_version_re = re.compile(".* version (\d+\.\d+)")
646 clang_version_match = clang_version_re.search(CXX_version)
647 if (clang_version_match):
648 clang_version = clang_version_match.groups()[0]
649 if compareVersions(clang_version, "3.1") < 0:
650 print 'Error: clang version 3.1 or newer required.'
651 print ' Installed version:', clang_version
652 Exit(1)
653 else:
654 print 'Error: Unable to determine clang version.'
655 Exit(1)
656
657 # clang has a few additional warnings that we disable: extraneous
658 # parentheses are allowed due to Ruby's printing of the AST, and
659 # self assignments are allowed as the generated CPU code relies
660 # on this
661 main.Append(CCFLAGS=['-Wno-parentheses',
662 '-Wno-self-assign',
663 # Some versions of libstdc++ (4.8?) seem to
664 # use struct hash and class hash
665 # interchangeably.
666 '-Wno-mismatched-tags',
667 ])
668
669 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
670
671 # On Mac OS X/Darwin we need to use libc++ (part of Xcode) instead
672 # of libstdc++, as the latter is dated.
673 if sys.platform == "darwin":
674 main.Append(CXXFLAGS=['-stdlib=libc++'])
675 main.Append(LIBS=['c++'])
676
677 else:
678 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
679 print "Don't know what compiler options to use for your compiler."
680 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
681 print termcap.Yellow + ' version:' + termcap.Normal,
682 if not CXX_version:
683 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
684 termcap.Normal
685 else:
686 print CXX_version.replace('\n', '<nl>')
687 print " If you're trying to use a compiler other than GCC"
688 print " or clang, there appears to be something wrong with your"
689 print " environment."
690 print " "
691 print " If you are trying to use a compiler other than those listed"
692 print " above you will need to fix SConstruct and "
693 print " src/SConscript to support that compiler."
694 Exit(1)
695
696 # Set up common yacc/bison flags (needed for Ruby)
697 main['YACCFLAGS'] = '-d'
698 main['YACCHXXFILESUFFIX'] = '.hh'
699
700 # Do this after we save the settings back, or else we'll tack on an
701 # extra 'qdo' every time we run scons.
702 if main['BATCH']:
703 main['CC'] = main['BATCH_CMD'] + ' ' + main['CC']
704 main['CXX'] = main['BATCH_CMD'] + ' ' + main['CXX']
705 main['AS'] = main['BATCH_CMD'] + ' ' + main['AS']
706 main['AR'] = main['BATCH_CMD'] + ' ' + main['AR']
707 main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
708
709 if sys.platform == 'cygwin':
710 # cygwin has some header file issues...
711 main.Append(CCFLAGS=["-Wno-uninitialized"])
712
713 # Check for the protobuf compiler
714 protoc_version = readCommand([main['PROTOC'], '--version'],
715 exception='').split()
716
717 # First two words should be "libprotoc x.y.z"
718 if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
719 print termcap.Yellow + termcap.Bold + \
720 'Warning: Protocol buffer compiler (protoc) not found.\n' + \
721 ' Please install protobuf-compiler for tracing support.' + \
722 termcap.Normal
723 main['PROTOC'] = False
724 else:
725 # Based on the availability of the compress stream wrappers,
726 # require 2.1.0
727 min_protoc_version = '2.1.0'
728 if compareVersions(protoc_version[1], min_protoc_version) < 0:
729 print termcap.Yellow + termcap.Bold + \
730 'Warning: protoc version', min_protoc_version, \
731 'or newer required.\n' + \
732 ' Installed version:', protoc_version[1], \
733 termcap.Normal
734 main['PROTOC'] = False
735 else:
736 # Attempt to determine the appropriate include path and
737 # library path using pkg-config, which means we also need to
738 # check for pkg-config. Note that it is possible to use
739 # protobuf without the involvement of pkg-config. Later on we
740 # do a library config check, and at that point the test
741 # will fail if libprotobuf cannot be found.
742 if readCommand(['pkg-config', '--version'], exception=''):
743 try:
744 # Attempt to establish what linking flags to add for protobuf
745 # using pkg-config
746 main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
747 except:
748 print termcap.Yellow + termcap.Bold + \
749 'Warning: pkg-config could not get protobuf flags.' + \
750 termcap.Normal
751
752 # Check for SWIG
753 if not main.has_key('SWIG'):
754 print 'Error: SWIG utility not found.'
755 print ' Please install (see http://www.swig.org) and retry.'
756 Exit(1)
757
758 # Check for appropriate SWIG version
759 swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
760 # First 3 words should be "SWIG Version x.y.z"
761 if len(swig_version) < 3 or \
762 swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
763 print 'Error determining SWIG version.'
764 Exit(1)
765
766 min_swig_version = '2.0.4'
767 if compareVersions(swig_version[2], min_swig_version) < 0:
768 print 'Error: SWIG version', min_swig_version, 'or newer required.'
769 print ' Installed version:', swig_version[2]
770 Exit(1)
771
772 # Check for known incompatibilities. The standard library shipped with
773 # gcc >= 4.9 does not play well with swig versions prior to 3.0
774 if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
775 compareVersions(swig_version[2], '3.0') < 0:
776 print termcap.Yellow + termcap.Bold + \
777 'Warning: This combination of gcc and swig has' + \
778 ' known incompatibilities.\n' + \
779 ' If you encounter build problems, please update ' + \
780 'swig to 3.0 or later.' + \
781 termcap.Normal
782
783 # Set up SWIG flags & scanner
784 swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
785 main.Append(SWIGFLAGS=swig_flags)
786
787 # Check for 'timeout' from GNU coreutils. If present, regressions will
788 # be run with a time limit. We require version 8.13 since we rely on
789 # support for the '--foreground' option.
790 timeout_lines = readCommand(['timeout', '--version'],
791 exception='').splitlines()
792 # Get the first line and tokenize it
793 timeout_version = timeout_lines[0].split() if timeout_lines else []
794 main['TIMEOUT'] = timeout_version and \
795 compareVersions(timeout_version[-1], '8.13') >= 0
796
797 # Filter out all existing swig scanners; they mess up the dependency
798 # tracking for some reason
799 scanners = []
800 for scanner in main['SCANNERS']:
801 skeys = scanner.skeys
802 if skeys == '.i':
803 continue
804
805 if isinstance(skeys, (list, tuple)) and '.i' in skeys:
806 continue
807
808 scanners.append(scanner)
809
810 # add the new swig scanner that we like better
811 from SCons.Scanner import ClassicCPP as CPPScanner
812 swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
813 scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))
814
815 # replace the scanners list that has what we want
816 main['SCANNERS'] = scanners
817
818 # Add a custom Check function to test for structure members.
819 def CheckMember(context, include, decl, member, include_quotes="<>"):
820 context.Message("Checking for member %s in %s..." %
821 (member, decl))
822 text = """
823 #include %(header)s
824 int main(){
825 %(decl)s test;
826 (void)test.%(member)s;
827 return 0;
828 };
829 """ % { "header" : include_quotes[0] + include + include_quotes[1],
830 "decl" : decl,
831 "member" : member,
832 }
833
834 ret = context.TryCompile(text, extension=".cc")
835 context.Result(ret)
836 return ret
837
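# Illustration only (hypothetical check): once registered as a custom test on
# the Configure context below, this can be invoked as, e.g.,
#   conf.CheckMember('sys/stat.h', 'struct stat', 'st_mtime')
# which try-compiles a small program that accesses the named member. The real
# use in this file is the perf_event_attr.exclude_host check further down.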
838 # Platform-specific configuration. Note again that we assume that all
839 # builds under a given build root run on the same host platform.
840 conf = Configure(main,
841 conf_dir = joinpath(build_root, '.scons_config'),
842 log_file = joinpath(build_root, 'scons_config.log'),
843 custom_tests = {
844 'CheckMember' : CheckMember,
845 })
846
847 # Check if we should compile a 64 bit binary on Mac OS X/Darwin
848 try:
849 import platform
850 uname = platform.uname()
851 if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
852 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
853 main.Append(CCFLAGS=['-arch', 'x86_64'])
854 main.Append(CFLAGS=['-arch', 'x86_64'])
855 main.Append(LINKFLAGS=['-arch', 'x86_64'])
856 main.Append(ASFLAGS=['-arch', 'x86_64'])
857 except:
858 pass
859
860 # Recent versions of scons substitute a "Null" object for Configure()
861 # when configuration isn't necessary, e.g., if the "--help" option is
862 # present. Unfortunately this Null object always returns false,
863 # breaking all our configuration checks. We replace it with our own
864 # more optimistic null object that returns True instead.
865 if not conf:
866 def NullCheck(*args, **kwargs):
867 return True
868
869 class NullConf:
870 def __init__(self, env):
871 self.env = env
872 def Finish(self):
873 return self.env
874 def __getattr__(self, mname):
875 return NullCheck
876
877 conf = NullConf(main)
878
879 # Cache build files in the supplied directory.
880 if main['M5_BUILD_CACHE']:
881 print 'Using build cache located at', main['M5_BUILD_CACHE']
882 CacheDir(main['M5_BUILD_CACHE'])
883
884 if not GetOption('without_python'):
885 # Find Python include and library directories for embedding the
886 # interpreter. We rely on python-config to resolve the appropriate
887 # includes and linker flags. ParseConfig does not seem to understand
888 # the more exotic linker flags such as -Xlinker and -export-dynamic so
889 # we add them explicitly below. If you want to link in an alternate
890 # version of python, see above for instructions on how to invoke
891 # scons with the appropriate PATH set.
892 #
893 # First we check if python2-config exists, else we use python-config
894 python_config = readCommand(['which', 'python2-config'],
895 exception='').strip()
896 if not os.path.exists(python_config):
897 python_config = readCommand(['which', 'python-config'],
898 exception='').strip()
899 py_includes = readCommand([python_config, '--includes'],
900 exception='').split()
901 # Strip the -I from the include folders before adding them to the
902 # CPPPATH
903 main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
904
905 # Read the linker flags and split them into libraries and other link
906 # flags. The libraries are added later through the call to CheckLib.
907 py_ld_flags = readCommand([python_config, '--ldflags'],
908 exception='').split()
909 py_libs = []
910 for lib in py_ld_flags:
911 if not lib.startswith('-l'):
912 main.Append(LINKFLAGS=[lib])
913 else:
914 lib = lib[2:]
915 if lib not in py_libs:
916 py_libs.append(lib)
917
918 # verify that this stuff works
919 if not conf.CheckHeader('Python.h', '<>'):
920 print "Error: can't find Python.h header in", py_includes
921 print "Install Python headers (package python-dev on Ubuntu and RedHat)"
922 Exit(1)
923
924 for lib in py_libs:
925 if not conf.CheckLib(lib):
926 print "Error: can't find library %s required by python" % lib
927 Exit(1)
928
929 # On Solaris you need to use libsocket for socket ops
930 if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
931 if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
932 print "Can't find library with socket calls (e.g. accept())"
933 Exit(1)
934
935 # Check for zlib. If the check passes, libz will be automatically
936 # added to the LIBS environment variable.
937 if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
938 print 'Error: did not find needed zlib compression library '\
939 'and/or zlib.h header file.'
940 print ' Please install zlib and try again.'
941 Exit(1)
942
943 # If we have the protobuf compiler, also make sure we have the
944 # development libraries. If the check passes, libprotobuf will be
945 # automatically added to the LIBS environment variable. After
946 # this, we can use the HAVE_PROTOBUF flag to determine if we have
947 # got both protoc and libprotobuf available.
948 main['HAVE_PROTOBUF'] = main['PROTOC'] and \
949 conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
950 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
951
952 # If we have the compiler but not the library, print another warning.
953 if main['PROTOC'] and not main['HAVE_PROTOBUF']:
954 print termcap.Yellow + termcap.Bold + \
955 'Warning: did not find protocol buffer library and/or headers.\n' + \
956 ' Please install libprotobuf-dev for tracing support.' + \
957 termcap.Normal
958
959 # Check for librt.
960 have_posix_clock = \
961 conf.CheckLibWithHeader(None, 'time.h', 'C',
962 'clock_nanosleep(0,0,NULL,NULL);') or \
963 conf.CheckLibWithHeader('rt', 'time.h', 'C',
964 'clock_nanosleep(0,0,NULL,NULL);')
965
966 have_posix_timers = \
967 conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
968 'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
969
970 if not GetOption('without_tcmalloc'):
971 if conf.CheckLib('tcmalloc'):
972 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
973 elif conf.CheckLib('tcmalloc_minimal'):
974 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
975 else:
976 print termcap.Yellow + termcap.Bold + \
977 "You can get a 12% performance improvement by "\
978 "installing tcmalloc (libgoogle-perftools-dev package "\
979 "on Ubuntu or RedHat)." + termcap.Normal
980
981
982 # Detect back trace implementations. The last implementation in the
983 # list will be used by default.
984 backtrace_impls = [ "none" ]
985
986 if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
987 'backtrace_symbols_fd((void*)0, 0, 0);'):
988 backtrace_impls.append("glibc")
989
990 if backtrace_impls[-1] == "none":
991 default_backtrace_impl = "none"
992 print termcap.Yellow + termcap.Bold + \
993 "No suitable back trace implementation found." + \
994 termcap.Normal
995
996 if not have_posix_clock:
997 print "Can't find library for POSIX clocks."
998
999 # Check for <fenv.h> (C99 FP environment control)
1000 have_fenv = conf.CheckHeader('fenv.h', '<>')
1001 if not have_fenv:
1002 print "Warning: Header file <fenv.h> not found."
1003 print " This host has no IEEE FP rounding mode control."
1004
1005 # Check if we should enable KVM-based hardware virtualization. The API
1006 # we rely on has existed since version 2.6.36 of the kernel, but somehow
1007 # the KVM_API_VERSION does not reflect the change. We test for one of
1008 # the types as a fallback.
1009 have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
1010 if not have_kvm:
1011 print "Info: Compatible header file <linux/kvm.h> not found, " \
1012 "disabling KVM support."
1013
1014 # x86 needs support for xsave. We test for the structure here since we
1015 # won't be able to run new tests by the time we know which ISA we're
1016 # targeting.
1017 have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
1018 '#include <linux/kvm.h>') != 0
1019
1020 # Check if the requested target ISA is compatible with the host
1021 def is_isa_kvm_compatible(isa):
1022 try:
1023 import platform
1024 host_isa = platform.machine()
1025 except:
1026 print "Warning: Failed to determine host ISA."
1027 return False
1028
1029 if not have_posix_timers:
1030 print "Warning: Can not enable KVM, host seems to lack support " \
1031 "for POSIX timers"
1032 return False
1033
1034 if isa == "arm":
1035 return host_isa in ( "armv7l", "aarch64" )
1036 elif isa == "x86":
1037 if host_isa != "x86_64":
1038 return False
1039
1040 if not have_kvm_xsave:
1041 print "KVM on x86 requires xsave support in kernel headers."
1042 return False
1043
1044 return True
1045 else:
1046 return False
1047
1048
1049 # Check if the exclude_host attribute is available. We want this in
1050 # order to get accurate instruction counts in KVM.
1051 main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
1052 'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1053
1054
1055 ######################################################################
1056 #
1057 # Finish the configuration
1058 #
1059 main = conf.Finish()
1060
1061 ######################################################################
1062 #
1063 # Collect all non-global variables
1064 #
1065
1066 # Define the universe of supported ISAs
1067 all_isa_list = [ ]
1068 all_gpu_isa_list = [ ]
1069 Export('all_isa_list')
1070 Export('all_gpu_isa_list')
1071
1072 class CpuModel(object):
1073 '''The CpuModel class encapsulates everything the ISA parser needs to
1074 know about a particular CPU model.'''
1075
1076 # Dict of available CPU model objects. Accessible as CpuModel.dict.
1077 dict = {}
1078
1079 # Constructor. Automatically adds models to CpuModel.dict.
1080 def __init__(self, name, default=False):
1081 self.name = name # name of model
1082
1083 # This cpu is enabled by default
1084 self.default = default
1085
1086 # Add self to dict
1087 if name in CpuModel.dict:
1088 raise AttributeError, "CpuModel '%s' already registered" % name
1089 CpuModel.dict[name] = self
1090
1091 Export('CpuModel')
1092
1093 # Sticky variables get saved in the variables file so they persist from
1094 # one invocation to the next (unless overridden, in which case the new
1095 # value becomes sticky).
1096 sticky_vars = Variables(args=ARGUMENTS)
1097 Export('sticky_vars')
1098
1099 # Sticky variables that should be exported
1100 export_vars = []
1101 Export('export_vars')
1102
1103 # For Ruby
1104 all_protocols = []
1105 Export('all_protocols')
1106 protocol_dirs = []
1107 Export('protocol_dirs')
1108 slicc_includes = []
1109 Export('slicc_includes')
1110
1111 # Walk the tree and execute all SConsopts scripts that will add to the
1112 # above variables
1113 if GetOption('verbose'):
1114 print "Reading SConsopts"
1115 for bdir in [ base_dir ] + extras_dir_list:
1116 if not isdir(bdir):
1117 print "Error: directory '%s' does not exist" % bdir
1118 Exit(1)
1119 for root, dirs, files in os.walk(bdir):
1120 if 'SConsopts' in files:
1121 if GetOption('verbose'):
1122 print "Reading", joinpath(root, 'SConsopts')
1123 SConscript(joinpath(root, 'SConsopts'))
1124
1125 all_isa_list.sort()
1126 all_gpu_isa_list.sort()
1127
1128 sticky_vars.AddVariables(
1129 EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
1130 EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
1131 ListVariable('CPU_MODELS', 'CPU models',
1132 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
1133 sorted(CpuModel.dict.keys())),
1134 BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1135 False),
1136 BoolVariable('SS_COMPATIBLE_FP',
1137 'Make floating-point results compatible with SimpleScalar',
1138 False),
1139 BoolVariable('USE_SSE2',
1140 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1141 False),
1142 BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
1143 BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
1144 BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
1145 BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
1146 BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
1147 EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1148 all_protocols),
1149 EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
1150 backtrace_impls[-1], backtrace_impls)
1151 )
1152
1153 # These variables get exported to #defines in config/*.hh (see src/SConscript).
1154 export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
1155 'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL',
1156 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']
1157
1158 ###################################################
1159 #
1160 # Define a SCons builder for configuration flag headers.
1161 #
1162 ###################################################
1163
1164 # This function generates a config header file that #defines the
1165 # variable symbol to the current variable setting (0 or 1). The source
1166 # operands are the name of the variable and a Value node containing the
1167 # value of the variable.
1168 def build_config_file(target, source, env):
1169 (variable, value) = [s.get_contents() for s in source]
1170 f = file(str(target[0]), 'w')
1171 print >> f, '#define', variable, value
1172 f.close()
1173 return None
1174
1175 # Combine the two functions into a scons Action object.
1176 config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1177
1178 # The emitter munges the source & target node lists to reflect what
1179 # we're really doing.
1180 def config_emitter(target, source, env):
1181 # extract variable name from Builder arg
1182 variable = str(target[0])
1183 # True target is config header file
1184 target = joinpath('config', variable.lower() + '.hh')
1185 val = env[variable]
1186 if isinstance(val, bool):
1187 # Force value to 0/1
1188 val = int(val)
1189 elif isinstance(val, str):
1190 val = '"' + val + '"'
1191
1192 # Sources are variable name & value (packaged in SCons Value nodes)
1193 return ([target], [Value(variable), Value(val)])
1194
1195 config_builder = Builder(emitter = config_emitter, action = config_action)
1196
1197 main.Append(BUILDERS = { 'ConfigFile' : config_builder })
1198
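# Illustration only (hypothetical call): a consumer of this builder passes just
# the variable name, e.g.
#   env.ConfigFile('USE_KVM')
# and the emitter/action above turn that into config/use_kvm.hh containing a
# line such as
#   #define USE_KVM 1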
1199 # libelf build is shared across all configs in the build root.
1200 main.SConscript('ext/libelf/SConscript',
1201 variant_dir = joinpath(build_root, 'libelf'))
1202
1203 # iostream3 build is shared across all configs in the build root.
1204 main.SConscript('ext/iostream3/SConscript',
1205 variant_dir = joinpath(build_root, 'iostream3'))
1206
1207 # libfdt build is shared across all configs in the build root.
1208 main.SConscript('ext/libfdt/SConscript',
1209 variant_dir = joinpath(build_root, 'libfdt'))
1210
1211 # fputils build is shared across all configs in the build root.
1212 main.SConscript('ext/fputils/SConscript',
1213 variant_dir = joinpath(build_root, 'fputils'))
1214
1215 # DRAMSim2 build is shared across all configs in the build root.
1216 main.SConscript('ext/dramsim2/SConscript',
1217 variant_dir = joinpath(build_root, 'dramsim2'))
1218
1219 # DRAMPower build is shared across all configs in the build root.
1220 main.SConscript('ext/drampower/SConscript',
1221 variant_dir = joinpath(build_root, 'drampower'))
1222
1223 # nomali build is shared across all configs in the build root.
1224 main.SConscript('ext/nomali/SConscript',
1225 variant_dir = joinpath(build_root, 'nomali'))
1226
1227 ###################################################
1228 #
1229 # This function is used to set up a directory with switching headers
1230 #
1231 ###################################################
1232
1233 main['ALL_ISA_LIST'] = all_isa_list
1234 main['ALL_GPU_ISA_LIST'] = all_gpu_isa_list
1235 all_isa_deps = {}
1236 def make_switching_dir(dname, switch_headers, env):
1237 # Generate the header. target[0] is the full path of the output
1238 # header to generate. 'source' is a dummy variable, since we get the
1239 # list of ISAs from env['ALL_ISA_LIST'].
1240 def gen_switch_hdr(target, source, env):
1241 fname = str(target[0])
1242 isa = env['TARGET_ISA'].lower()
1243 try:
1244 f = open(fname, 'w')
1245 print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
1246 f.close()
1247 except IOError:
1248 print "Failed to create %s" % fname
1249 raise
1250
1251 # Build SCons Action object. 'varlist' specifies env vars that this
1252 # action depends on; when env['ALL_ISA_LIST'] changes these actions
1253 # should get re-executed.
1254 switch_hdr_action = MakeAction(gen_switch_hdr,
1255 Transform("GENERATE"), varlist=['ALL_ISA_LIST'])
1256
1257 # Instantiate actions for each header
1258 for hdr in switch_headers:
1259 env.Command(hdr, [], switch_hdr_action)
1260
1261 isa_target = Dir('.').up().name.lower().replace('_', '-')
1262 env['PHONY_BASE'] = '#'+isa_target
1263 all_isa_deps[isa_target] = None
1264
1265 Export('make_switching_dir')
1266
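# Illustration only: if make_switching_dir('arch', ['decoder.hh'], env) were
# called for a build with TARGET_ISA set to 'arm', the generated switching
# header decoder.hh would consist of a single line:
#   #include "arch/arm/decoder.hh"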
1267 def make_gpu_switching_dir(dname, switch_headers, env):
1268 # Generate the header. target[0] is the full path of the output
1269 # header to generate. 'source' is a dummy variable, since we get the
1270 # GPU ISA from env['TARGET_GPU_ISA'].
1271 def gen_switch_hdr(target, source, env):
1272 fname = str(target[0])
1273
1274 isa = env['TARGET_GPU_ISA'].lower()
1275
1276 try:
1277 f = open(fname, 'w')
1278 print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
1279 f.close()
1280 except IOError:
1281 print "Failed to create %s" % fname
1282 raise
1283
1284 # Build SCons Action object. 'varlist' specifies env vars that this
1285 # action depends on; when env['ALL_GPU_ISA_LIST'] changes these actions
1286 # should get re-executed.
1287 switch_hdr_action = MakeAction(gen_switch_hdr,
1288 Transform("GENERATE"), varlist=['ALL_GPU_ISA_LIST'])
1289
1290 # Instantiate actions for each header
1291 for hdr in switch_headers:
1292 env.Command(hdr, [], switch_hdr_action)
1293
1294 Export('make_gpu_switching_dir')
1295
1296 # all-isas -> all-deps -> all-environs -> all_targets
1297 main.Alias('#all-isas', [])
1298 main.Alias('#all-deps', '#all-isas')
1299
1300 # Dummy target to ensure all environments are created before telling
1301 # SCons what to actually make (the command line arguments). We attach
1302 # them to the dependence graph after the environments are complete.
1303 ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
1304 def environsComplete(target, source, env):
1305 for t in ORIG_BUILD_TARGETS:
1306 main.Depends('#all-targets', t)
1307
1308 # Each build/* switching_dir attaches its *-environs target to #all-environs.
1309 main.Append(BUILDERS = {'CompleteEnvirons' :
1310 Builder(action=MakeAction(environsComplete, None))})
1311 main.CompleteEnvirons('#all-environs', [])
1312
1313 def doNothing(**ignored): pass
1314 main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})
1315
1316 # The final target to which all the original targets ultimately get attached.
1317 main.Dummy('#all-targets', '#all-environs')
1318 BUILD_TARGETS[:] = ['#all-targets']
1319
1320 ###################################################
1321 #
1322 # Define build environments for selected configurations.
1323 #
1324 ###################################################
1325
1326 for variant_path in variant_paths:
1327 if not GetOption('silent'):
1328 print "Building in", variant_path
1329
1330 # Make a copy of the build-root environment to use for this config.
1331 env = main.Clone()
1332 env['BUILDDIR'] = variant_path
1333
1334 # variant_dir is the tail component of build path, and is used to
1335 # determine the build parameters (e.g., 'ALPHA_SE')
1336 (build_root, variant_dir) = splitpath(variant_path)
1337
1338 # Set env variables according to the build directory config.
1339 sticky_vars.files = []
1340 # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
1341 # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
1342 # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
1343 current_vars_file = joinpath(build_root, 'variables', variant_dir)
1344 if isfile(current_vars_file):
1345 sticky_vars.files.append(current_vars_file)
1346 if not GetOption('silent'):
1347 print "Using saved variables file %s" % current_vars_file
1348 else:
1349 # Build dir-specific variables file doesn't exist.
1350
1351 # Make sure the directory is there so we can create it later
1352 opt_dir = dirname(current_vars_file)
1353 if not isdir(opt_dir):
1354 mkdir(opt_dir)
1355
1356 # Get default build variables from source tree. Variables are
1357 # normally determined by name of $VARIANT_DIR, but can be
1358 # overridden by '--default=' arg on command line.
1359 default = GetOption('default')
1360 opts_dir = joinpath(main.root.abspath, 'build_opts')
1361 if default:
1362 default_vars_files = [joinpath(build_root, 'variables', default),
1363 joinpath(opts_dir, default)]
1364 else:
1365 default_vars_files = [joinpath(opts_dir, variant_dir)]
1366 existing_files = filter(isfile, default_vars_files)
1367 if existing_files:
1368 default_vars_file = existing_files[0]
1369 sticky_vars.files.append(default_vars_file)
1370 print "Variables file %s not found,\n using defaults in %s" \
1371 % (current_vars_file, default_vars_file)
1372 else:
1373 print "Error: cannot find variables file %s or " \
1374 "default file(s) %s" \
1375 % (current_vars_file, ' or '.join(default_vars_files))
1376 Exit(1)
1377
1378 # Apply current variable settings to env
1379 sticky_vars.Update(env)
1380
1381 help_texts["local_vars"] += \
1382 "Build variables for %s:\n" % variant_dir \
1383 + sticky_vars.GenerateHelpText(env)
1384
1385 # Process variable settings.
1386
1387 if not have_fenv and env['USE_FENV']:
1388 print "Warning: <fenv.h> not available; " \
1389 "forcing USE_FENV to False in", variant_dir + "."
1390 env['USE_FENV'] = False
1391
1392 if not env['USE_FENV']:
1393 print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
1394 print " FP results may deviate slightly from other platforms."
1395
1396 if env['EFENCE']:
1397 env.Append(LIBS=['efence'])
1398
1399 if env['USE_KVM']:
1400 if not have_kvm:
1401 print "Warning: Can not enable KVM, host seems to lack KVM support"
1402 env['USE_KVM'] = False
1403 elif not is_isa_kvm_compatible(env['TARGET_ISA']):
1404 print "Info: KVM support disabled due to unsupported host and " \
1405 "target ISA combination"
1406 env['USE_KVM'] = False
1407
1408 # Warn about missing optional functionality
1409 if env['USE_KVM']:
1410 if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
1411 print "Warning: perf_event headers lack support for the " \
1412 "exclude_host attribute. KVM instruction counts will " \
1413 "be inaccurate."
1414
1415 # Save sticky variable settings back to current variables file
1416 sticky_vars.Save(current_vars_file, env)
1417
1418 if env['USE_SSE2']:
1419 env.Append(CCFLAGS=['-msse2'])
1420
1421 # The src/SConscript file sets up the build rules in 'env' according
1422 # to the configured variables. It returns a list of environments,
1423 # one for each variant build (debug, opt, etc.)
1424 SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
1425
1426 def pairwise(iterable):
1427 "s -> (s0,s1), (s1,s2), (s2, s3), ..."
1428 a, b = itertools.tee(iterable)
1429 b.next()
1430 return itertools.izip(a, b)
1431
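# Illustration only: pairwise(['arm', 'mips', 'x86']) yields
# ('arm', 'mips'), ('mips', 'x86'); this is used below to chain the per-ISA
# targets into a serial dependence order.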
1432 # Create false dependencies so SCons will parse ISAs, establish
1433 # dependencies, and set up the build Environments serially. Either
1434 # SCons (likely) or our SConscripts (possibly) cannot cope with -j
1435 # greater than 1. It appears to be standard race condition stuff; it
1436 # doesn't always fail, but usually, and the behaviors are different.
1437 # Every time I tried to remove this, builds would fail in some
1438 # creative new way. So, don't do that. You'll want to, though, because
1439 # tests/SConscript takes a long time to make its Environments.
1440 for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
1441 main.Depends('#%s-deps' % t2, '#%s-deps' % t1)
1442 main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
1443
1444 # base help text
1445 Help('''
1446 Usage: scons [scons options] [build variables] [target(s)]
1447
1448 Extra scons options:
1449 %(options)s
1450
1451 Global build variables:
1452 %(global_vars)s
1453
1454 %(local_vars)s
1455 ''' % help_texts)