# -*- mode:python -*-
# Copyright (c) 2018 ARM Limited
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
from __future__ import print_function
import array
import bisect
import functools
import imp
import marshal
import os
import re
import subprocess
import sys
import zlib
from os.path import basename, dirname, exists, isdir, isfile, join as joinpath
import SCons
from gem5_scons import Transform
# This file defines how to build a particular configuration of gem5
# based on variable settings in the 'env' build environment.
Import('*')
# Children need to see the environment
Export('env')
build_env = [(opt, env[opt]) for opt in export_vars]
from m5.util import code_formatter, compareVersions
########################################################################
# Code for adding source files of various types
#
# When specifying a source file of some type, a set of tags can be
# specified for that file.
class SourceFilter(object):
    def __init__(self, predicate):
        self.predicate = predicate

    def __or__(self, other):
        return SourceFilter(lambda tags: self.predicate(tags) or
                                         other.predicate(tags))

    def __and__(self, other):
        return SourceFilter(lambda tags: self.predicate(tags) and
                                         other.predicate(tags))
def with_tags_that(predicate):
    '''Return a SourceFilter matching sources whose tags satisfy the
    given predicate.'''
    return SourceFilter(predicate)

def with_any_tags(*tags):
    '''Return a SourceFilter matching sources with any of the supplied
    tags.'''
    return SourceFilter(lambda stags: len(set(tags) & stags) > 0)

def with_all_tags(*tags):
    '''Return a SourceFilter matching sources with all of the supplied
    tags.'''
    return SourceFilter(lambda stags: set(tags) <= stags)

def with_tag(tag):
    '''Return a SourceFilter matching sources with the supplied tag.'''
    return SourceFilter(lambda stags: tag in stags)

def without_tags(*tags):
    '''Return a SourceFilter matching sources without any of the supplied
    tags.'''
    return SourceFilter(lambda stags: len(set(tags) & stags) == 0)

def without_tag(tag):
    '''Return a SourceFilter matching sources without the supplied tag.'''
    return SourceFilter(lambda stags: tag not in stags)
source_filter_factories = {
    'with_tags_that': with_tags_that,
    'with_any_tags': with_any_tags,
    'with_all_tags': with_all_tags,
    'with_tag': with_tag,
    'without_tags': without_tags,
    'without_tag': without_tag,
}
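# Illustrative example (not executed here): the factories above build
# SourceFilter objects that can be combined with '&' and '|', e.g.
#   f = with_any_tags('arm isa', 'x86 isa') & without_tag('python')
# (the tag names in this sketch are hypothetical) and later applied to a
# SourceList, e.g. Source.all.apply_filter(f) or Source.all.without_tag(...).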
Export(source_filter_factories)
class SourceList(list):
    def apply_filter(self, f):
        def match(source):
            return f.predicate(source.tags)
        return SourceList(filter(match, self))

    def __getattr__(self, name):
        func = source_filter_factories.get(name, None)
        if not func:
            raise AttributeError

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            return self.apply_filter(func(*args, **kwargs))
        return wrapper
class SourceMeta(type):
    '''Meta class for source files that keeps track of all files of a
    particular type.'''
    def __init__(cls, name, bases, dict):
        super(SourceMeta, cls).__init__(name, bases, dict)
        cls.all = SourceList()
class SourceFile(object):
    '''Base object that encapsulates the notion of a source file.
    This includes the source node, the target node, and various
    manipulations of those. A source file also specifies a set of tags
    describing arbitrary properties of the source file.'''

    __metaclass__ = SourceMeta

    static_objs = {}
    shared_objs = {}

    def __init__(self, source, tags=None, add_tags=None):
        if tags is None:
            tags='gem5 lib'
        if isinstance(tags, basestring):
            tags = set([tags])
        if not isinstance(tags, set):
            tags = set(tags)
        self.tags = tags

        if add_tags:
            if isinstance(add_tags, basestring):
                add_tags = set([add_tags])
            if not isinstance(add_tags, set):
                add_tags = set(add_tags)
            self.tags |= add_tags

        tnode = source
        if not isinstance(source, SCons.Node.FS.File):
            tnode = File(source)

        self.tnode = tnode
        self.snode = tnode.srcnode()

        for base in type(self).__mro__:
            if issubclass(base, SourceFile):
                base.all.append(self)

    def static(self, env):
        key = (self.tnode, env['OBJSUFFIX'])
        if key not in self.static_objs:
            self.static_objs[key] = env.StaticObject(self.tnode)
        return self.static_objs[key]

    def shared(self, env):
        key = (self.tnode, env['OBJSUFFIX'])
        if key not in self.shared_objs:
            self.shared_objs[key] = env.SharedObject(self.tnode)
        return self.shared_objs[key]

    @property
    def filename(self):
        return str(self.tnode)

    @property
    def dirname(self):
        return dirname(self.filename)

    @property
    def basename(self):
        return basename(self.filename)

    @property
    def extname(self):
        index = self.basename.rfind('.')
        if index <= 0:
            # dot files aren't extensions
            return self.basename, None

        return self.basename[:index], self.basename[index+1:]

    def __lt__(self, other): return self.filename < other.filename
    def __le__(self, other): return self.filename <= other.filename
    def __gt__(self, other): return self.filename > other.filename
    def __ge__(self, other): return self.filename >= other.filename
    def __eq__(self, other): return self.filename == other.filename
    def __ne__(self, other): return self.filename != other.filename
def blobToCpp(data, symbol, cpp_code, hpp_code=None, namespace=None):
    '''
    Convert bytes data into C++ .cpp and .hh uint8_t byte array
    code containing that binary data.

    :param data: binary data to be converted to C++
    :param symbol: name of the symbol
    :param cpp_code: append the generated cpp_code to this object
    :param hpp_code: append the generated hpp_code to this object.
                     If None, ignore it. Otherwise, also include it
                     in the .cpp file.
    :param namespace: namespace to put the symbol into. If None,
                      don't put the symbols into any namespace.
    '''
    symbol_len_declaration = 'const std::size_t {}_len'.format(symbol)
    symbol_declaration = 'const std::uint8_t {}[]'.format(symbol)
    if hpp_code is not None:
        cpp_code('''\
#include "blobs/{}.hh"
'''.format(symbol))
        hpp_code('''\
#include <cstddef>
#include <cstdint>
''')
        if namespace is not None:
            hpp_code('namespace {} {{'.format(namespace))
        hpp_code('extern ' + symbol_len_declaration + ';')
        hpp_code('extern ' + symbol_declaration + ';')
        if namespace is not None:
            hpp_code('}')

    if namespace is not None:
        cpp_code('namespace {} {{'.format(namespace))

    if hpp_code is not None:
        cpp_code(symbol_len_declaration + ' = {};'.format(len(data)))

    cpp_code(symbol_declaration + ' = {')
    cpp_code.indent()
    step = 16
    for i in xrange(0, len(data), step):
        x = array.array('B', data[i:i+step])
        cpp_code(''.join('%d,' % d for d in x))
    cpp_code.dedent()
    cpp_code('};')

    if namespace is not None:
        cpp_code('}')
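# Rough sketch of the .cc output produced by a call like
# blobToCpp('\x01\x02', 'foo', cpp_code, hpp_code, namespace='Blobs')
# (symbol name and data are illustrative):
#   #include "blobs/foo.hh"
#   namespace Blobs {
#   const std::size_t foo_len = 2;
#   const std::uint8_t foo[] = {
#       1,2,
#   };
#   }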
def Blob(blob_path, symbol):
    '''
    Embed an arbitrary blob into the gem5 executable,
    and make it accessible to C++ as a byte array.
    '''
    blob_path = os.path.abspath(blob_path)
    blob_out_dir = os.path.join(env['BUILDDIR'], 'blobs')
    path_noext = joinpath(blob_out_dir, symbol)
    cpp_path = path_noext + '.cc'
    hpp_path = path_noext + '.hh'
    def embedBlob(target, source, env):
        data = file(str(source[0]), 'r').read()
        cpp_code = code_formatter()
        hpp_code = code_formatter()
        blobToCpp(data, symbol, cpp_code, hpp_code, namespace='Blobs')
        cpp_path = str(target[0])
        hpp_path = str(target[1])
        cpp_dir = os.path.split(cpp_path)[0]
        if not os.path.exists(cpp_dir):
            os.makedirs(cpp_dir)
        cpp_code.write(cpp_path)
        hpp_code.write(hpp_path)
    env.Command([cpp_path, hpp_path], blob_path,
                MakeAction(embedBlob, Transform("EMBED BLOB")))
    Source(cpp_path)
def GdbXml(xml_id, symbol):
    Blob(joinpath(gdb_xml_dir, xml_id), symbol)
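# Illustrative usage from a SConscript (the path and symbol name below are
# hypothetical):
#   Blob('path/to/some_firmware.bin', 'some_firmware')
# makes Blobs::some_firmware and Blobs::some_firmware_len available to C++
# via #include "blobs/some_firmware.hh".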
class Source(SourceFile):
    '''Add a C/C++ source file to the build.'''

    ungrouped_tag = 'No link group'
    source_groups = set()

    _current_group_tag = ungrouped_tag

    @staticmethod
    def link_group_tag(group):
        return 'link group: %s' % group

    @classmethod
    def set_group(cls, group):
        new_tag = Source.link_group_tag(group)
        Source._current_group_tag = new_tag
        Source.source_groups.add(group)

    def _add_link_group_tag(self):
        self.tags.add(Source._current_group_tag)

    def __init__(self, source, tags=None, add_tags=None):
        '''Specify the source file and any tags.'''
        super(Source, self).__init__(source, tags, add_tags)

        self._add_link_group_tag()
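# Typical usage from a SConscript (file names are illustrative):
#   Source('foo.cc')                      # default tag set: 'gem5 lib'
#   Source('bar.cc', tags='main')         # replace the default tag with 'main'
#   Source('baz.cc', add_tags='python')   # keep the default and add 'python'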
class PySource(SourceFile):
    '''Add a python source file to the named package'''
    invalid_sym_char = re.compile('[^A-z0-9_]')
    modules = {}
    tnodes = {}
    symnames = {}

    def __init__(self, package, source, tags=None, add_tags=None):
        '''specify the python package, the source file, and any tags'''
        super(PySource, self).__init__(source, tags, add_tags)

        modname,ext = self.extname
        assert ext == 'py'

        if package:
            path = package.split('.')
        else:
            path = []

        modpath = path[:]
        if modname != '__init__':
            modpath += [ modname ]
        modpath = '.'.join(modpath)

        arcpath = path + [ self.basename ]
        abspath = self.snode.abspath
        if not exists(abspath):
            abspath = self.tnode.abspath

        self.package = package
        self.modname = modname
        self.modpath = modpath
        self.arcname = joinpath(*arcpath)
        self.abspath = abspath
        self.compiled = File(self.filename + 'c')
        self.cpp = File(self.filename + '.cc')
        self.symname = PySource.invalid_sym_char.sub('_', modpath)

        PySource.modules[modpath] = self
        PySource.tnodes[self.tnode] = self
        PySource.symnames[self.symname] = self
class SimObject(PySource):
    '''Add a SimObject python file as a python source object and add
    it to a list of sim object modules'''

    fixed = False
    modnames = []

    def __init__(self, source, tags=None, add_tags=None):
        '''Specify the source file and any tags (automatically in
        the m5.objects package)'''
        super(SimObject, self).__init__('m5.objects', source, tags, add_tags)
        if self.fixed:
            raise AttributeError, "Too late to call SimObject now."

        bisect.insort_right(SimObject.modnames, self.modname)
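# Illustrative usage from a SConscript (the file name is hypothetical):
#   SimObject('MyDevice.py')
# places the module in the m5.objects package and records its module name so
# that it gets imported when the SimObject param headers are generated below.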
class ProtoBuf(SourceFile):
    '''Add a Protocol Buffer to the build'''

    def __init__(self, source, tags=None, add_tags=None):
        '''Specify the source file, and any tags'''
        super(ProtoBuf, self).__init__(source, tags, add_tags)

        # Get the file name and the extension
        modname,ext = self.extname
        assert ext == 'proto'

        # Currently, we stick to generating the C++ headers, so we
        # only need to track the source and header.
        self.cc_file = File(modname + '.pb.cc')
        self.hh_file = File(modname + '.pb.h')
executable_classes = []
class ExecutableMeta(type):
    '''Meta class for Executables.'''
    all = []

    def __init__(cls, name, bases, d):
        if not d.pop('abstract', False):
            ExecutableMeta.all.append(cls)
        super(ExecutableMeta, cls).__init__(name, bases, d)

        cls.all = []
class Executable(object):
    '''Base class for creating an executable from sources.'''

    __metaclass__ = ExecutableMeta

    abstract = True

    def __init__(self, target, *srcs_and_filts):
        '''Specify the target name and any sources. Sources that are
        not SourceFiles are evaluated with Source().'''
        super(Executable, self).__init__()
        self.all.append(self)
        self.target = target

        isFilter = lambda arg: isinstance(arg, SourceFilter)
        self.filters = filter(isFilter, srcs_and_filts)
        sources = filter(lambda a: not isFilter(a), srcs_and_filts)

        srcs = SourceList()
        for src in sources:
            if not isinstance(src, SourceFile):
                src = Source(src, tags=[])
            srcs.append(src)

        self.sources = srcs
        self.dir = Dir('.')

    def path(self, env):
        return self.dir.File(self.target + '.' + env['EXE_SUFFIX'])

    def srcs_to_objs(self, env, sources):
        return list([ s.static(env) for s in sources ])

    @classmethod
    def declare_all(cls, env):
        return list([ instance.declare(env) for instance in cls.all ])

    def declare(self, env, objs=None):
        if objs is None:
            objs = self.srcs_to_objs(env, self.sources)

        if env['STRIP_EXES']:
            stripped = self.path(env)
            unstripped = env.File(str(stripped) + '.unstripped')
            if sys.platform == 'sunos5':
                cmd = 'cp $SOURCE $TARGET; strip $TARGET'
            else:
                cmd = 'strip $SOURCE -o $TARGET'
            env.Program(unstripped, objs)
            return env.Command(stripped, unstripped,
                               MakeAction(cmd, Transform("STRIP")))
        else:
            return env.Program(self.path(env), objs)
class UnitTest(Executable):
    '''Create a UnitTest'''

    def __init__(self, target, *srcs_and_filts, **kwargs):
        super(UnitTest, self).__init__(target, *srcs_and_filts)

        self.main = kwargs.get('main', False)

    def declare(self, env):
        sources = list(self.sources)
        for f in self.filters:
            sources += Source.all.apply_filter(f)
        objs = self.srcs_to_objs(env, sources) + env['STATIC_OBJS']
        if self.main:
            objs += env['MAIN_OBJS']
        return super(UnitTest, self).declare(env, objs)
class GTest(Executable):
    '''Create a unit test based on the google test framework.'''
    all = []

    def __init__(self, *srcs_and_filts, **kwargs):
        super(GTest, self).__init__(*srcs_and_filts)

        self.skip_lib = kwargs.pop('skip_lib', False)

    @classmethod
    def declare_all(cls, env):
        env = env.Clone()
        env.Append(LIBS=env['GTEST_LIBS'])
        env.Append(CPPFLAGS=env['GTEST_CPPFLAGS'])
        env['GTEST_LIB_SOURCES'] = Source.all.with_tag('gtest lib')
        env['GTEST_OUT_DIR'] = \
            Dir(env['BUILDDIR']).Dir('unittests.' + env['EXE_SUFFIX'])
        return super(GTest, cls).declare_all(env)

    def declare(self, env):
        sources = list(self.sources)
        if not self.skip_lib:
            sources += env['GTEST_LIB_SOURCES']
        for f in self.filters:
            sources += Source.all.apply_filter(f)
        objs = self.srcs_to_objs(env, sources)

        binary = super(GTest, self).declare(env, objs)

        out_dir = env['GTEST_OUT_DIR']
        xml_file = out_dir.Dir(str(self.dir)).File(self.target + '.xml')
        AlwaysBuild(env.Command(xml_file, binary,
            "${SOURCES[0]} --gtest_output=xml:${TARGETS[0]}"))

        return binary
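# Illustrative usage from a SConscript (file and target names are
# hypothetical):
#   GTest('bitfield.test', 'bitfield.test.cc')
# builds the test binary and always regenerates its XML result under
# $BUILDDIR/unittests.<label>/.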
class Gem5(Executable):
    '''Create a gem5 executable.'''

    def __init__(self, target):
        super(Gem5, self).__init__(target)

    def declare(self, env):
        objs = env['MAIN_OBJS'] + env['STATIC_OBJS']
        return super(Gem5, self).declare(env, objs)
# Children should have access
Export('Blob')
Export('GdbXml')
Export('Source')
Export('PySource')
Export('SimObject')
Export('ProtoBuf')
Export('Executable')
Export('UnitTest')
Export('GTest')
########################################################################
#
# Debug Flags
#
debug_flags = {}
def DebugFlag(name, desc=None):
    if name in debug_flags:
        raise AttributeError, "Flag %s already specified" % name
    debug_flags[name] = (name, (), desc)
def CompoundFlag(name, flags, desc=None):
    if name in debug_flags:
        raise AttributeError, "Flag %s already specified" % name

    compound = tuple(flags)
    debug_flags[name] = (name, compound, desc)
Export('DebugFlag')
Export('CompoundFlag')
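# Illustrative usage from a SConscript (the flag names are hypothetical):
#   DebugFlag('Fetch', 'Fetch stage of the pipeline')
#   DebugFlag('Decode')
#   CompoundFlag('FrontEnd', ['Fetch', 'Decode'])
# The corresponding debug/<Flag>.hh headers and debug/flags.cc are generated
# further below.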
########################################################################
#
# Set some compiler variables
#
# Include file paths are rooted in this directory. SCons will
# automatically expand '.' to refer to both the source directory and
# the corresponding build directory to pick up generated include
# files.
env.Append(CPPPATH=Dir('.'))
for extra_dir in extras_dir_list:
    env.Append(CPPPATH=Dir(extra_dir))
# Workaround for bug in SCons version > 0.97d20071212
# Scons bug id: 2006 gem5 Bug id: 308
for root, dirs, files in os.walk(base_dir, topdown=True):
    Dir(root[len(base_dir) + 1:])
########################################################################
#
# Walk the tree and execute all SConscripts in subdirectories
#
here = Dir('.').srcnode().abspath
for root, dirs, files in os.walk(base_dir, topdown=True):
    if root == here:
        # we don't want to recurse back into this SConscript
        continue

    if 'SConscript' in files:
        build_dir = joinpath(env['BUILDDIR'], root[len(base_dir) + 1:])
        Source.set_group(build_dir)
        SConscript(joinpath(root, 'SConscript'), variant_dir=build_dir)
for extra_dir in extras_dir_list:
    prefix_len = len(dirname(extra_dir)) + 1

    # Also add the corresponding build directory to pick up generated
    # include files.
    env.Append(CPPPATH=Dir(joinpath(env['BUILDDIR'], extra_dir[prefix_len:])))

    for root, dirs, files in os.walk(extra_dir, topdown=True):
        # if build lives in the extras directory, don't walk down it
        if 'build' in dirs:
            dirs.remove('build')

        if 'SConscript' in files:
            build_dir = joinpath(env['BUILDDIR'], root[prefix_len:])
            SConscript(joinpath(root, 'SConscript'), variant_dir=build_dir)
for opt in export_vars:
    env.ConfigFile(opt)
def makeTheISA(source, target, env):
    isas = [ src.get_contents() for src in source ]
    target_isa = env['TARGET_ISA']
    def define(isa):
        return isa.upper() + '_ISA'

    def namespace(isa):
        return isa[0].upper() + isa[1:].lower() + 'ISA'

    code = code_formatter()

    code('''\
#ifndef __CONFIG_THE_ISA_HH__
#define __CONFIG_THE_ISA_HH__
''')

    # create defines for the preprocessing and compile-time determination
    for i,isa in enumerate(isas):
        code('#define $0 $1', define(isa), i + 1)
    code()

    # create an enum for any run-time determination of the ISA, we
    # reuse the same name as the namespaces
    code('enum class Arch {')
    for i,isa in enumerate(isas):
        if i + 1 == len(isas):
            code(' $0 = $1', namespace(isa), define(isa))
        else:
            code(' $0 = $1,', namespace(isa), define(isa))
    code('};')

    code('''
#define THE_ISA ${{define(target_isa)}}
#define TheISA ${{namespace(target_isa)}}
#define THE_ISA_STR "${{target_isa}}"
#endif // __CONFIG_THE_ISA_HH__''')

    code.write(str(target[0]))
env.Command('config/the_isa.hh', map(Value, all_isa_list),
            MakeAction(makeTheISA, Transform("CFG ISA", 0)))
def makeTheGPUISA(source, target, env):
    isas = [ src.get_contents() for src in source ]
    target_gpu_isa = env['TARGET_GPU_ISA']
    def define(isa):
        return isa.upper() + '_ISA'

    def namespace(isa):
        return isa[0].upper() + isa[1:].lower() + 'ISA'

    code = code_formatter()

    code('''\
#ifndef __CONFIG_THE_GPU_ISA_HH__
#define __CONFIG_THE_GPU_ISA_HH__
''')

    # create defines for the preprocessing and compile-time determination
    for i,isa in enumerate(isas):
        code('#define $0 $1', define(isa), i + 1)
    code()

    # create an enum for any run-time determination of the ISA, we
    # reuse the same name as the namespaces
    code('enum class GPUArch {')
    for i,isa in enumerate(isas):
        if i + 1 == len(isas):
            code(' $0 = $1', namespace(isa), define(isa))
        else:
            code(' $0 = $1,', namespace(isa), define(isa))
    code('};')

    code('''
#define THE_GPU_ISA ${{define(target_gpu_isa)}}
#define TheGpuISA ${{namespace(target_gpu_isa)}}
#define THE_GPU_ISA_STR "${{target_gpu_isa}}"
#endif // __CONFIG_THE_GPU_ISA_HH__''')

    code.write(str(target[0]))
env.Command('config/the_gpu_isa.hh', map(Value, all_gpu_isa_list),
            MakeAction(makeTheGPUISA, Transform("CFG ISA", 0)))
########################################################################
#
# Prevent any SimObjects from being added after this point, they
# should all have been added in the SConscripts above
#
SimObject.fixed = True
class DictImporter(object):
    '''This importer takes a dictionary of arbitrary module names that
    map to arbitrary filenames.'''
    def __init__(self, modules):
        self.modules = modules
        self.installed = set()

    def __del__(self):
        self.unload()

    def unload(self):
        import sys
        for module in self.installed:
            del sys.modules[module]
        self.installed = set()

    def find_module(self, fullname, path):
        if fullname == 'm5.defines':
            return self

        if fullname == 'm5.objects':
            return self

        if fullname.startswith('_m5'):
            return None

        source = self.modules.get(fullname, None)
        if source is not None and fullname.startswith('m5.objects'):
            return self

        return None

    def load_module(self, fullname):
        mod = imp.new_module(fullname)
        sys.modules[fullname] = mod
        self.installed.add(fullname)

        mod.__loader__ = self
        if fullname == 'm5.objects':
            mod.__path__ = fullname.split('.')
            return mod

        if fullname == 'm5.defines':
            mod.__dict__['buildEnv'] = m5.util.SmartDict(build_env)
            return mod

        source = self.modules[fullname]
        if source.modname == '__init__':
            mod.__path__ = source.modpath
        mod.__file__ = source.abspath

        exec file(source.abspath, 'r') in mod.__dict__

        return mod
import m5.SimObject
import m5.params
from m5.util import code_formatter
m5.SimObject.clear()
m5.params.clear()
# install the python importer so we can grab stuff from the source
# tree itself. We can't have SimObjects added after this point or
# else we won't know about them for the rest of the stuff.
importer = DictImporter(PySource.modules)
sys.meta_path[0:0] = [ importer ]
# import all sim objects so we can populate the all_objects list
# make sure that we're working with a list, then let's sort it
for modname in SimObject.modnames:
    exec('from m5.objects import %s' % modname)
# we need to unload all of the currently imported modules so that they
# will be re-imported the next time the sconscript is run
importer.unload()
sys.meta_path.remove(importer)
sim_objects = m5.SimObject.allClasses
all_enums = m5.params.allEnums
for name,obj in sorted(sim_objects.iteritems()):
    for param in obj._params.local.values():
        # load the ptype attribute now because it depends on the
        # current version of SimObject.allClasses, but when scons
        # actually uses the value, all versions of
        # SimObject.allClasses will have been loaded
        param.ptype
########################################################################
#
# calculate extra dependencies
#
module_depends = ["m5", "m5.SimObject", "m5.params"]
depends = [ PySource.modules[dep].snode for dep in module_depends ]
depends.sort(key = lambda x: x.name)
########################################################################
#
# Commands for the basic automatically generated python files
#
# Generate Python file containing a dict specifying the current
# buildEnv flags.
def makeDefinesPyFile(target, source, env):
    build_env = source[0].get_contents()

    code = code_formatter()
    code("""
import _m5.core
import m5.util
buildEnv = m5.util.SmartDict($build_env)
compileDate = _m5.core.compileDate
_globals = globals()
for key,val in _m5.core.__dict__.iteritems():
    if key.startswith('flag_'):
        flag = key[5:]
        _globals[flag] = val
del _globals
""")
    code.write(target[0].abspath)
defines_info = Value(build_env)
# Generate a file with all of the compile options in it
env.Command('python/m5/defines.py', defines_info,
            MakeAction(makeDefinesPyFile, Transform("DEFINES", 0)))
PySource('m5', 'python/m5/defines.py')
# Generate python file containing info about the M5 source code
def makeInfoPyFile(target, source, env):
    code = code_formatter()
    for src in source:
        data = ''.join(file(src.srcnode().abspath, 'r').xreadlines())
        code('$src = ${{repr(data)}}')
    code.write(str(target[0]))
# Generate a file that wraps the basic top level files
env.Command('python/m5/info.py',
            [ '#/COPYING', '#/LICENSE', '#/README', ],
            MakeAction(makeInfoPyFile, Transform("INFO")))
PySource('m5', 'python/m5/info.py')
########################################################################
#
# Create all of the SimObject param headers and enum headers
#
def createSimObjectParamStruct(target, source, env):
    assert len(target) == 1 and len(source) == 1

    name = source[0].get_text_contents()
    obj = sim_objects[name]

    code = code_formatter()
    obj.cxx_param_decl(code)
    code.write(target[0].abspath)
def createSimObjectCxxConfig(is_header):
    def body(target, source, env):
        assert len(target) == 1 and len(source) == 1

        name = str(source[0].get_contents())
        obj = sim_objects[name]

        code = code_formatter()
        obj.cxx_config_param_file(code, is_header)
        code.write(target[0].abspath)
    return body
def createEnumStrings(target, source, env):
    assert len(target) == 1 and len(source) == 2

    name = source[0].get_text_contents()
    use_python = source[1].read()
    obj = all_enums[name]

    code = code_formatter()
    obj.cxx_def(code)
    if use_python:
        obj.pybind_def(code)
    code.write(target[0].abspath)
def createEnumDecls(target, source, env):
    assert len(target) == 1 and len(source) == 1

    name = source[0].get_text_contents()
    obj = all_enums[name]

    code = code_formatter()
    obj.cxx_decl(code)
    code.write(target[0].abspath)
def createSimObjectPyBindWrapper(target, source, env):
    name = source[0].get_text_contents()
    obj = sim_objects[name]

    code = code_formatter()
    obj.pybind_decl(code)
    code.write(target[0].abspath)
# Generate all of the SimObject param C++ struct header files
params_hh_files = []
for name,simobj in sorted(sim_objects.iteritems()):
    py_source = PySource.modules[simobj.__module__]
    extra_deps = [ py_source.tnode ]

    hh_file = File('params/%s.hh' % name)
    params_hh_files.append(hh_file)
    env.Command(hh_file, Value(name),
                MakeAction(createSimObjectParamStruct, Transform("SO PARAM")))
    env.Depends(hh_file, depends + extra_deps)
# C++ parameter description files
if GetOption('with_cxx_config'):
    for name,simobj in sorted(sim_objects.iteritems()):
        py_source = PySource.modules[simobj.__module__]
        extra_deps = [ py_source.tnode ]

        cxx_config_hh_file = File('cxx_config/%s.hh' % name)
        cxx_config_cc_file = File('cxx_config/%s.cc' % name)
        env.Command(cxx_config_hh_file, Value(name),
                    MakeAction(createSimObjectCxxConfig(True),
                               Transform("CXXCPRHH")))
        env.Command(cxx_config_cc_file, Value(name),
                    MakeAction(createSimObjectCxxConfig(False),
                               Transform("CXXCPRCC")))
        env.Depends(cxx_config_hh_file, depends + extra_deps +
                    [File('params/%s.hh' % name), File('sim/cxx_config.hh')])
        env.Depends(cxx_config_cc_file, depends + extra_deps +
                    [cxx_config_hh_file])
        Source(cxx_config_cc_file)

    cxx_config_init_cc_file = File('cxx_config/init.cc')

    def createCxxConfigInitCC(target, source, env):
        assert len(target) == 1 and len(source) == 1

        code = code_formatter()

        for name,simobj in sorted(sim_objects.iteritems()):
            if not hasattr(simobj, 'abstract') or not simobj.abstract:
                code('#include "cxx_config/${name}.hh"')
        code()
        code('void cxxConfigInit()')
        code('{')
        code.indent()
        for name,simobj in sorted(sim_objects.iteritems()):
            not_abstract = not hasattr(simobj, 'abstract') or \
                not simobj.abstract
            if not_abstract and 'type' in simobj.__dict__:
                code('cxx_config_directory["${name}"] = '
                    '${name}CxxConfigParams::makeDirectoryEntry();')
        code.dedent()
        code('}')
        code.write(target[0].abspath)

    py_source = PySource.modules[simobj.__module__]
    extra_deps = [ py_source.tnode ]
    env.Command(cxx_config_init_cc_file, Value(name),
        MakeAction(createCxxConfigInitCC, Transform("CXXCINIT")))
    cxx_param_hh_files = ["cxx_config/%s.hh" % simobj
        for name,simobj in sorted(sim_objects.iteritems())
        if not hasattr(simobj, 'abstract') or not simobj.abstract]
    Depends(cxx_config_init_cc_file, cxx_param_hh_files +
        [File('sim/cxx_config.hh')])
    Source(cxx_config_init_cc_file)
# Generate all enum header files
for name,enum in sorted(all_enums.iteritems()):
    py_source = PySource.modules[enum.__module__]
    extra_deps = [ py_source.tnode ]

    cc_file = File('enums/%s.cc' % name)
    env.Command(cc_file, [Value(name), Value(env['USE_PYTHON'])],
                MakeAction(createEnumStrings, Transform("ENUM STR")))
    env.Depends(cc_file, depends + extra_deps)
    Source(cc_file)

    hh_file = File('enums/%s.hh' % name)
    env.Command(hh_file, Value(name),
                MakeAction(createEnumDecls, Transform("ENUMDECL")))
    env.Depends(hh_file, depends + extra_deps)
# Generate SimObject Python bindings wrapper files
if env['USE_PYTHON']:
    for name,simobj in sorted(sim_objects.iteritems()):
        py_source = PySource.modules[simobj.__module__]
        extra_deps = [ py_source.tnode ]

        cc_file = File('python/_m5/param_%s.cc' % name)
        env.Command(cc_file, Value(name),
                    MakeAction(createSimObjectPyBindWrapper,
                               Transform("SO PyBind")))
        env.Depends(cc_file, depends + extra_deps)
        Source(cc_file)
# Build all protocol buffers if we have got protoc and protobuf available
if env['HAVE_PROTOBUF']:
    for proto in ProtoBuf.all:
        # Use both the source and header as the target, and the .proto
        # file as the source. When executing the protoc compiler, also
        # specify the proto_path to avoid having the generated files
        # include the path.
        env.Command([proto.cc_file, proto.hh_file], proto.tnode,
                    MakeAction('$PROTOC --cpp_out ${TARGET.dir} '
                               '--proto_path ${SOURCE.dir} $SOURCE',
                               Transform("PROTOC")))

        # Add the C++ source file
        Source(proto.cc_file, tags=proto.tags)
elif ProtoBuf.all:
    print('Got protobuf files to build, but protobuf support is not '
          'available!')
    Exit(1)
#
# Handle debug flags
#
def makeDebugFlagCC(target, source, env):
    assert(len(target) == 1 and len(source) == 1)

    code = code_formatter()

    # delay definition of CompoundFlags until after the definition of
    # all constituent SimpleFlags
    comp_code = code_formatter()

    # file header
    code('''
/*
 * DO NOT EDIT THIS FILE! Automatically generated by SCons.
 */
#include "base/debug.hh"
namespace Debug {
''')

    for name, flag in sorted(source[0].read().iteritems()):
        n, compound, desc = flag
        assert n == name

        if not compound:
            code('SimpleFlag $name("$name", "$desc");')
        else:
            comp_code('CompoundFlag $name("$name", "$desc",')
            comp_code.indent()
            last = len(compound) - 1
            for i,flag in enumerate(compound):
                if i != last:
                    comp_code('&$flag,')
                else:
                    comp_code('&$flag);')
            comp_code.dedent()

    code.append(comp_code)
    code()
    code('} // namespace Debug')
    code.write(str(target[0]))
def makeDebugFlagHH(target, source, env):
    assert(len(target) == 1 and len(source) == 1)

    val = eval(source[0].get_contents())
    name, compound, desc = val

    code = code_formatter()

    # file header boilerplate
    code('''\
/*
 * DO NOT EDIT THIS FILE! Automatically generated by SCons.
 */
#ifndef __DEBUG_${name}_HH__
#define __DEBUG_${name}_HH__
namespace Debug {
''')

    if compound:
        code('class CompoundFlag;')
    code('class SimpleFlag;')

    if compound:
        code('extern CompoundFlag $name;')
        for flag in compound:
            code('extern SimpleFlag $flag;')
    else:
        code('extern SimpleFlag $name;')

    code('''
}
#endif // __DEBUG_${name}_HH__
''')
    code.write(str(target[0]))
for name,flag in sorted(debug_flags.iteritems()):
    n, compound, desc = flag
    assert n == name

    hh_file = 'debug/%s.hh' % name
    env.Command(hh_file, Value(flag),
                MakeAction(makeDebugFlagHH, Transform("TRACING", 0)))
env.Command('debug/flags.cc', Value(debug_flags),
            MakeAction(makeDebugFlagCC, Transform("TRACING", 0)))
Source('debug/flags.cc')
# version tags
tags = \
    env.Command('sim/tags.cc', None,
                MakeAction('util/cpt_upgrader.py --get-cc-file > $TARGET',
                           Transform("VER TAGS")))
env.AlwaysBuild(tags)
# Embed python files. All .py files that have been indicated by a
# PySource() call in a SConscript need to be embedded into the M5
# library. To do that, we compile the file to byte code, marshal the
# byte code, compress it, and then generate a c++ file that
# inserts the result into an array.
def embedPyFile(target, source, env):
    '''Action function to compile a .py into a code object, marshal it,
    compress it, and write it into a C++ file where the bytes appear as
    a uint8_t array.'''
    def c_str(string):
        if string is None:
            return "0"
        return '"%s"' % string

    src = file(str(source[0]), 'r').read()

    pysource = PySource.tnodes[source[0]]
    compiled = compile(src, pysource.abspath, 'exec')
    marshalled = marshal.dumps(compiled)
    compressed = zlib.compress(marshalled)
    data = compressed
    sym = pysource.symname

    code = code_formatter()
    code('''\
#include "sim/init.hh"
namespace {
''')
    blobToCpp(data, 'data_' + sym, code)
    code('''\
EmbeddedPython embedded_${sym}(
    ${{c_str(pysource.arcname)}},
    ${{c_str(pysource.abspath)}},
    ${{c_str(pysource.modpath)}},
    data_${sym},
    ${{len(data)}},
    ${{len(marshalled)}});
} // anonymous namespace
''')
    code.write(str(target[0]))
for source in PySource.all:
    env.Command(source.cpp, source.tnode,
                MakeAction(embedPyFile, Transform("EMBED PY")))
    Source(source.cpp, tags=source.tags, add_tags='python')
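# The transformation performed by embedPyFile can be reproduced by hand
# (illustrative only, assuming some file foo.py exists):
#   src = open('foo.py').read()
#   payload = zlib.compress(marshal.dumps(compile(src, 'foo.py', 'exec')))
# 'payload' is what ends up in the generated uint8_t array.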
########################################################################
#
# Define binaries. Each different build type (debug, opt, etc.) gets
# a slightly different build environment.
#
# List of constructed environments to pass back to SConstruct
date_source = Source('base/date.cc', tags=[])
gem5_binary = Gem5('gem5')
# Function to create a new build environment as clone of current
# environment 'env' with modified object suffix and optional stripped
# binary. Additional keyword arguments are appended to corresponding
# build environment vars.
def makeEnv(env, label, objsfx, strip=False, disable_partial=False, **kwargs):
    # SCons doesn't know to append a library suffix when there is a '.' in the
    # name. Use '_' instead.
    libname = 'gem5_' + label
    secondary_exename = 'm5.' + label

    new_env = env.Clone(OBJSUFFIX=objsfx, SHOBJSUFFIX=objsfx + 's')
    new_env.Label = label
    new_env.Append(**kwargs)

    lib_sources = Source.all.with_tag('gem5 lib')

    # Without Python, leave out all Python content from the library
    # builds. The option doesn't affect gem5 built as a program
    if GetOption('without_python'):
        lib_sources = lib_sources.without_tag('python')

    static_objs = []
    shared_objs = []

    for s in lib_sources.with_tag(Source.ungrouped_tag):
        static_objs.append(s.static(new_env))
        shared_objs.append(s.shared(new_env))

    for group in Source.source_groups:
        srcs = lib_sources.with_tag(Source.link_group_tag(group))
        if not srcs:
            continue

        group_static = [ s.static(new_env) for s in srcs ]
        group_shared = [ s.shared(new_env) for s in srcs ]

        # If partial linking is disabled, add these sources to the build
        # directly, and short circuit this loop.
        if disable_partial:
            static_objs.extend(group_static)
            shared_objs.extend(group_shared)
            continue

        # Set up the static partially linked objects.
        file_name = new_env.subst("${OBJPREFIX}lib${OBJSUFFIX}.partial")
        target = File(joinpath(group, file_name))
        partial = env.PartialStatic(target=target, source=group_static)
        static_objs.extend(partial)

        # Set up the shared partially linked objects.
        file_name = new_env.subst("${SHOBJPREFIX}lib${SHOBJSUFFIX}.partial")
        target = File(joinpath(group, file_name))
        partial = env.PartialShared(target=target, source=group_shared)
        shared_objs.extend(partial)

    static_date = date_source.static(new_env)
    new_env.Depends(static_date, static_objs)
    static_objs.extend(static_date)

    shared_date = date_source.shared(new_env)
    new_env.Depends(shared_date, shared_objs)
    shared_objs.extend(shared_date)

    main_objs = [ s.static(new_env) for s in Source.all.with_tag('main') ]

    # First make a library of everything but main() so other programs can
    # link against m5.
    static_lib = new_env.StaticLibrary(libname, static_objs)
    shared_lib = new_env.SharedLibrary(libname, shared_objs)

    # Keep track of the object files generated so far so Executables can
    # include them.
    new_env['STATIC_OBJS'] = static_objs
    new_env['SHARED_OBJS'] = shared_objs
    new_env['MAIN_OBJS'] = main_objs

    new_env['STATIC_LIB'] = static_lib
    new_env['SHARED_LIB'] = shared_lib

    # Record some settings for building Executables.
    new_env['EXE_SUFFIX'] = label
    new_env['STRIP_EXES'] = strip

    for cls in ExecutableMeta.all:
        cls.declare_all(new_env)

    new_env.M5Binary = File(gem5_binary.path(new_env))

    new_env.Command(secondary_exename, new_env.M5Binary,
                    MakeAction('ln $SOURCE $TARGET', Transform("HARDLINK")))

    # Set up regression tests.
    SConscript(os.path.join(env.root.abspath, 'tests', 'SConscript'),
               variant_dir=Dir('tests').Dir(new_env.Label),
               exports={ 'env' : new_env }, duplicate=False)
# Start out with the compiler flags common to all compilers,
# i.e. they all use -g for opt and -g -pg for prof
ccflags = {'debug' : [], 'opt' : ['-g'], 'fast' : [], 'prof' : ['-g', '-pg'],
           'perf' : ['-g']}
# Start out with the linker flags common to all linkers, i.e. -pg for
# prof, and -lprofiler for perf. The -lprofiler flag is surrounded by
# no-as-needed and as-needed as the binutils linker is too clever and
# simply doesn't link to the library otherwise.
ldflags = {'debug' : [], 'opt' : [], 'fast' : [], 'prof' : ['-pg'],
           'perf' : ['-Wl,--no-as-needed', '-lprofiler', '-Wl,--as-needed']}
# For Link Time Optimization, the optimisation flags used to compile
# individual files are decoupled from those used at link time
# (i.e. you can compile with -O3 and perform LTO with -O0), so we need
# to also update the linker flags based on the target.
if env['GCC']:
    if sys.platform == 'sunos5':
        ccflags['debug'] += ['-gstabs+']
    else:
        ccflags['debug'] += ['-ggdb3']
    ldflags['debug'] += ['-O0']

    # opt, fast, prof and perf all share the same cc flags, also add
    # the optimization to the ldflags as LTO defers the optimization
    # to link time
    for target in ['opt', 'fast', 'prof', 'perf']:
        ccflags[target] += ['-O3']
        ldflags[target] += ['-O3']

    ccflags['fast'] += env['LTO_CCFLAGS']
    ldflags['fast'] += env['LTO_LDFLAGS']
elif env['CLANG']:
    ccflags['debug'] += ['-g', '-O0']
    # opt, fast, prof and perf all share the same cc flags
    for target in ['opt', 'fast', 'prof', 'perf']:
        ccflags[target] += ['-O3']
else:
    print('Unknown compiler, please fix compiler options')
    Exit(1)
# To speed things up, we only instantiate the build environments we
# need. We try to identify the needed environment for each target; if
# we can't, we fall back on instantiating all the environments just to
# be safe.
target_types = ['debug', 'opt', 'fast', 'prof', 'perf']
obj2target = {'do': 'debug', 'o': 'opt', 'fo': 'fast', 'po': 'prof',
              'gpo' : 'perf'}
def identifyTarget(t):
    ext = t.split('.')[-1]
    if ext in target_types:
        return ext
    if ext in obj2target:
        return obj2target[ext]
    match = re.search(r'/tests/([^/]+)/', t)
    if match and match.group(1) in target_types:
        return match.group(1)
    return 'all'
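# For example (the target paths below are illustrative):
#   identifyTarget('build/ARM/gem5.opt')            -> 'opt'
#   identifyTarget('build/ARM/base/statistics.do')  -> 'debug'
#   identifyTarget('build/ARM/tests/opt/quick')     -> 'opt'
#   identifyTarget('some/other/target')             -> 'all'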
needed_envs = [identifyTarget(target) for target in BUILD_TARGETS]
if 'all' in needed_envs:
    needed_envs += target_types
disable_partial = False
if env['PLATFORM'] == 'darwin':
    # Up until Apple LLVM version 10.0.0 (clang-1000.11.45.5), partially
    # linked objects do not expose symbols that are marked with hidden
    # visibility, and consequently building gem5 on Mac OS fails. As a
    # workaround, we disable partial linking; however, we may want to
    # revisit this in the future.
    disable_partial = True
# Debug binary
if 'debug' in needed_envs:
    makeEnv(env, 'debug', '.do',
            CCFLAGS = Split(ccflags['debug']),
            CPPDEFINES = ['DEBUG', 'TRACING_ON=1'],
            LINKFLAGS = Split(ldflags['debug']),
            disable_partial=disable_partial)
# Optimized binary
if 'opt' in needed_envs:
    makeEnv(env, 'opt', '.o',
            CCFLAGS = Split(ccflags['opt']),
            CPPDEFINES = ['TRACING_ON=1'],
            LINKFLAGS = Split(ldflags['opt']),
            disable_partial=disable_partial)
# "Fast" binary
if 'fast' in needed_envs:
    disable_partial = disable_partial and \
            env.get('BROKEN_INCREMENTAL_LTO', False) and \
            GetOption('force_lto')
    makeEnv(env, 'fast', '.fo', strip = True,
            CCFLAGS = Split(ccflags['fast']),
            CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
            LINKFLAGS = Split(ldflags['fast']),
            disable_partial=disable_partial)
# Profiled binary using gprof
if 'prof' in needed_envs:
    makeEnv(env, 'prof', '.po',
            CCFLAGS = Split(ccflags['prof']),
            CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
            LINKFLAGS = Split(ldflags['prof']),
            disable_partial=disable_partial)
# Profiled binary using google-pprof
if 'perf' in needed_envs:
    makeEnv(env, 'perf', '.gpo',
            CCFLAGS = Split(ccflags['perf']),
            CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
            LINKFLAGS = Split(ldflags['perf']),
            disable_partial=disable_partial)