# -*- mode:python -*-
# Copyright (c) 2018, 2020 ARM Limited
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import array
import bisect
import distutils.spawn
import functools
import imp
import os
import re
import sys
import zlib
from os.path import basename, dirname, exists, isdir, isfile, join as joinpath
import SCons
from gem5_scons import Transform, warning, error
# This file defines how to build a particular configuration of gem5
# based on variable settings in the 'env' build environment.
Import('*')
# Children need to see the environment
Export('env')
build_env = [(opt, env[opt]) for opt in export_vars]
from m5.util import code_formatter
########################################################################
# Code for adding source files of various types
#
# When specifying a source file of some type, a set of tags can be
# specified for that file.
def tag_implies(env, tag, tag_list):
'''
    Associates a tag X with a list of tags that are implied by X.
For example, assume:
- Each file <X>.cc is tagged with the tag "Tag <X>".
- B.cc refers to symbols from A.cc
- C.cc refers to symbols from B.cc
- D.cc refers to symbols from A.cc and C.cc
Then:
- "Tag A" is implied by "Tag B"
- "Tag B" is implied by "Tag C"
- "Tag A" is transitively implied by "Tag C" (from "Tag B")
- "Tag A" and "Tag C" are implied by "Tag D"
- "Tag B" is transitively implied by "Tag D" (from "Tag C")
- "Tag A" is transitively implied by "Tag D" (from transitive "Tag B")
All of these implications are simply declared as:
env.TagImplies("Tag B", "Tag A")
env.TagImplies("Tag C", "Tag B")
env.TagImplies("Tag D", ["Tag A", "Tag C"])
    Any use of a tag will then automatically include its transitively
    implied tags once it is resolved.
'''
env.SetDefault(_tag_implies={})
implications = env['_tag_implies']
if isinstance(tag_list, str):
tag_list = frozenset([tag_list])
if not isinstance(tag_list, frozenset):
tag_list = frozenset(tag_list)
if tag in implications:
implications[tag] |= tag_list
else:
implications[tag] = tag_list
    # Check if any of the tags on which the new tag depends already
    # has a list of implications. If so, add that list to the new tag's
    # implications.
for t in tag_list:
if t in implications:
implications[tag] |= implications[t]
# Check if another tag depends on this tag. If so, add this tag's
# implications to that tag.
for t,implied in implications.items():
if tag in implied:
implications[t] |= implications[tag]
env.AddMethod(tag_implies, 'TagImplies')
def resolve_tags(env, tags):
'''
    Returns the supplied tags together with the complete set of tags
    they imply (their dependencies).
    '''
    env.SetDefault(_tag_implies={})
    implications = env['_tag_implies']
if isinstance(tags, str):
tags = frozenset([tags])
if not isinstance(tags, frozenset):
tags = frozenset(tags)
tags = tags.copy()
for tag in tags:
if tag in implications:
tags |= implications[tag]
return tags
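# As an illustrative sketch (using the hypothetical tags from the
# TagImplies docstring above), resolving a tag yields the tag itself
# plus everything it transitively implies:
#
#   env.TagImplies("Tag B", "Tag A")
#   env.TagImplies("Tag C", "Tag B")
#   env.TagImplies("Tag D", ["Tag A", "Tag C"])
#   resolve_tags(env, "Tag D")
#   # -> frozenset({"Tag D", "Tag A", "Tag B", "Tag C"})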
class SourceFilter(object):
def __init__(self, predicate):
self.predicate = predicate
def __or__(self, other):
return SourceFilter(lambda env, tags: self.predicate(env, tags) or
other.predicate(env, tags))
def __and__(self, other):
return SourceFilter(lambda env, tags: self.predicate(env, tags) and
other.predicate(env, tags))
def with_tags_that(predicate):
    '''Return a SourceFilter selecting sources whose tags satisfy a
    predicate.'''
return SourceFilter(predicate)
def with_any_tags(*tags):
    '''Return a SourceFilter selecting sources with any of the supplied
    tags.'''
return SourceFilter(lambda env, stags: \
len(resolve_tags(env, tags) & stags) > 0)
def with_all_tags(*tags):
    '''Return a SourceFilter selecting sources with all of the supplied
    tags.'''
return SourceFilter(lambda env, stags: resolve_tags(env, tags) <= stags)
def with_tag(tag):
    '''Return a SourceFilter selecting sources with the supplied tag.'''
return with_any_tags(*[tag])
def without_tags(*tags):
    '''Return a SourceFilter selecting sources without any of the
    supplied tags.'''
return SourceFilter(lambda env, stags: \
len(resolve_tags(env, tags) & stags) == 0)
def without_tag(tag):
    '''Return a SourceFilter selecting sources without the supplied tag.'''
return without_tags(*[tag])
source_filter_factories = {
'with_tags_that': with_tags_that,
'with_any_tags': with_any_tags,
'with_all_tags': with_all_tags,
'with_tag': with_tag,
'without_tags': without_tags,
'without_tag': without_tag,
}
Export(source_filter_factories)
class SourceList(list):
def apply_filter(self, env, f):
def match(source):
return f.predicate(env, source.tags)
return SourceList(filter(match, self))
def __getattr__(self, name):
func = source_filter_factories.get(name, None)
if not func:
raise AttributeError
@functools.wraps(func)
def wrapper(env, *args, **kwargs):
return self.apply_filter(env, func(*args, **kwargs))
return wrapper
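# Illustrative sketch of how the filters above are typically applied to
# a SourceList such as Source.all (the tag names are examples used
# elsewhere in this file):
#
#   lib_sources = Source.all.with_tag(env, 'gem5 lib')
#   non_python = Source.all.without_tag(env, 'python')
#   lib_or_main = Source.all.with_any_tags(env, 'gem5 lib', 'main')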
class SourceMeta(type):
'''Meta class for source files that keeps track of all files of a
particular type.'''
def __init__(cls, name, bases, dict):
super(SourceMeta, cls).__init__(name, bases, dict)
cls.all = SourceList()
class SourceFile(object, metaclass=SourceMeta):
    '''Base object that encapsulates the notion of a source file.
    This includes the source node, the target node, and various
    manipulations of those. A source file also specifies a set of tags
    describing arbitrary properties of the source file.'''
static_objs = {}
shared_objs = {}
def __init__(self, source, tags=None, add_tags=None, append=None):
if tags is None:
tags='gem5 lib'
if isinstance(tags, str):
tags = set([tags])
if not isinstance(tags, set):
tags = set(tags)
self.tags = tags
if add_tags:
if isinstance(add_tags, str):
add_tags = set([add_tags])
if not isinstance(add_tags, set):
add_tags = set(add_tags)
self.tags |= add_tags
self.append = append
tnode = source
if not isinstance(source, SCons.Node.FS.File):
tnode = File(source)
self.tnode = tnode
self.snode = tnode.srcnode()
for base in type(self).__mro__:
if issubclass(base, SourceFile):
base.all.append(self)
def static(self, env):
key = (self.tnode, env['OBJSUFFIX'])
if self.append:
env = env.Clone()
env.Append(**self.append)
if not key in self.static_objs:
self.static_objs[key] = env.StaticObject(self.tnode)
return self.static_objs[key]
def shared(self, env):
key = (self.tnode, env['OBJSUFFIX'])
if self.append:
env = env.Clone()
env.Append(**self.append)
if not key in self.shared_objs:
self.shared_objs[key] = env.SharedObject(self.tnode)
return self.shared_objs[key]
@property
def filename(self):
return str(self.tnode)
@property
def dirname(self):
return dirname(self.filename)
@property
def basename(self):
return basename(self.filename)
@property
def extname(self):
index = self.basename.rfind('.')
if index <= 0:
# dot files aren't extensions
return self.basename, None
return self.basename[:index], self.basename[index+1:]
def __lt__(self, other): return self.filename < other.filename
def __le__(self, other): return self.filename <= other.filename
def __gt__(self, other): return self.filename > other.filename
def __ge__(self, other): return self.filename >= other.filename
def __eq__(self, other): return self.filename == other.filename
def __ne__(self, other): return self.filename != other.filename
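# Illustrative sketch of how SourceFile subclasses are declared in
# SConscripts. File and tag names here are hypothetical; tags default to
# 'gem5 lib', and 'append' adds per-file build variables:
#
#   Source('foo.cc')
#   Source('bar.cc', tags='bar lib',
#          append={'CXXFLAGS': '-Wno-array-bounds'})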
def blobToCpp(data, symbol, cpp_code, hpp_code=None, namespace=None):
'''
    Convert binary data into C++ .cc and .hh code defining a
    std::uint8_t byte array that contains that data.
    :param data: binary data to be converted to C++
    :param symbol: name of the symbol
    :param cpp_code: append the generated definition code to this object
    :param hpp_code: append the generated declaration code to this
                     object. If None, no declarations are generated and
                     the definition does not include the .hh file.
    :param namespace: namespace to put the symbols into. If None,
                      don't put the symbols into any namespace.
'''
symbol_len_declaration = 'const std::size_t {}_len'.format(symbol)
symbol_declaration = 'const std::uint8_t {}[]'.format(symbol)
if hpp_code is not None:
cpp_code('''\
#include "blobs/{}.hh"
'''.format(symbol))
hpp_code('''\
#include <cstddef>
#include <cstdint>
''')
if namespace is not None:
hpp_code('namespace {} {{'.format(namespace))
hpp_code('extern ' + symbol_len_declaration + ';')
hpp_code('extern ' + symbol_declaration + ';')
if namespace is not None:
hpp_code('}')
if namespace is not None:
cpp_code('namespace {} {{'.format(namespace))
if hpp_code is not None:
cpp_code(symbol_len_declaration + ' = {};'.format(len(data)))
cpp_code(symbol_declaration + ' = {')
cpp_code.indent()
step = 16
for i in range(0, len(data), step):
x = array.array('B', data[i:i+step])
cpp_code(''.join('%d,' % d for d in x))
cpp_code.dedent()
cpp_code('};')
if namespace is not None:
cpp_code('}')
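# A rough sketch of what blobToCpp() generates for a hypothetical symbol
# "foo" when both cpp_code and hpp_code are given (no namespace):
#
#   hpp_code:
#     #include <cstddef>
#     #include <cstdint>
#     extern const std::size_t foo_len;
#     extern const std::uint8_t foo[];
#   cpp_code:
#     #include "blobs/foo.hh"
#     const std::size_t foo_len = <len(data)>;
#     const std::uint8_t foo[] = {
#         <16 comma-separated byte values per line>
#     };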
def Blob(blob_path, symbol):
'''
Embed an arbitrary blob into the gem5 executable,
and make it accessible to C++ as a byte array.
'''
blob_path = os.path.abspath(blob_path)
blob_out_dir = os.path.join(env['BUILDDIR'], 'blobs')
path_noext = joinpath(blob_out_dir, symbol)
cpp_path = path_noext + '.cc'
hpp_path = path_noext + '.hh'
def embedBlob(target, source, env):
with open(str(source[0]), 'rb') as f:
data = f.read()
cpp_code = code_formatter()
hpp_code = code_formatter()
blobToCpp(data, symbol, cpp_code, hpp_code, namespace='Blobs')
cpp_path = str(target[0])
hpp_path = str(target[1])
cpp_dir = os.path.split(cpp_path)[0]
if not os.path.exists(cpp_dir):
os.makedirs(cpp_dir)
cpp_code.write(cpp_path)
hpp_code.write(hpp_path)
env.Command([cpp_path, hpp_path], blob_path,
MakeAction(embedBlob, Transform("EMBED BLOB")))
Source(cpp_path)
def GdbXml(xml_id, symbol):
Blob(joinpath(gdb_xml_dir, xml_id), symbol)
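# Illustrative (hypothetical) usage from a SConscript; the data becomes
# available to C++ as Blobs::my_firmware and Blobs::my_firmware_len:
#
#   Blob('my_firmware.bin', 'my_firmware')
#   GdbXml('aarch64-core.xml', 'gdb_xml_aarch64_core')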
class Source(SourceFile):
pass
class PySource(SourceFile):
'''Add a python source file to the named package'''
    invalid_sym_char = re.compile('[^A-Za-z0-9_]')
modules = {}
tnodes = {}
symnames = {}
def __init__(self, package, source, tags=None, add_tags=None):
'''specify the python package, the source file, and any tags'''
super(PySource, self).__init__(source, tags, add_tags)
modname,ext = self.extname
assert ext == 'py'
if package:
path = package.split('.')
else:
path = []
modpath = path[:]
if modname != '__init__':
modpath += [ modname ]
modpath = '.'.join(modpath)
arcpath = path + [ self.basename ]
abspath = self.snode.abspath
if not exists(abspath):
abspath = self.tnode.abspath
self.package = package
self.modname = modname
self.modpath = modpath
self.arcname = joinpath(*arcpath)
self.abspath = abspath
self.compiled = File(self.filename + 'c')
self.cpp = File(self.filename + '.cc')
self.symname = PySource.invalid_sym_char.sub('_', modpath)
PySource.modules[modpath] = self
PySource.tnodes[self.tnode] = self
PySource.symnames[self.symname] = self
class SimObject(PySource):
'''Add a SimObject python file as a python source object and add
it to a list of sim object modules'''
fixed = False
modnames = []
def __init__(self, source, tags=None, add_tags=None):
'''Specify the source file and any tags (automatically in
the m5.objects package)'''
super(SimObject, self).__init__('m5.objects', source, tags, add_tags)
if self.fixed:
raise AttributeError("Too late to call SimObject now.")
bisect.insort_right(SimObject.modnames, self.modname)
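# Illustrative sketch of typical declarations in a SConscript;
# 'MyDevice.py' is a hypothetical SimObject description file:
#
#   PySource('m5', 'python/m5/defines.py')
#   SimObject('MyDevice.py')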
# This regular expression is simplistic and assumes that the import takes up
# the entire line, doesn't use the keyword "public", uses double quotes, has
# no whitespace before or after the ';', and is all on one line. This should
# still cover most cases, and a completely accurate scanner would be MUCH
# more complex.
protoc_import_re = re.compile(r'^import\s+\"(.*\.proto)\"\;$', re.M)
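# For example, the scanner below would record a dependency for a line
# like the following in a .proto file (file name hypothetical):
#
#   import "packet.proto";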
def protoc_scanner(node, env, path):
deps = []
for imp in protoc_import_re.findall(node.get_text_contents()):
deps.append(Dir(env['BUILDDIR']).File(imp))
return deps
env.Append(SCANNERS=Scanner(function=protoc_scanner, skeys=['.proto']))
def protoc_emitter(target, source, env):
root, ext = os.path.splitext(source[0].get_abspath())
return [root + '.pb.cc', root + '.pb.h'], source
env.Append(BUILDERS={'ProtoBufCC' : Builder(
action=MakeAction('${PROTOC} --cpp_out ${BUILDDIR} '
'--proto_path ${BUILDDIR} '
'${SOURCE.get_abspath()}',
Transform("PROTOC")),
emitter=protoc_emitter
)})
class ProtoBuf(SourceFile):
    '''Add a protocol buffer to the build'''
def __init__(self, source, tags=None, add_tags=None):
'''Specify the source file, and any tags'''
super(ProtoBuf, self).__init__(source, tags, add_tags)
if not env['HAVE_PROTOC'] or not env['HAVE_PROTOBUF']:
            error('Got a protobuf file to build, but protobuf support '
                  'is not available!')
# Get the file name and the extension
modname,ext = self.extname
assert ext == 'proto'
self.cc_file, self.hh_file = env.ProtoBufCC(source=source)
# Add the C++ source file
Source(self.cc_file, tags=self.tags,
append={'CXXFLAGS': '-Wno-array-bounds'})
env['PROTOC_GRPC'] = distutils.spawn.find_executable('grpc_cpp_plugin')
if env['PROTOC_GRPC']:
env.Append(LIBS=['grpc++'])
def protoc_grpc_emitter(target, source, env):
root, ext = os.path.splitext(source[0].get_abspath())
return [root + '.grpc.pb.cc', root + '.grpc.pb.h'], source
env.Append(BUILDERS={'GrpcProtoBufCC' : Builder(
action=MakeAction('${PROTOC} --grpc_out ${BUILDDIR} '
'--plugin=protoc-gen-grpc=${PROTOC_GRPC} '
'--proto_path ${BUILDDIR} '
'${SOURCE.get_abspath()}',
Transform("PROTOC")),
emitter=protoc_grpc_emitter
)})
class GrpcProtoBuf(SourceFile):
'''Add a GRPC protocol buffer to the build'''
def __init__(self, source, tags=None, add_tags=None):
'''Specify the source file, and any tags'''
super(GrpcProtoBuf, self).__init__(source, tags, add_tags)
if not env['PROTOC_GRPC']:
error('No grpc_cpp_plugin found')
self.cc_file, self.hh_file = env.GrpcProtoBufCC(source=source)
# We still need to build the normal protobuf code too.
self.protobuf = ProtoBuf(source, tags=self.tags)
# Add the C++ source file.
Source(self.cc_file, tags=self.tags,
append={'CXXFLAGS': '-Wno-array-bounds'})
executable_classes = []
class ExecutableMeta(type):
'''Meta class for Executables.'''
all = []
def __init__(cls, name, bases, d):
ExecutableMeta.all.append(cls)
super(ExecutableMeta, cls).__init__(name, bases, d)
cls.all = []
class Executable(object, metaclass=ExecutableMeta):
'''Base class for creating an executable from sources.'''
def __init__(self, target, *srcs_and_filts):
        '''Specify the target name and any sources. Sources that are
        not SourceFiles are wrapped with Source().'''
super(Executable, self).__init__()
self.all.append(self)
self.target = target
isFilter = lambda arg: isinstance(arg, SourceFilter)
        # Materialize the filters, since declare() may be called once per
        # build variant and a bare filter() iterator would be exhausted
        # after its first use.
        self.filters = list(filter(isFilter, srcs_and_filts))
        sources = list(filter(lambda a: not isFilter(a), srcs_and_filts))
srcs = SourceList()
for src in sources:
if not isinstance(src, SourceFile):
src = Source(src, tags=[])
srcs.append(src)
self.sources = srcs
self.dir = Dir('.')
def path(self, env):
return self.dir.File(self.target + '.' + env['EXE_SUFFIX'])
def srcs_to_objs(self, env, sources):
return list([ s.static(env) for s in sources ])
@classmethod
def declare_all(cls, env):
return list([ instance.declare(env) for instance in cls.all ])
def declare(self, env, objs=None):
if objs is None:
objs = self.srcs_to_objs(env, self.sources)
env = env.Clone()
env['BIN_RPATH_PREFIX'] = os.path.relpath(
env['BUILDDIR'], self.path(env).dir.abspath)
if env['STRIP_EXES']:
stripped = self.path(env)
unstripped = env.File(str(stripped) + '.unstripped')
if sys.platform == 'sunos5':
cmd = 'cp $SOURCE $TARGET; strip $TARGET'
else:
cmd = 'strip $SOURCE -o $TARGET'
env.Program(unstripped, objs)
return env.Command(stripped, unstripped,
MakeAction(cmd, Transform("STRIP")))
else:
return env.Program(self.path(env), objs)
class GTest(Executable):
    '''Create a unit test based on the Google Test framework.'''
all = []
def __init__(self, *srcs_and_filts, **kwargs):
super(GTest, self).__init__(*srcs_and_filts)
self.skip_lib = kwargs.pop('skip_lib', False)
@classmethod
def declare_all(cls, env):
env = env.Clone()
env.Append(LIBS=env['GTEST_LIBS'])
env.Append(CPPFLAGS=env['GTEST_CPPFLAGS'])
env['GTEST_LIB_SOURCES'] = Source.all.with_tag(env, 'gtest lib')
env['GTEST_OUT_DIR'] = \
Dir(env['BUILDDIR']).Dir('unittests.' + env['EXE_SUFFIX'])
return super(GTest, cls).declare_all(env)
def declare(self, env):
sources = list(self.sources)
if not self.skip_lib:
sources += env['GTEST_LIB_SOURCES']
for f in self.filters:
sources += Source.all.apply_filter(env, f)
objs = self.srcs_to_objs(env, sources)
binary = super(GTest, self).declare(env, objs)
out_dir = env['GTEST_OUT_DIR']
xml_file = out_dir.Dir(str(self.dir)).File(self.target + '.xml')
AlwaysBuild(env.Command(xml_file, binary,
"${SOURCES[0]} --gtest_output=xml:${TARGETS[0]}"))
return binary
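# Illustrative sketch of declaring a unit test in a SConscript (names
# are hypothetical); the test also links the sources tagged 'gtest lib'
# unless skip_lib=True:
#
#   GTest('bitunion.test', 'bitunion.test.cc')
#   GTest('standalone.test', 'standalone.test.cc', skip_lib=True)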
class Gem5(Executable):
'''Create a gem5 executable.'''
def __init__(self, target):
super(Gem5, self).__init__(target)
def declare(self, env):
objs = env['MAIN_OBJS'] + env['STATIC_OBJS']
return super(Gem5, self).declare(env, objs)
# Children should have access
Export('Blob')
Export('GdbXml')
Export('Source')
Export('PySource')
Export('SimObject')
Export('ProtoBuf')
Export('GrpcProtoBuf')
Export('Executable')
Export('GTest')
########################################################################
#
# Debug Flags
#
debug_flags = {}
def DebugFlag(name, desc=None, fmt=False):
if name in debug_flags:
raise AttributeError("Flag {} already specified".format(name))
debug_flags[name] = (name, (), desc, fmt)
def CompoundFlag(name, flags, desc=None):
if name in debug_flags:
raise AttributeError("Flag {} already specified".format(name))
compound = tuple(flags)
debug_flags[name] = (name, compound, desc, False)
def DebugFormatFlag(name, desc=None):
DebugFlag(name, desc, True)
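# Illustrative sketch of how debug flags are declared in SConscripts
# (flag names are hypothetical):
#
#   DebugFlag('MyFlag', 'Trace activity in my component')
#   CompoundFlag('MyFlagAll', ['MyFlag', 'MyOtherFlag'])
#   DebugFormatFlag('FmtMyFormat', 'Tweak trace message formatting')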
# Create a compound debug flag that encapsulates all flags: "All". This
# flag should not be used within C++ code - it is a compound meta flag
def _createAllDebugFlag():
simple_flags = []
for name,flag in sorted(debug_flags.items()):
n, compound, desc, fmt = flag
assert n == name
if not compound and not fmt:
simple_flags.append(n)
CompoundFlag("All", simple_flags,
"Controls all debug flags. It should not be used within C++ code.")
Export('DebugFlag')
Export('CompoundFlag')
Export('DebugFormatFlag')
########################################################################
#
# Set some compiler variables
#
# Include file paths are rooted in this directory. SCons will
# automatically expand '.' to refer to both the source directory and
# the corresponding build directory to pick up generated include
# files.
env.Append(CPPPATH=Dir('.'))
for extra_dir in extras_dir_list:
env.Append(CPPPATH=Dir(extra_dir))
# Workaround for bug in SCons version > 0.97d20071212
# Scons bug id: 2006 gem5 Bug id: 308
for root, dirs, files in os.walk(base_dir, topdown=True):
Dir(root[len(base_dir) + 1:])
########################################################################
#
# Walk the tree and execute all SConscripts in subdirectories
#
here = Dir('.').srcnode().abspath
for root, dirs, files in os.walk(base_dir, topdown=True):
if root == here:
# we don't want to recurse back into this SConscript
continue
if 'SConscript' in files:
build_dir = joinpath(env['BUILDDIR'], root[len(base_dir) + 1:])
SConscript(joinpath(root, 'SConscript'), variant_dir=build_dir)
for extra_dir in extras_dir_list:
prefix_len = len(dirname(extra_dir)) + 1
# Also add the corresponding build directory to pick up generated
# include files.
env.Append(CPPPATH=Dir(joinpath(env['BUILDDIR'], extra_dir[prefix_len:])))
for root, dirs, files in os.walk(extra_dir, topdown=True):
# if build lives in the extras directory, don't walk down it
if 'build' in dirs:
dirs.remove('build')
if 'SConscript' in files:
build_dir = joinpath(env['BUILDDIR'], root[prefix_len:])
SConscript(joinpath(root, 'SConscript'), variant_dir=build_dir)
for opt in export_vars:
env.ConfigFile(opt)
def makeTheISA(source, target, env):
isas = [ src.get_contents().decode('utf-8') for src in source ]
target_isa = env['TARGET_ISA']
def define(isa):
return str(isa.upper()) + '_ISA'
def namespace(isa):
return isa[0].upper() + isa[1:].lower() + 'ISA'
code = code_formatter()
code('''\
#ifndef __CONFIG_THE_ISA_HH__
#define __CONFIG_THE_ISA_HH__
''')
# create defines for the preprocessing and compile-time determination
for i,isa in enumerate(isas):
code('#define $0 $1', define(isa), i + 1)
code()
    # create an enum for any run-time determination of the ISA; we
    # reuse the same names as the namespaces
code('enum class Arch {')
for isa in isas:
code(' $0 = $1,', namespace(isa), define(isa))
code('};')
code('''
#define THE_ISA ${{define(target_isa)}}
#define TheISA ${{namespace(target_isa)}}
#define THE_ISA_STR "${{target_isa}}"
#endif // __CONFIG_THE_ISA_HH__''')
code.write(str(target[0]))
env.Command('config/the_isa.hh', list(map(Value, all_isa_list)),
MakeAction(makeTheISA, Transform("CFG ISA", 0)))
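# A rough sketch of the generated config/the_isa.hh, assuming an ISA
# list of ['arm', 'x86'] and TARGET_ISA set to 'arm' (values are
# illustrative; include guards omitted):
#
#   #define ARM_ISA 1
#   #define X86_ISA 2
#   enum class Arch {
#     ArmISA = ARM_ISA,
#     X86ISA = X86_ISA,
#   };
#   #define THE_ISA ARM_ISA
#   #define TheISA ArmISA
#   #define THE_ISA_STR "arm"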
def makeTheGPUISA(source, target, env):
isas = [ src.get_contents().decode('utf-8') for src in source ]
target_gpu_isa = env['TARGET_GPU_ISA']
def define(isa):
return str(isa.upper()) + '_ISA'
def namespace(isa):
return isa[0].upper() + isa[1:].lower() + 'ISA'
code = code_formatter()
code('''\
#ifndef __CONFIG_THE_GPU_ISA_HH__
#define __CONFIG_THE_GPU_ISA_HH__
''')
# create defines for the preprocessing and compile-time determination
for i,isa in enumerate(isas):
code('#define $0 $1', define(isa), i + 1)
code()
    # create an enum for any run-time determination of the ISA; we
    # reuse the same names as the namespaces
code('enum class GPUArch {')
for isa in isas:
code(' $0 = $1,', namespace(isa), define(isa))
code('};')
code('''
#define THE_GPU_ISA ${{define(target_gpu_isa)}}
#define TheGpuISA ${{namespace(target_gpu_isa)}}
#define THE_GPU_ISA_STR "${{target_gpu_isa}}"
#endif // __CONFIG_THE_GPU_ISA_HH__''')
code.write(str(target[0]))
env.Command('config/the_gpu_isa.hh', list(map(Value, all_gpu_isa_list)),
MakeAction(makeTheGPUISA, Transform("CFG ISA", 0)))
########################################################################
#
# Prevent any SimObjects from being added after this point; they
# should all have been added in the SConscripts above
#
SimObject.fixed = True
class DictImporter(object):
'''This importer takes a dictionary of arbitrary module names that
map to arbitrary filenames.'''
def __init__(self, modules):
self.modules = modules
self.installed = set()
def unload(self):
import sys
for module in self.installed:
del sys.modules[module]
self.installed = set()
def find_module(self, fullname, path):
if fullname == 'm5.defines':
return self
if fullname == 'm5.objects':
return self
if fullname.startswith('_m5'):
return None
source = self.modules.get(fullname, None)
if source is not None and fullname.startswith('m5.objects'):
return self
return None
def load_module(self, fullname):
mod = imp.new_module(fullname)
sys.modules[fullname] = mod
self.installed.add(fullname)
mod.__loader__ = self
if fullname == 'm5.objects':
mod.__path__ = fullname.split('.')
return mod
if fullname == 'm5.defines':
mod.__dict__['buildEnv'] = dict(build_env)
return mod
source = self.modules[fullname]
if source.modname == '__init__':
mod.__path__ = source.modpath
mod.__file__ = source.abspath
compiled = compile(open(source.abspath).read(), source.abspath, 'exec')
exec(compiled, mod.__dict__)
return mod
import m5.SimObject
import m5.params
from m5.util import code_formatter
m5.SimObject.clear()
m5.params.clear()
# Install the python importer so we can grab stuff from the source
# tree itself. We can't have SimObjects added after this point or
# else we won't know about them for the rest of the build.
importer = DictImporter(PySource.modules)
sys.meta_path[0:0] = [ importer ]
# Import all of the SimObject modules so that m5.SimObject.allClasses
# and m5.params.allEnums get populated.
for modname in SimObject.modnames:
exec('from m5.objects import %s' % modname)
# we need to unload all of the currently imported modules so that they
# will be re-imported the next time the sconscript is run
importer.unload()
sys.meta_path.remove(importer)
sim_objects = m5.SimObject.allClasses
all_enums = m5.params.allEnums
for name,obj in sorted(sim_objects.items()):
for param in obj._params.local.values():
# load the ptype attribute now because it depends on the
# current version of SimObject.allClasses, but when scons
# actually uses the value, all versions of
# SimObject.allClasses will have been loaded
param.ptype
########################################################################
#
# calculate extra dependencies
#
module_depends = ["m5", "m5.SimObject", "m5.params"]
depends = [ PySource.modules[dep].snode for dep in module_depends ]
depends.sort(key = lambda x: x.name)
########################################################################
#
# Commands for the basic automatically generated python files
#
# Generate Python file containing a dict specifying the current
# buildEnv flags.
def makeDefinesPyFile(target, source, env):
build_env = source[0].get_contents().decode('utf-8')
code = code_formatter()
code("""
import _m5.core
import m5.util
buildEnv = dict($build_env)
compileDate = _m5.core.compileDate
gem5Version = _m5.core.gem5Version
_globals = globals()
for key,val in _m5.core.__dict__.items():
if key.startswith('flag_'):
flag = key[5:]
_globals[flag] = val
del _globals
""")
code.write(target[0].abspath)
defines_info = Value(build_env)
# Generate a file with all of the compile options in it
env.Command('python/m5/defines.py', defines_info,
MakeAction(makeDefinesPyFile, Transform("DEFINES", 0)))
PySource('m5', 'python/m5/defines.py')
# Generate python file containing info about the M5 source code
def makeInfoPyFile(target, source, env):
code = code_formatter()
for src in source:
with open(src.srcnode().abspath, 'r') as f:
data = ''.join(f)
code('$src = ${{repr(data)}}')
code.write(str(target[0]))
# Generate a file that wraps the basic top level files
env.Command('python/m5/info.py',
[ '#/COPYING', '#/LICENSE', '#/README', ],
MakeAction(makeInfoPyFile, Transform("INFO")))
PySource('m5', 'python/m5/info.py')
########################################################################
#
# Create all of the SimObject param headers and enum headers
#
def createSimObjectParamDecl(target, source, env):
assert len(target) == 1 and len(source) == 1
name = source[0].get_text_contents()
obj = sim_objects[name]
code = code_formatter()
obj.cxx_param_decl(code)
code.write(target[0].abspath)
def createSimObjectParamDef(target, source, env):
assert len(target) == 1 and len(source) == 1
name = source[0].get_text_contents()
obj = sim_objects[name]
code = code_formatter()
obj.cxx_param_def(code)
code.write(target[0].abspath)
def createSimObjectCxxConfig(is_header):
def body(target, source, env):
assert len(target) == 1 and len(source) == 1
name = source[0].get_contents().decode('utf-8')
obj = sim_objects[name]
code = code_formatter()
obj.cxx_config_param_file(code, is_header)
code.write(target[0].abspath)
return body
def createEnumStrings(target, source, env):
assert len(target) == 1 and len(source) == 2
name = source[0].get_text_contents()
use_python = source[1].read()
obj = all_enums[name]
code = code_formatter()
obj.cxx_def(code)
if use_python:
obj.pybind_def(code)
code.write(target[0].abspath)
def createEnumDecls(target, source, env):
assert len(target) == 1 and len(source) == 1
name = source[0].get_text_contents()
obj = all_enums[name]
code = code_formatter()
obj.cxx_decl(code)
code.write(target[0].abspath)
def createSimObjectPyBindWrapper(target, source, env):
name = source[0].get_text_contents()
obj = sim_objects[name]
code = code_formatter()
obj.pybind_decl(code)
code.write(target[0].abspath)
# Generate all of the SimObject param C++ struct header files
params_hh_files = []
for name,simobj in sorted(sim_objects.items()):
# If this simobject's source changes, we need to regenerate the header.
py_source = PySource.modules[simobj.__module__]
extra_deps = [ py_source.tnode ]
# Get the params for just this SimObject, excluding base classes.
params = simobj._params.local.values()
# Extract the parameters' c++ types.
types = sorted(map(lambda p: p.ptype.cxx_type, params))
# If any of these types have changed, we need to regenerate the header.
extra_deps.append(Value(types))
hh_file = File('params/%s.hh' % name)
params_hh_files.append(hh_file)
env.Command(hh_file, Value(name),
MakeAction(createSimObjectParamDecl, Transform("SOPARMHH")))
env.Depends(hh_file, depends + extra_deps)
if not getattr(simobj, 'abstract', False) and hasattr(simobj, 'type'):
cc_file = File('params/%s.cc' % name)
env.Command(cc_file, Value(name),
MakeAction(createSimObjectParamDef, Transform("SOPARMCC")))
env.Depends(cc_file, depends + extra_deps)
Source(cc_file)
# C++ parameter description files
if GetOption('with_cxx_config'):
for name,simobj in sorted(sim_objects.items()):
py_source = PySource.modules[simobj.__module__]
extra_deps = [ py_source.tnode ]
cxx_config_hh_file = File('cxx_config/%s.hh' % name)
cxx_config_cc_file = File('cxx_config/%s.cc' % name)
env.Command(cxx_config_hh_file, Value(name),
MakeAction(createSimObjectCxxConfig(True),
Transform("CXXCPRHH")))
env.Command(cxx_config_cc_file, Value(name),
MakeAction(createSimObjectCxxConfig(False),
Transform("CXXCPRCC")))
env.Depends(cxx_config_hh_file, depends + extra_deps +
[File('params/%s.hh' % name), File('sim/cxx_config.hh')])
env.Depends(cxx_config_cc_file, depends + extra_deps +
[cxx_config_hh_file])
Source(cxx_config_cc_file)
cxx_config_init_cc_file = File('cxx_config/init.cc')
def createCxxConfigInitCC(target, source, env):
assert len(target) == 1 and len(source) == 1
code = code_formatter()
for name,simobj in sorted(sim_objects.items()):
if not hasattr(simobj, 'abstract') or not simobj.abstract:
code('#include "cxx_config/${name}.hh"')
code()
code('void cxxConfigInit()')
code('{')
code.indent()
for name,simobj in sorted(sim_objects.items()):
not_abstract = not hasattr(simobj, 'abstract') or \
not simobj.abstract
if not_abstract and 'type' in simobj.__dict__:
code('cxx_config_directory["${name}"] = '
'${name}CxxConfigParams::makeDirectoryEntry();')
code.dedent()
code('}')
code.write(target[0].abspath)
py_source = PySource.modules[simobj.__module__]
extra_deps = [ py_source.tnode ]
env.Command(cxx_config_init_cc_file, Value(name),
MakeAction(createCxxConfigInitCC, Transform("CXXCINIT")))
cxx_param_hh_files = ["cxx_config/%s.hh" % simobj
for name,simobj in sorted(sim_objects.items())
if not hasattr(simobj, 'abstract') or not simobj.abstract]
Depends(cxx_config_init_cc_file, cxx_param_hh_files +
[File('sim/cxx_config.hh')])
Source(cxx_config_init_cc_file)
# Generate all enum header files
for name,enum in sorted(all_enums.items()):
py_source = PySource.modules[enum.__module__]
extra_deps = [ py_source.tnode ]
cc_file = File('enums/%s.cc' % name)
env.Command(cc_file, [Value(name), Value(env['USE_PYTHON'])],
MakeAction(createEnumStrings, Transform("ENUM STR")))
env.Depends(cc_file, depends + extra_deps)
Source(cc_file)
hh_file = File('enums/%s.hh' % name)
env.Command(hh_file, Value(name),
MakeAction(createEnumDecls, Transform("ENUMDECL")))
env.Depends(hh_file, depends + extra_deps)
# Generate SimObject Python bindings wrapper files
if env['USE_PYTHON']:
for name,simobj in sorted(sim_objects.items()):
py_source = PySource.modules[simobj.__module__]
extra_deps = [ py_source.tnode ]
cc_file = File('python/_m5/param_%s.cc' % name)
env.Command(cc_file, Value(name),
MakeAction(createSimObjectPyBindWrapper,
Transform("SO PyBind")))
env.Depends(cc_file, depends + extra_deps)
Source(cc_file)
#
# Handle debug flags
#
def makeDebugFlagCC(target, source, env):
assert(len(target) == 1 and len(source) == 1)
code = code_formatter()
    # delay the definition of CompoundFlags until after the definition
    # of all constituent SimpleFlags
comp_code = code_formatter()
# file header
code('''
/*
* DO NOT EDIT THIS FILE! Automatically generated by SCons.
*/
#include "base/debug.hh"
namespace Debug {
''')
for name, flag in sorted(source[0].read().items()):
n, compound, desc, fmt = flag
assert n == name
# We intentionally make flag a reference to a heap allocated object so
# (1) It has a similar interface to a global object like before
# (2) It does not get destructed at the end of simulation
# The second property is desirable as global objects from different
# translation units do not have a defined destruction order, so it'll
# be unsafe to access debug flags in their destructor in such cases.
if not compound:
code('SimpleFlag& $name = *(')
code.indent()
if fmt:
code('new SimpleFlag("$name", "$desc", true)')
else:
code('new SimpleFlag("$name", "$desc", false)')
code.dedent()
code(');')
else:
comp_code('CompoundFlag& $name = *(')
comp_code.indent()
comp_code('new CompoundFlag("$name", "$desc", {')
comp_code.indent()
for flag in compound:
comp_code('&$flag,')
comp_code.dedent()
comp_code('})')
comp_code.dedent()
comp_code(');')
code.append(comp_code)
code()
code('} // namespace Debug')
code.write(str(target[0]))
def makeDebugFlagHH(target, source, env):
assert(len(target) == 1 and len(source) == 1)
val = eval(source[0].get_contents())
name, compound, desc, fmt = val
code = code_formatter()
# file header boilerplate
code('''\
/*
* DO NOT EDIT THIS FILE! Automatically generated by SCons.
*/
#ifndef __DEBUG_${name}_HH__
#define __DEBUG_${name}_HH__
namespace Debug {
''')
if compound:
code('class CompoundFlag;')
code('class SimpleFlag;')
if compound:
code('extern CompoundFlag& $name;')
for flag in compound:
code('extern SimpleFlag& $flag;')
else:
code('extern SimpleFlag& $name;')
code('''
}
#endif // __DEBUG_${name}_HH__
''')
code.write(str(target[0]))
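# A rough sketch of the generated debug/<Flag>.hh for a hypothetical
# simple flag named "MyFlag" (include guards omitted):
#
#   namespace Debug {
#   class SimpleFlag;
#   extern SimpleFlag& MyFlag;
#   }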
# Generate the files for the debug and debug-format flags
_createAllDebugFlag()
for name,flag in sorted(debug_flags.items()):
n, compound, desc, fmt = flag
assert n == name
hh_file = 'debug/%s.hh' % name
env.Command(hh_file, Value(flag),
MakeAction(makeDebugFlagHH, Transform("TRACING", 0)))
env.Command('debug/flags.cc', Value(debug_flags),
MakeAction(makeDebugFlagCC, Transform("TRACING", 0)))
Source('debug/flags.cc')
# version tags
tags = \
env.Command('sim/tags.cc', None,
MakeAction('util/cpt_upgrader.py --get-cc-file > $TARGET',
Transform("VER TAGS")))
env.AlwaysBuild(tags)
# Embed python files. All .py files that have been indicated by a
# PySource() call in a SConscript need to be embedded into the M5
# library. To do that, we compile the file to byte code, marshal the
# byte code, compress it, and then generate a c++ file that
# inserts the result into an array.
def embedPyFile(target, source, env):
def c_str(string):
if string is None:
return "0"
return '"%s"' % string
    '''Action function to compile a .py into a code object, marshal it,
    compress it, and embed the result in a generated C++ file as a byte
    array. The action takes two sources:
source[0]: Binary used to marshal Python sources
source[1]: Python script to marshal
'''
import subprocess
marshalled = subprocess.check_output(
[source[0].abspath, str(source[1])], env=env['ENV'])
compressed = zlib.compress(marshalled)
data = compressed
pysource = PySource.tnodes[source[1]]
sym = pysource.symname
code = code_formatter()
code('''\
#include "sim/init.hh"
namespace {
''')
blobToCpp(data, 'data_' + sym, code)
code('''\
EmbeddedPython embedded_${sym}(
${{c_str(pysource.arcname)}},
${{c_str(pysource.abspath)}},
${{c_str(pysource.modpath)}},
data_${sym},
${{len(data)}},
${{len(marshalled)}});
} // anonymous namespace
''')
code.write(str(target[0]))
if main['USE_PYTHON']:
# Build a small helper that marshals the Python code using the same
# version of Python as gem5. This is in an unorthodox location to
# avoid building it for every variant.
py_marshal = marshal_env.Program('marshal', 'python/marshal.cc')[0]
for source in PySource.all:
marshal_env.Command(source.cpp, [ py_marshal, source.tnode ],
MakeAction(embedPyFile, Transform("EMBED PY")))
Source(source.cpp, tags=source.tags, add_tags='python')
########################################################################
#
# Define binaries. Each different build type (debug, opt, etc.) gets
# a slightly different build environment.
#
# List of constructed environments to pass back to SConstruct
date_source = Source('base/date.cc', tags=[])
gem5_binary = Gem5('gem5')
# Function to create a new build environment as clone of current
# environment 'env' with modified object suffix and optional stripped
# binary. Additional keyword arguments are appended to corresponding
# build environment vars.
def makeEnv(env, label, objsfx, strip=False, **kwargs):
# SCons doesn't know to append a library suffix when there is a '.' in the
# name. Use '_' instead.
libname = 'gem5_' + label
secondary_exename = 'm5.' + label
new_env = env.Clone(OBJSUFFIX=objsfx, SHOBJSUFFIX=objsfx + 's')
new_env.Label = label
new_env.Append(**kwargs)
lib_sources = Source.all.with_tag(new_env, 'gem5 lib')
# Without Python, leave out all Python content from the library
# builds. The option doesn't affect gem5 built as a program
if GetOption('without_python'):
lib_sources = lib_sources.without_tag(new_env, 'python')
static_objs = list([ s.static(new_env) for s in lib_sources ])
shared_objs = list([ s.shared(new_env) for s in lib_sources ])
static_date = date_source.static(new_env)
new_env.Depends(static_date, static_objs)
static_objs.extend(static_date)
shared_date = date_source.shared(new_env)
new_env.Depends(shared_date, shared_objs)
shared_objs.extend(shared_date)
main_objs = [ s.static(new_env) for s in
Source.all.with_tag(new_env, 'main') ]
# First make a library of everything but main() so other programs can
# link against m5.
static_lib = new_env.StaticLibrary(libname, static_objs)
shared_lib = new_env.SharedLibrary(libname, shared_objs)
# Keep track of the object files generated so far so Executables can
# include them.
new_env['STATIC_OBJS'] = static_objs
new_env['SHARED_OBJS'] = shared_objs
new_env['MAIN_OBJS'] = main_objs
new_env['STATIC_LIB'] = static_lib
new_env['SHARED_LIB'] = shared_lib
# Record some settings for building Executables.
new_env['EXE_SUFFIX'] = label
new_env['STRIP_EXES'] = strip
for cls in ExecutableMeta.all:
cls.declare_all(new_env)
new_env.M5Binary = File(gem5_binary.path(new_env))
new_env.Command(secondary_exename, new_env.M5Binary,
MakeAction('ln $SOURCE $TARGET', Transform("HARDLINK")))
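# A rough sketch of what makeEnv() produces for label 'opt' (file names
# follow from the code above; library suffixes assume Linux):
#
#   gem5.opt          built from MAIN_OBJS + STATIC_OBJS
#   m5.opt            hard link to gem5.opt
#   libgem5_opt.a     static library of everything but main()
#   libgem5_opt.so    shared library counterpart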
# Start out with the compiler flags common to all compilers,
# i.e. they all use -g for opt and -g -pg for prof
ccflags = {'debug' : [], 'opt' : ['-g'], 'fast' : [], 'prof' : ['-g', '-pg'],
'perf' : ['-g']}
# Start out with the linker flags common to all linkers, i.e. -pg for
# prof, and -lprofiler for perf. The -lprofiler flag is surrounded by
# no-as-needed and as-needed as the binutils linker is too clever and
# simply doesn't link to the library otherwise.
ldflags = {'debug' : [], 'opt' : [], 'fast' : [], 'prof' : ['-pg'],
'perf' : ['-Wl,--no-as-needed', '-lprofiler', '-Wl,--as-needed']}
# For Link Time Optimization, the optimization flags used to compile
# individual files are decoupled from those used at link time
# (i.e. you can compile with -O3 and perform LTO with -O0), so we need
# to also update the linker flags based on the target.
if env['GCC']:
if sys.platform == 'sunos5':
ccflags['debug'] += ['-gstabs+']
else:
ccflags['debug'] += ['-ggdb3']
ldflags['debug'] += ['-O0']
# opt, fast, prof and perf all share the same cc flags, also add
# the optimization to the ldflags as LTO defers the optimization
# to link time
for target in ['opt', 'fast', 'prof', 'perf']:
ccflags[target] += ['-O3'] + env['LTO_CCFLAGS']
ldflags[target] += ['-O3'] + env['LTO_LDFLAGS']
elif env['CLANG']:
ccflags['debug'] += ['-g', '-O0']
# opt, fast, prof and perf all share the same cc flags
for target in ['opt', 'fast', 'prof', 'perf']:
ccflags[target] += ['-O3']
else:
error('Unknown compiler, please fix compiler options')
# To speed things up, we only instantiate the build environments we
# need. We try to identify the needed environment for each target; if
# we can't, we fall back on instantiating all the environments just to
# be safe.
target_types = ['debug', 'opt', 'fast', 'prof', 'perf']
obj2target = {'do': 'debug', 'o': 'opt', 'fo': 'fast', 'po': 'prof',
'gpo' : 'perf'}
def identifyTarget(t):
ext = t.split('.')[-1]
if ext in target_types:
return ext
if ext in obj2target:
return obj2target[ext]
return 'all'
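# For example, a command-line target such as 'build/ARM/gem5.opt' maps
# to the 'opt' environment, while 'build/ARM/base/foo.do' (hypothetical)
# maps to 'debug' via its object suffix; anything unrecognized falls
# back to 'all', which instantiates every environment.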
needed_envs = [identifyTarget(target) for target in BUILD_TARGETS]
if 'all' in needed_envs:
needed_envs += target_types
# Debug binary
if 'debug' in needed_envs:
makeEnv(env, 'debug', '.do',
CCFLAGS = Split(ccflags['debug']),
CPPDEFINES = ['DEBUG', 'TRACING_ON=1'],
LINKFLAGS = Split(ldflags['debug']))
# Optimized binary
if 'opt' in needed_envs:
makeEnv(env, 'opt', '.o',
CCFLAGS = Split(ccflags['opt']),
CPPDEFINES = ['TRACING_ON=1'],
LINKFLAGS = Split(ldflags['opt']))
# "Fast" binary
if 'fast' in needed_envs:
makeEnv(env, 'fast', '.fo', strip = True,
CCFLAGS = Split(ccflags['fast']),
CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
LINKFLAGS = Split(ldflags['fast']))
# Profiled binary using gprof
if 'prof' in needed_envs:
makeEnv(env, 'prof', '.po',
CCFLAGS = Split(ccflags['prof']),
CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
LINKFLAGS = Split(ldflags['prof']))
# Profiled binary using google-pprof
if 'perf' in needed_envs:
makeEnv(env, 'perf', '.gpo',
CCFLAGS = Split(ccflags['perf']),
CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
LINKFLAGS = Split(ldflags['perf']))