author     Gabe Black <gabeblack@google.com>    2017-10-14 21:39:21 -0700
committer  Gabe Black <gabeblack@google.com>    2017-10-17 18:57:50 +0000
commit     b8efd0e854ae568dae33fe3d24f67054ed016d19 (patch)
tree       bf9eccdc6f2e89bbff30ba3b1470a6c932e09ccd /src
parent     1b63b756cc6ca75acceee51397f815df06fb0dff (diff)
download   gem5-b8efd0e854ae568dae33fe3d24f67054ed016d19.tar.xz
scons: Stop generating inc.d in the isa parser.
Generating dependency/build-product information in the isa parser breaks
scons' idea of how a build is supposed to work. Arm-twisting it into working
forced a lot of false dependencies, which slowed down the build.

Change-Id: Iadee8c930fd7c80136d200d69870df7672a6b3ca
Reviewed-on: https://gem5-review.googlesource.com/5081
Reviewed-by: Jason Lowe-Power <jason@lowepower.com>
Maintainer: Gabe Black <gabeblack@google.com>
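To make the change concrete, here is a minimal sketch -- not part of the patch, and using hypothetical names (run_generator, Gen, desc/main.desc) -- of the pattern the commit moves to: rather than having the generator emit a dependency file (inc.d) that a later pass must scan, the wrapper declares every expected product up front, so scons knows the whole build graph before the build phase starts.

    # Sketch only; assumes it lives in a SConscript read by scons.
    from SCons.Script import Builder, Environment

    def run_generator(target, source, env):
        # The action must produce exactly the files listed in 'target'.
        for t in target:
            with open(str(t), 'w') as f:
                f.write('// generated from %s\n' % source[0])
        return 0

    env = Environment(BUILDERS={'Gen': Builder(action=run_generator)})

    # Enumerate the products statically, the way ISADesc() does for the real
    # isa parser later in this patch, instead of discovering them from inc.d.
    env.Gen(target=['decoder.hh', 'decoder.cc'], source=['desc/main.desc'])

The ISADesc() wrapper added in src/arch/SConscript below follows this shape, listing the parser's outputs explicitly per ISA.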
Diffstat (limited to 'src')
-rwxr-xr-x  src/SConscript                 | 120
-rw-r--r--  src/arch/SConscript            | 206
-rw-r--r--  src/arch/alpha/SConscript      |   2
-rw-r--r--  src/arch/arm/SConscript        |   2
-rwxr-xr-x  src/arch/isa_parser.py         |  34
-rw-r--r--  src/arch/mips/SConscript       |   2
-rw-r--r--  src/arch/null/generated/inc.d  |   0
-rw-r--r--  src/arch/power/SConscript      |   2
-rw-r--r--  src/arch/riscv/SConscript      |   2
-rw-r--r--  src/arch/sparc/SConscript      |   2
-rw-r--r--  src/arch/x86/SConscript        |   2
11 files changed, 158 insertions, 216 deletions
diff --git a/src/SConscript b/src/SConscript
index 519a0a986..911e5a37c 100755
--- a/src/SConscript
+++ b/src/SConscript
@@ -945,14 +945,6 @@ for source in PySource.all:
# List of constructed environments to pass back to SConstruct
date_source = Source('base/date.cc', skip_lib=True)
-# Capture this directory for the closure makeEnv, otherwise when it is
-# called, it won't know what directory it should use.
-variant_dir = Dir('.').path
-def variant(*path):
- return os.path.join(variant_dir, *path)
-def variantd(*path):
- return variant(*path)+'/'
-
# Function to create a new build environment as clone of current
# environment 'env' with modified object suffix and optional stripped
# binary. Additional keyword arguments are appended to corresponding
@@ -960,9 +952,9 @@ def variantd(*path):
def makeEnv(env, label, objsfx, strip=False, disable_partial=False, **kwargs):
# SCons doesn't know to append a library suffix when there is a '.' in the
# name. Use '_' instead.
- libname = variant('gem5_' + label)
- exename = variant('gem5.' + label)
- secondary_exename = variant('m5.' + label)
+ libname = 'gem5_' + label
+ exename = 'gem5.' + label
+ secondary_exename = 'm5.' + label
new_env = env.Clone(OBJSUFFIX=objsfx, SHOBJSUFFIX=objsfx + 's')
new_env.Label = label
@@ -1101,7 +1093,7 @@ def makeEnv(env, label, objsfx, strip=False, disable_partial=False, **kwargs):
test_objs = [ make_obj(s, static=True) for s in test_sources ]
if test.main:
test_objs += main_objs
- path = variant('unittest/%s.%s' % (test.target, label))
+ path = 'unittest/%s.%s' % (test.target, label)
new_env.Program(path, test_objs + static_objs)
progname = exename
@@ -1125,7 +1117,7 @@ def makeEnv(env, label, objsfx, strip=False, disable_partial=False, **kwargs):
# Set up regression tests.
SConscript(os.path.join(env.root.abspath, 'tests', 'SConscript'),
- variant_dir=variantd('tests', new_env.Label),
+ variant_dir=Dir('tests').Dir(new_env.Label).path,
exports={ 'env' : new_env }, duplicate=False)
# Start out with the compiler flags common to all compilers,
@@ -1192,67 +1184,41 @@ needed_envs = [identifyTarget(target) for target in BUILD_TARGETS]
if 'all' in needed_envs:
needed_envs += target_types
-def makeEnvirons(target, source, env):
- # cause any later Source() calls to be fatal, as a diagnostic.
- Source.done()
-
- # Debug binary
- if 'debug' in needed_envs:
- makeEnv(env, 'debug', '.do',
- CCFLAGS = Split(ccflags['debug']),
- CPPDEFINES = ['DEBUG', 'TRACING_ON=1'],
- LINKFLAGS = Split(ldflags['debug']))
-
- # Optimized binary
- if 'opt' in needed_envs:
- makeEnv(env, 'opt', '.o',
- CCFLAGS = Split(ccflags['opt']),
- CPPDEFINES = ['TRACING_ON=1'],
- LINKFLAGS = Split(ldflags['opt']))
-
- # "Fast" binary
- if 'fast' in needed_envs:
- disable_partial = \
- env.get('BROKEN_INCREMENTAL_LTO', False) and \
- GetOption('force_lto')
- makeEnv(env, 'fast', '.fo', strip = True,
- CCFLAGS = Split(ccflags['fast']),
- CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
- LINKFLAGS = Split(ldflags['fast']),
- disable_partial=disable_partial)
-
- # Profiled binary using gprof
- if 'prof' in needed_envs:
- makeEnv(env, 'prof', '.po',
- CCFLAGS = Split(ccflags['prof']),
- CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
- LINKFLAGS = Split(ldflags['prof']))
-
- # Profiled binary using google-pprof
- if 'perf' in needed_envs:
- makeEnv(env, 'perf', '.gpo',
- CCFLAGS = Split(ccflags['perf']),
- CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
- LINKFLAGS = Split(ldflags['perf']))
-
-# The MakeEnvirons Builder defers the full dependency collection until
-# after processing the ISA definition (due to dynamically generated
-# source files). Add this dependency to all targets so they will wait
-# until the environments are completely set up. Otherwise, a second
-# process (e.g. -j2 or higher) will try to compile the requested target,
-# not know how, and fail.
-env.Append(BUILDERS = {'MakeEnvirons' :
- Builder(action=MakeAction(makeEnvirons,
- Transform("ENVIRONS", 1)))})
-
-isa_target = '#${VARIANT_NAME}-deps'
-environs = '#${VARIANT_NAME}-environs'
-env.Depends('#all-deps', isa_target)
-env.Depends('#all-environs', environs)
-env.ScanISA(isa_target, File('arch/%s/generated/inc.d' % env['TARGET_ISA']))
-envSetup = env.MakeEnvirons(environs, isa_target)
-
-# make sure no -deps targets occur before all ISAs are complete
-env.Depends(isa_target, '#all-isas')
-# likewise for -environs targets and all the -deps targets
-env.Depends(environs, '#all-deps')
+# Debug binary
+if 'debug' in needed_envs:
+ makeEnv(env, 'debug', '.do',
+ CCFLAGS = Split(ccflags['debug']),
+ CPPDEFINES = ['DEBUG', 'TRACING_ON=1'],
+ LINKFLAGS = Split(ldflags['debug']))
+
+# Optimized binary
+if 'opt' in needed_envs:
+ makeEnv(env, 'opt', '.o',
+ CCFLAGS = Split(ccflags['opt']),
+ CPPDEFINES = ['TRACING_ON=1'],
+ LINKFLAGS = Split(ldflags['opt']))
+
+# "Fast" binary
+if 'fast' in needed_envs:
+ disable_partial = \
+ env.get('BROKEN_INCREMENTAL_LTO', False) and \
+ GetOption('force_lto')
+ makeEnv(env, 'fast', '.fo', strip = True,
+ CCFLAGS = Split(ccflags['fast']),
+ CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
+ LINKFLAGS = Split(ldflags['fast']),
+ disable_partial=disable_partial)
+
+# Profiled binary using gprof
+if 'prof' in needed_envs:
+ makeEnv(env, 'prof', '.po',
+ CCFLAGS = Split(ccflags['prof']),
+ CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
+ LINKFLAGS = Split(ldflags['prof']))
+
+# Profiled binary using google-pprof
+if 'perf' in needed_envs:
+ makeEnv(env, 'perf', '.gpo',
+ CCFLAGS = Split(ccflags['perf']),
+ CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
+ LINKFLAGS = Split(ldflags['perf']))
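As an aside on the path handling above: a brief sketch, assuming it runs inside this SConscript, of why the removed variant()/variantd() string helpers were unnecessary -- relative target names and Dir() nodes already resolve against the SConscript's variant directory.

    # Sketch only; 'label' stands in for new_env.Label ('debug', 'opt', 'fast', ...).
    label = 'opt'
    exename = 'gem5.' + label              # scons resolves this under the variant dir
    tests_dir = Dir('tests').Dir(label)    # node for <variant dir>/tests/<label>
    # tests_dir.path is what the patch now passes as variant_dir for tests/SConscript.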
diff --git a/src/arch/SConscript b/src/arch/SConscript
index e30069c04..1030be7d0 100644
--- a/src/arch/SConscript
+++ b/src/arch/SConscript
@@ -97,120 +97,130 @@ if env['BUILD_GPU']:
# Build a SCons scanner for ISA files
#
import SCons.Scanner
+import SCons.Tool
-isa_scanner = SCons.Scanner.Classic("ISAScan",
- [".isa", ".ISA"],
- "SRCDIR",
- r'^\s*##include\s+"([\w/.-]*)"')
+scanner = SCons.Scanner.Classic("ISAScan",
+ [".isa", ".ISA"],
+ "SRCDIR",
+ r'^\s*##include\s+"([\w/.-]*)"')
-env.Append(SCANNERS = isa_scanner)
+env.Append(SCANNERS=scanner)
+
+# Tell scons that when it sees a cc.inc file, it should scan it for includes.
+SCons.Tool.SourceFileScanner.add_scanner('.cc.inc', SCons.Tool.CScanner)
#
# Now create a Builder object that uses isa_parser.py to generate C++
# output from the ISA description (*.isa) files.
#
-isa_parser = File('isa_parser.py')
-
-# The emitter patches up the sources & targets to include the
-# autogenerated files as targets and isa parser itself as a source.
-def isa_desc_emitter(target, source, env):
- # List the isa parser as a source.
- source += [
- isa_parser,
- Value("ExecContext"),
- ]
-
- # Specify different targets depending on if we're running the ISA
- # parser for its dependency information, or for the generated files.
- # (As an optimization, the ISA parser detects the useless second run
- # and skips doing any work, if the first run was performed, since it
- # always generates all its files). The way we track this in SCons is the
- # <arch>_isa_outputs value in the environment (env). If it's unset, we
- # don't know what the dependencies are so we ask for generated/inc.d to
- # be generated so they can be acquired. If we know what they are, then
- # it's because we've already processed inc.d and then claim that our
- # outputs (targets) will be thus.
- isa = env['TARGET_ISA']
- key = '%s_isa_outputs' % isa
- if key in env:
- targets = [ os.path.join('generated', f) for f in env[key] ]
- else:
- targets = [ os.path.join('generated','inc.d') ]
-
- def prefix(s):
- return os.path.join(target[0].dir.up().abspath, s)
-
- return [ prefix(t) for t in targets ], source
-
-ARCH_DIR = Dir('.')
+parser_py = File('isa_parser.py')
+micro_asm_py = File('micro_asm.py')
# import ply here because SCons screws with sys.path when performing actions.
import ply
-def isa_desc_action_func(target, source, env):
- # Add the current directory to the system path so we can import files
- sys.path[0:0] = [ ARCH_DIR.srcnode().abspath ]
+def run_parser(target, source, env):
+ # Add the current directory to the system path so we can import files.
+ sys.path[0:0] = [ parser_py.dir.abspath ]
import isa_parser
- # Skip over the ISA description itself and the parser to the CPU models.
- models = [ s.get_contents() for s in source[2:] ]
parser = isa_parser.ISAParser(target[0].dir.abspath)
parser.parse_isa_desc(source[0].abspath)
-isa_desc_action = MakeAction(isa_desc_action_func, Transform("ISA DESC", 1))
-
-# Also include the CheckerCPU as one of the models if it is being
-# enabled via command line.
-isa_desc_builder = Builder(action=isa_desc_action, emitter=isa_desc_emitter)
-
-env.Append(BUILDERS = { 'ISADesc' : isa_desc_builder })
-
-# The ISA is generated twice: the first time to find out what it generates,
-# and the second time to make scons happy by telling the ISADesc builder
-# what it will make before it builds it.
-def scan_isa_deps(target, source, env):
- # Process dependency file generated by the ISA parser --
- # add the listed files to the dependency tree of the build.
- source = source[0]
- archbase = source.dir.up().path
-
- try:
- depfile = open(source.abspath, 'r')
- except:
- print "scan_isa_deps: Can't open ISA deps file '%s' in %s" % \
- (source.path,os.getcwd())
- raise
-
- # Scan through the lines
- targets = {}
- for line in depfile:
- # Read the dependency line with the format
- # <target file>: [ <dependent file>* ]
- m = re.match(r'^\s*([^:]+\.([^\.:]+))\s*:\s*(.*)', line)
- assert(m)
- targ, extn = m.group(1,2)
- deps = m.group(3).split()
-
- files = [ targ ] + deps
- for f in files:
- targets[f] = True
- # Eliminate unnecessary re-generation if we already generated it
- env.Precious(os.path.join(archbase, 'generated', f))
-
- files = [ os.path.join(archbase, 'generated', f) for f in files ]
-
- if extn == 'cc':
- Source(os.path.join(archbase,'generated', targ))
- depfile.close()
- env[env['TARGET_ISA'] + '_isa_outputs'] = targets.keys()
-
- isa = env.ISADesc(os.path.join(archbase,'isa','main.isa'))
- for t in targets:
- env.Depends('#all-isas', isa)
-
-env.Append(BUILDERS = {'ScanISA' :
- Builder(action=MakeAction(scan_isa_deps,
- Transform("NEW DEPS", 1)))})
+
+desc_action = MakeAction(run_parser, Transform("ISA DESC", 1))
+
+IsaDescBuilder = Builder(action=desc_action)
+
+
+# ISAs should use this function to set up an IsaDescBuilder and not try to
+# set one up manually.
+def ISADesc(desc, decoder_splits=1, exec_splits=1):
+ '''Set up a builder for an ISA description.
+
+ The decoder_splits and exec_splits parameters let us determine what
+ files the isa parser is actually going to generate. This needs to match
+ what files are actually generated, and there's no specific check for that
+ right now.
+
+ If the parser itself is responsible for generating a list of its products
+ and their dependencies, then that output can be used to set up the right
+ dependencies. This is what we used to do. The problem is that scons
+ fundamentally doesn't support using a build product to affect its graph
+ of possible products, dependencies, builders, etc. There are a couple ways
+ to work around that limitation.
+
+ One option is to compute dependencies while the build phase of scons is
+ running. That method can be quite complicated and cumbersome, because we
+ have to make sure our modifications are made before scons tries to
+ consume them. There's also no guarantee that this mechanism will work since
+ it subverts scons' expectations and changes things behind its back. This
+ was implemented previously and constrained the build's parallelism
+ significantly.
+
+ Another option would be to recursively call scons to have it update the
+ list of products/dependencies during the setup phase of this invocation of
+ scons. The problem with that is that it would be very difficult to make
+ the sub-invocation of scons observe the options passed to the primary one
+ in all possible cases, or to even determine conclusively what the name of
+ the scons executable is in the first place.
+
+ Possible future changes to the isa parser might make it easier to
+ determine what files it would generate, perhaps because there was a more
+ direct correspondence between input files and output files. Or, if the
+ parser could run quickly and determine what its output files would be
+ without having to actually generate those files, then it could be run
+ unconditionally without slowing down all builds or touching the output
+ files unnecessarily.
+ '''
+ generated_dir = File(desc).dir.up().Dir('generated')
+ def gen_file(name):
+ return generated_dir.File(name)
+
+ gen = []
+ def add_gen(name):
+ gen.append(gen_file(name))
+
+ # Tell scons about the various files the ISA parser will generate.
+ add_gen('decoder-g.cc.inc')
+ add_gen('decoder-ns.cc.inc')
+ add_gen('decode-method.cc.inc')
+
+ add_gen('decoder.hh')
+ add_gen('decoder-g.hh.inc')
+ add_gen('decoder-ns.hh.inc')
+
+ add_gen('exec-g.cc.inc')
+ add_gen('exec-ns.cc.inc')
+
+ add_gen('max_inst_regs.hh')
+
+
+ # These generated files are also top level sources.
+ def source_gen(name):
+ add_gen(name)
+ Source(gen_file(name))
+
+ source_gen('decoder.cc')
+
+ if decoder_splits == 1:
+ source_gen('inst-constrs.cc')
+ else:
+ for i in range(1, decoder_splits + 1):
+ source_gen('inst-constrs-%d.cc' % i)
+
+ if exec_splits == 1:
+ source_gen('generic_cpu_exec.cc')
+ else:
+ for i in range(1, exec_splits + 1):
+ source_gen('generic_cpu_exec_%d.cc' % i)
+
+ # Actually create the builder.
+ sources = [desc, parser_py, micro_asm_py]
+ IsaDescBuilder(target=gen, source=sources, env=env)
+ return gen
+
+Export('ISADesc')
DebugFlag('IntRegs')
DebugFlag('FloatRegs')
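To illustrate the split handling in ISADesc() above, a small standalone sketch (the helper name split_names is hypothetical; the naming patterns are taken from the loops in the function) listing the split source files the wrapper declares for given split counts:

    # Sketch only; mirrors the decoder_splits/exec_splits loops in ISADesc().
    def split_names(decoder_splits=1, exec_splits=1):
        names = []
        if decoder_splits == 1:
            names.append('inst-constrs.cc')
        else:
            names += ['inst-constrs-%d.cc' % i
                      for i in range(1, decoder_splits + 1)]
        if exec_splits == 1:
            names.append('generic_cpu_exec.cc')
        else:
            names += ['generic_cpu_exec_%d.cc' % i
                      for i in range(1, exec_splits + 1)]
        return names

    # With the ARM values used later in this patch (decoder_splits=3,
    # exec_splits=6) this yields inst-constrs-1.cc .. inst-constrs-3.cc and
    # generic_cpu_exec_1.cc .. generic_cpu_exec_6.cc, alongside decoder.cc and
    # the .hh/.inc products that ISADesc() always declares.
    print(split_names(3, 6))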
diff --git a/src/arch/alpha/SConscript b/src/arch/alpha/SConscript
index cf528ebf1..80116122b 100644
--- a/src/arch/alpha/SConscript
+++ b/src/arch/alpha/SConscript
@@ -61,4 +61,4 @@ if env['TARGET_ISA'] == 'alpha':
SimObject('AlphaTLB.py')
# Add in files generated by the ISA description.
- env.ISADesc('isa/main.isa')
+ ISADesc('isa/main.isa')
diff --git a/src/arch/arm/SConscript b/src/arch/arm/SConscript
index 1aab3dc1b..3b68fb647 100644
--- a/src/arch/arm/SConscript
+++ b/src/arch/arm/SConscript
@@ -98,4 +98,4 @@ if env['TARGET_ISA'] == 'arm':
DebugFlag('TLBVerbose')
# Add files generated by the ISA description.
- env.ISADesc('isa/main.isa')
+ ISADesc('isa/main.isa', decoder_splits=3, exec_splits=6)
diff --git a/src/arch/isa_parser.py b/src/arch/isa_parser.py
index 4c3902fc9..7764c344c 100755
--- a/src/arch/isa_parser.py
+++ b/src/arch/isa_parser.py
@@ -1573,46 +1573,31 @@ class ISAParser(Grammar):
# These small files make it much clearer how this tool works, since
# you directly see the chunks emitted as files that are #include'd.
def write_top_level_files(self):
- dep = self.open('inc.d', bare=True)
-
# decoder header - everything depends on this
file = 'decoder.hh'
with self.open(file) as f:
- inc = []
-
fn = 'decoder-g.hh.inc'
assert(fn in self.files)
f.write('#include "%s"\n' % fn)
- inc.append(fn)
fn = 'decoder-ns.hh.inc'
assert(fn in self.files)
f.write('namespace %s {\n#include "%s"\n}\n'
% (self.namespace, fn))
- inc.append(fn)
-
- print >>dep, file+':', ' '.join(inc)
# decoder method - cannot be split
file = 'decoder.cc'
with self.open(file) as f:
- inc = []
-
fn = 'decoder-g.cc.inc'
assert(fn in self.files)
f.write('#include "%s"\n' % fn)
- inc.append(fn)
fn = 'decoder.hh'
f.write('#include "%s"\n' % fn)
- inc.append(fn)
fn = 'decode-method.cc.inc'
# is guaranteed to have been written for parse to complete
f.write('#include "%s"\n' % fn)
- inc.append(fn)
-
- print >>dep, file+':', ' '.join(inc)
extn = re.compile('(\.[^\.]+)$')
@@ -1625,16 +1610,12 @@ class ISAParser(Grammar):
else:
file = file_
with self.open(file) as f:
- inc = []
-
fn = 'decoder-g.cc.inc'
assert(fn in self.files)
f.write('#include "%s"\n' % fn)
- inc.append(fn)
fn = 'decoder.hh'
f.write('#include "%s"\n' % fn)
- inc.append(fn)
fn = 'decoder-ns.cc.inc'
assert(fn in self.files)
@@ -1643,9 +1624,6 @@ class ISAParser(Grammar):
print >>f, '#define __SPLIT %u' % i
print >>f, '#include "%s"' % fn
print >>f, '}'
- inc.append(fn)
-
- print >>dep, file+':', ' '.join(inc)
# instruction execution per-CPU model
splits = self.splits[self.get_file('exec')]
@@ -1656,18 +1634,14 @@ class ISAParser(Grammar):
else:
file = cpu.filename
with self.open(file) as f:
- inc = []
-
fn = 'exec-g.cc.inc'
assert(fn in self.files)
f.write('#include "%s"\n' % fn)
- inc.append(fn)
f.write(cpu.includes+"\n")
fn = 'decoder.hh'
f.write('#include "%s"\n' % fn)
- inc.append(fn)
fn = 'exec-ns.cc.inc'
assert(fn in self.files)
@@ -1678,10 +1652,6 @@ class ISAParser(Grammar):
print >>f, '#define __SPLIT %u' % i
print >>f, '#include "%s"' % fn
print >>f, '}'
- inc.append(fn)
-
- inc.append("decoder.hh")
- print >>dep, file+':', ' '.join(inc)
# max_inst_regs.hh
self.update('max_inst_regs.hh',
@@ -1689,10 +1659,6 @@ class ISAParser(Grammar):
const int MaxInstSrcRegs = %(maxInstSrcRegs)d;
const int MaxInstDestRegs = %(maxInstDestRegs)d;
const int MaxMiscDestRegs = %(maxMiscDestRegs)d;\n}\n''' % self)
- print >>dep, 'max_inst_regs.hh:'
-
- dep.close()
-
scaremonger_template ='''// DO NOT EDIT
// This file was automatically generated from an ISA description:
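For reference, a short sketch (illustration only; the sample line is reconstructed from the includes written above for decoder.cc) of the dependency-line format the parser used to emit into inc.d and the regex the removed scan_isa_deps() used to read it back -- the round trip that the statically declared targets in ISADesc() make unnecessary:

    # Sketch only; the format and regex come from the code removed in this patch.
    import re

    line = 'decoder.cc: decoder-g.cc.inc decoder.hh decode-method.cc.inc'
    m = re.match(r'^\s*([^:]+\.([^\.:]+))\s*:\s*(.*)', line)
    targ, extn = m.group(1, 2)          # 'decoder.cc', 'cc'
    deps = m.group(3).split()           # ['decoder-g.cc.inc', 'decoder.hh', ...]
    print(targ, extn, deps)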
diff --git a/src/arch/mips/SConscript b/src/arch/mips/SConscript
index 2bc7eca99..d30c28da1 100644
--- a/src/arch/mips/SConscript
+++ b/src/arch/mips/SConscript
@@ -59,4 +59,4 @@ if env['TARGET_ISA'] == 'mips':
DebugFlag('MipsPRA')
- env.ISADesc('isa/main.isa')
+ ISADesc('isa/main.isa')
diff --git a/src/arch/null/generated/inc.d b/src/arch/null/generated/inc.d
deleted file mode 100644
index e69de29bb..000000000
--- a/src/arch/null/generated/inc.d
+++ /dev/null
diff --git a/src/arch/power/SConscript b/src/arch/power/SConscript
index e26035cbe..473c312ca 100644
--- a/src/arch/power/SConscript
+++ b/src/arch/power/SConscript
@@ -59,4 +59,4 @@ if env['TARGET_ISA'] == 'power':
DebugFlag('Power')
- env.ISADesc('isa/main.isa')
+ ISADesc('isa/main.isa')
diff --git a/src/arch/riscv/SConscript b/src/arch/riscv/SConscript
index 5aaac6be4..4655057e5 100644
--- a/src/arch/riscv/SConscript
+++ b/src/arch/riscv/SConscript
@@ -70,4 +70,4 @@ if env['TARGET_ISA'] == 'riscv':
DebugFlag('RiscvTLB')
# Add in files generated by the ISA description.
- env.ISADesc('isa/main.isa')
+ ISADesc('isa/main.isa')
diff --git a/src/arch/sparc/SConscript b/src/arch/sparc/SConscript
index f05f30469..afffd8afb 100644
--- a/src/arch/sparc/SConscript
+++ b/src/arch/sparc/SConscript
@@ -61,4 +61,4 @@ if env['TARGET_ISA'] == 'sparc':
DebugFlag('Sparc', "Generic SPARC ISA stuff")
DebugFlag('RegisterWindows', "Register window manipulation")
- env.ISADesc('isa/main.isa')
+ ISADesc('isa/main.isa')
diff --git a/src/arch/x86/SConscript b/src/arch/x86/SConscript
index bdc012259..6f20f54b1 100644
--- a/src/arch/x86/SConscript
+++ b/src/arch/x86/SConscript
@@ -307,7 +307,7 @@ if env['TARGET_ISA'] == 'x86':
# Add in files generated by the ISA description.
- isa_desc_files = env.ISADesc('isa/main.isa')
+ isa_desc_files = ISADesc('isa/main.isa')
for f in isa_desc_files:
# Add in python file dependencies that won't be caught otherwise
for pyfile in python_files: