From b8efd0e854ae568dae33fe3d24f67054ed016d19 Mon Sep 17 00:00:00 2001
From: Gabe Black
Date: Sat, 14 Oct 2017 21:39:21 -0700
Subject: scons: Stop generating inc.d in the isa parser.

Generating dependency/build product information in the isa parser breaks
scons' idea of how a build is supposed to work. Arm twisting it into
working forced a lot of false dependencies, which slowed down the build.

Change-Id: Iadee8c930fd7c80136d200d69870df7672a6b3ca
Reviewed-on: https://gem5-review.googlesource.com/5081
Reviewed-by: Jason Lowe-Power
Maintainer: Gabe Black
---
 src/arch/SConscript | 206 +++++++++++++++++++++++++++-------------------------
 1 file changed, 108 insertions(+), 98 deletions(-)

diff --git a/src/arch/SConscript b/src/arch/SConscript
index e30069c04..1030be7d0 100644
--- a/src/arch/SConscript
+++ b/src/arch/SConscript
@@ -97,120 +97,130 @@ if env['BUILD_GPU']:
 # Build a SCons scanner for ISA files
 #
 import SCons.Scanner
+import SCons.Tool
 
-isa_scanner = SCons.Scanner.Classic("ISAScan",
-                                    [".isa", ".ISA"],
-                                    "SRCDIR",
-                                    r'^\s*##include\s+"([\w/.-]*)"')
+scanner = SCons.Scanner.Classic("ISAScan",
+                                [".isa", ".ISA"],
+                                "SRCDIR",
+                                r'^\s*##include\s+"([\w/.-]*)"')
 
-env.Append(SCANNERS = isa_scanner)
+env.Append(SCANNERS=scanner)
+
+# Tell scons that when it sees a cc.inc file, it should scan it for includes.
+SCons.Tool.SourceFileScanner.add_scanner('.cc.inc', SCons.Tool.CScanner)
 
 #
 # Now create a Builder object that uses isa_parser.py to generate C++
 # output from the ISA description (*.isa) files.
 #
 
-isa_parser = File('isa_parser.py')
-
-# The emitter patches up the sources & targets to include the
-# autogenerated files as targets and isa parser itself as a source.
-def isa_desc_emitter(target, source, env):
-    # List the isa parser as a source.
-    source += [
-        isa_parser,
-        Value("ExecContext"),
-        ]
-
-    # Specify different targets depending on if we're running the ISA
-    # parser for its dependency information, or for the generated files.
-    # (As an optimization, the ISA parser detects the useless second run
-    # and skips doing any work, if the first run was performed, since it
-    # always generates all its files). The way we track this in SCons is the
-    # <arch>_isa_outputs value in the environment (env). If it's unset, we
-    # don't know what the dependencies are so we ask for generated/inc.d to
-    # be generated so they can be acquired. If we know what they are, then
-    # it's because we've already processed inc.d and then claim that our
-    # outputs (targets) will be thus.
-    isa = env['TARGET_ISA']
-    key = '%s_isa_outputs' % isa
-    if key in env:
-        targets = [ os.path.join('generated', f) for f in env[key] ]
-    else:
-        targets = [ os.path.join('generated','inc.d') ]
-
-    def prefix(s):
-        return os.path.join(target[0].dir.up().abspath, s)
-
-    return [ prefix(t) for t in targets ], source
-
-ARCH_DIR = Dir('.')
+parser_py = File('isa_parser.py')
+micro_asm_py = File('micro_asm.py')
 
 # import ply here because SCons screws with sys.path when performing actions.
 import ply
 
-def isa_desc_action_func(target, source, env):
-    # Add the current directory to the system path so we can import files
-    sys.path[0:0] = [ ARCH_DIR.srcnode().abspath ]
+def run_parser(target, source, env):
+    # Add the current directory to the system path so we can import files.
+    sys.path[0:0] = [ parser_py.dir.abspath ]
     import isa_parser
 
-    # Skip over the ISA description itself and the parser to the CPU models.
-    models = [ s.get_contents() for s in source[2:] ]
     parser = isa_parser.ISAParser(target[0].dir.abspath)
     parser.parse_isa_desc(source[0].abspath)
 
-isa_desc_action = MakeAction(isa_desc_action_func, Transform("ISA DESC", 1))
-
-# Also include the CheckerCPU as one of the models if it is being
-# enabled via command line.
-isa_desc_builder = Builder(action=isa_desc_action, emitter=isa_desc_emitter)
-
-env.Append(BUILDERS = { 'ISADesc' : isa_desc_builder })
-
-# The ISA is generated twice: the first time to find out what it generates,
-# and the second time to make scons happy by telling the ISADesc builder
-# what it will make before it builds it.
-def scan_isa_deps(target, source, env):
-    # Process dependency file generated by the ISA parser --
-    # add the listed files to the dependency tree of the build.
-    source = source[0]
-    archbase = source.dir.up().path
-
-    try:
-        depfile = open(source.abspath, 'r')
-    except:
-        print "scan_isa_deps: Can't open ISA deps file '%s' in %s" % \
-              (source.path,os.getcwd())
-        raise
-
-    # Scan through the lines
-    targets = {}
-    for line in depfile:
-        # Read the dependency line with the format
-        # <target>: [ <dependency>* ]
-        m = re.match(r'^\s*([^:]+\.([^\.:]+))\s*:\s*(.*)', line)
-        assert(m)
-        targ, extn = m.group(1,2)
-        deps = m.group(3).split()
-
-        files = [ targ ] + deps
-        for f in files:
-            targets[f] = True
-            # Eliminate unnecessary re-generation if we already generated it
-            env.Precious(os.path.join(archbase, 'generated', f))
-
-        files = [ os.path.join(archbase, 'generated', f) for f in files ]
-
-        if extn == 'cc':
-            Source(os.path.join(archbase,'generated', targ))
-    depfile.close()
-
-    env[env['TARGET_ISA'] + '_isa_outputs'] = targets.keys()
-
-    isa = env.ISADesc(os.path.join(archbase,'isa','main.isa'))
-    for t in targets:
-        env.Depends('#all-isas', isa)
-
-env.Append(BUILDERS = {'ScanISA' :
-                       Builder(action=MakeAction(scan_isa_deps,
-                                                 Transform("NEW DEPS", 1)))})
+
+desc_action = MakeAction(run_parser, Transform("ISA DESC", 1))
+
+IsaDescBuilder = Builder(action=desc_action)
+
+
+# ISAs should use this function to set up an IsaDescBuilder and not try to
+# set one up manually.
+def ISADesc(desc, decoder_splits=1, exec_splits=1):
+    '''Set up a builder for an ISA description.
+
+    The decoder_splits and exec_splits parameters let us predict what files
+    the isa parser is actually going to generate. These values need to match
+    what the parser actually generates, and there's no specific check for
+    that right now.
+
+    One approach is to have the parser itself generate a list of its products
+    and their dependencies, and then use that output to set up the right
+    dependencies. This is what we used to do. The problem is that scons
+    fundamentally doesn't support using a build product to affect its graph
+    of possible products, dependencies, builders, etc. There are a couple of
+    ways to work around that limitation.
+
+    One option is to compute dependencies while the build phase of scons is
+    running. That method can be quite complicated and cumbersome, because we
+    have to make sure our modifications are made before scons tries to
+    consume them. There's also no guarantee that this mechanism will work,
+    since it subverts scons' expectations and changes things behind its back.
+    This was implemented previously and constrained the build's parallelism
+    significantly.
+
+    Another option would be to recursively call scons to have it update the
+    list of products/dependencies during the setup phase of this invocation
+    of scons.
+    The problem with that is that it would be very difficult to make the
+    sub-invocation of scons observe the options passed to the primary one in
+    all possible cases, or even to determine conclusively what the name of
+    the scons executable is in the first place.
+
+    Possible future changes to the isa parser might make it easier to
+    determine what files it would generate, perhaps because there would be a
+    more direct correspondence between input files and output files. Or, if
+    the parser could run quickly and determine what its output files would be
+    without having to actually generate those files, then it could be run
+    unconditionally without slowing down all builds or touching the output
+    files unnecessarily.
+    '''
+    generated_dir = File(desc).dir.up().Dir('generated')
+    def gen_file(name):
+        return generated_dir.File(name)
+
+    gen = []
+    def add_gen(name):
+        gen.append(gen_file(name))
+
+    # Tell scons about the various files the ISA parser will generate.
+    add_gen('decoder-g.cc.inc')
+    add_gen('decoder-ns.cc.inc')
+    add_gen('decode-method.cc.inc')
+
+    add_gen('decoder.hh')
+    add_gen('decoder-g.hh.inc')
+    add_gen('decoder-ns.hh.inc')
+
+    add_gen('exec-g.cc.inc')
+    add_gen('exec-ns.cc.inc')
+
+    add_gen('max_inst_regs.hh')
+
+
+    # These generated files are also top level sources.
+    def source_gen(name):
+        add_gen(name)
+        Source(gen_file(name))
+
+    source_gen('decoder.cc')
+
+    if decoder_splits == 1:
+        source_gen('inst-constrs.cc')
+    else:
+        for i in range(1, decoder_splits + 1):
+            source_gen('inst-constrs-%d.cc' % i)
+
+    if exec_splits == 1:
+        source_gen('generic_cpu_exec.cc')
+    else:
+        for i in range(1, exec_splits + 1):
+            source_gen('generic_cpu_exec_%d.cc' % i)
+
+    # Actually create the builder.
+    sources = [desc, parser_py, micro_asm_py]
+    IsaDescBuilder(target=gen, source=sources, env=env)
+    return gen
+
+Export('ISADesc')
 
 DebugFlag('IntRegs')
 DebugFlag('FloatRegs')
-- 
cgit v1.2.3
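
After this change, each architecture's SConscript is expected to describe its generated files through the exported ISADesc helper rather than through the old inc.d/ScanISA machinery. The fragment below is a minimal sketch of what such a call might look like; the description path and split counts are illustrative assumptions, not values taken from this patch, and the split counts must match how the isa parser actually splits its output for that ISA.

    # Hypothetical fragment of an architecture's SConscript (illustrative only).
    Import('*')

    # Hand the shared IsaDescBuilder this ISA's top-level description file.
    # The split counts below are placeholders; they must agree with how the
    # parser splits its inst-constrs and generic_cpu_exec output for this ISA.
    ISADesc('isa/main.isa', decoder_splits=1, exec_splits=1)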