Diffstat (limited to 'src/python')
37 files changed, 946 insertions, 395 deletions
diff --git a/src/python/SConscript b/src/python/SConscript index 4407e403d..3a9def9a8 100644 --- a/src/python/SConscript +++ b/src/python/SConscript @@ -25,183 +25,82 @@ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# Authors: Steve Reinhardt +# Nathan Binkert import os, os.path, re, sys +from zipfile import PyZipFile -Import('env') +# handy function for path joins +def join(*args): + return os.path.normpath(os.path.join(*args)) -import scons_helper - -def WriteEmbeddedPyFile(target, source, path, name, ext, filename): - if isinstance(source, str): - source = file(source, 'r') - - if isinstance(target, str): - target = file(target, 'w') - - print >>target, "AddModule(%s, %s, %s, %s, '''\\" % \ - (`path`, `name`, `ext`, `filename`) - - for line in source: - line = line - # escape existing backslashes - line = line.replace('\\', '\\\\') - # escape existing triple quotes - line = line.replace("'''", r"\'\'\'") - - print >>target, line, - - print >>target, "''')" - print >>target - -def WriteCFile(target, source, name): - if isinstance(source, str): - source = file(source, 'r') - - if isinstance(target, str): - target = file(target, 'w') - - print >>target, 'const char %s_string[] = {' % name - - count = 0 - from array import array - try: - while True: - foo = array('B') - foo.fromfile(source, 10000) - l = [ str(i) for i in foo.tolist() ] - count += len(l) - for i in xrange(0,9999,20): - print >>target, ','.join(l[i:i+20]) + ',' - except EOFError: - l = [ str(i) for i in foo.tolist() ] - count += len(l) - for i in xrange(0,len(l),20): - print >>target, ','.join(l[i:i+20]) + ',' - print >>target, ','.join(l[i:]) + ',' - - print >>target, '};' - print >>target, 'const int %s_length = %d;' % (name, count) - print >>target - -def splitpath(path): - dir,file = os.path.split(path) - path = [] - assert(file) - while dir: - dir,base = os.path.split(dir) - path.insert(0, base) - return path, file - -def MakeEmbeddedPyFile(target, source, env): - target = file(str(target[0]), 'w') - - tree = {} - for src in source: - src = str(src) - path,pyfile = splitpath(src) - node = tree - for dir in path: - if not node.has_key(dir): - node[dir] = { } - node = node[dir] - - name,ext = pyfile.split('.') - if name == '__init__': - node['.hasinit'] = True - node[pyfile] = (src,name,ext,src) - - done = False - while not done: - done = True - for name,entry in tree.items(): - if not isinstance(entry, dict): continue - if entry.has_key('.hasinit'): continue - - done = False - del tree[name] - for key,val in entry.iteritems(): - if tree.has_key(key): - raise NameError, \ - "dir already has %s can't add it again" % key - tree[key] = val - - files = [] - def populate(node, path = []): - names = node.keys() - names.sort() - for name in names: - if name == '.hasinit': - continue - - entry = node[name] - if isinstance(entry, dict): - if not entry.has_key('.hasinit'): - raise NameError, 'package directory missing __init__.py' - populate(entry, path + [ name ]) - else: - pyfile,name,ext,filename = entry - files.append((pyfile, path, name, ext, filename)) - populate(tree) - - for pyfile, path, name, ext, filename in files: - WriteEmbeddedPyFile(target, pyfile, path, name, ext, filename) +Import('env') +# This SConscript is in charge of collecting .py files and generating +# a zip archive that is appended to the m5 binary. 
+ +# List of files & directories to include in the zip file. To include +# a package, list only the root directory of the package, not any +# internal .py files (else they will get the path stripped off when +# they are imported into the zip file). +pyzip_files = [] + +# List of additional files on which the zip archive depends, but which +# are not included in pyzip_files... i.e. individual .py files within +# a package. +pyzip_dep_files = [] + +# Add the specified package to the zip archive. Adds the directory to +# pyzip_files and all included .py files to pyzip_dep_files. +def addPkg(pkgdir): + pyzip_files.append(pkgdir) + origdir = os.getcwd() + srcdir = join(Dir('.').srcnode().abspath, pkgdir) + os.chdir(srcdir) + for path, dirs, files in os.walk('.'): + for i,dir in enumerate(dirs): + if dir == 'SCCS': + del dirs[i] + break + + for f in files: + if f.endswith('.py'): + pyzip_dep_files.append(join(pkgdir, path, f)) + + os.chdir(origdir) + +# Generate Python file that contains a dict specifying the current +# build_env flags. def MakeDefinesPyFile(target, source, env): f = file(str(target[0]), 'w') - print >>f, "import __main__" - print >>f, "__main__.m5_build_env = ", + print >>f, "m5_build_env = ", print >>f, source[0] f.close() -CFileCounter = 0 -def MakePythonCFile(target, source, env): - global CFileCounter - target = file(str(target[0]), 'w') - - print >>target, '''\ -#include "base/embedfile.hh" - -namespace { -''' - for src in source: - src = str(src) - fname = os.path.basename(src) - name = 'embedded_file%d' % CFileCounter - CFileCounter += 1 - WriteCFile(target, src, name) - print >>target, '''\ -EmbedMap %(name)s("%(fname)s", - %(name)s_string, %(name)s_length); - -''' % locals() - print >>target, '''\ - -/* namespace */ } -''' - -# base list of .py files to embed -embedded_py_files = [ os.path.join(env['ROOT'], 'util/pbs/jobfile.py') ] -# add all .py files in python/m5 -objpath = os.path.join(env['SRCDIR'], 'python', 'm5') -for root, dirs, files in os.walk(objpath, topdown=True): - for i,dir in enumerate(dirs): - if dir == 'SCCS': - del dirs[i] - break - - assert(root.startswith(objpath)) - for f in files: - if f.endswith('.py'): - embedded_py_files.append(os.path.join(root, f)) - -embedfile_hh = os.path.join(env['SRCDIR'], 'base/embedfile.hh') - optionDict = dict([(opt, env[opt]) for opt in env.ExportOptions]) -env.Command('defines.py', Value(optionDict), MakeDefinesPyFile) - -env.Command('embedded_py.py', embedded_py_files, MakeEmbeddedPyFile) -env.Depends('embedded_py.cc', embedfile_hh) -env.Command('embedded_py.cc', - ['string_importer.py', 'defines.py', 'embedded_py.py'], - MakePythonCFile) +env.Command('m5/defines.py', Value(optionDict), MakeDefinesPyFile) + +# Now specify the packages & files for the zip archive. +addPkg('m5') +pyzip_files.append('m5/defines.py') +pyzip_files.append(join(env['ROOT'], 'util/pbs/jobfile.py')) + +env.Command(['swig/cc_main_wrap.cc', 'm5/cc_main.py'], + 'swig/cc_main.i', + '$SWIG $SWIGFLAGS -outdir ${TARGETS[1].dir} ' + '-o ${TARGETS[0]} $SOURCES') + +pyzip_dep_files.append('m5/cc_main.py') + +# Action function to build the zip archive. Uses the PyZipFile module +# included in the standard Python library. +def buildPyZip(target, source, env): + pzf = PyZipFile(str(target[0]), 'w') + for s in source: + pzf.writepy(str(s)) + +# Add the zip file target to the environment. 
+env.Command('m5py.zip', pyzip_files, buildPyZip) +env.Depends('m5py.zip', pyzip_dep_files) diff --git a/src/python/m5/__init__.py b/src/python/m5/__init__.py index 9bb68a090..828165d15 100644 --- a/src/python/m5/__init__.py +++ b/src/python/m5/__init__.py @@ -23,24 +23,30 @@ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# Authors: Nathan Binkert +# Steve Reinhardt + +import sys, os, time, atexit, optparse + +# import the SWIG-wrapped main C++ functions +import cc_main +# import a few SWIG-wrapped items (those that are likely to be used +# directly by user scripts) completely into this module for +# convenience +from cc_main import simulate, SimLoopExitEvent, setCheckpointDir -import sys, os +# import the m5 compile options +import defines # define this here so we can use it right away if necessary def panic(string): print >>sys.stderr, 'panic:', string sys.exit(1) -def m5execfile(f, global_dict): - # copy current sys.path - oldpath = sys.path[:] - # push file's directory onto front of path - sys.path.insert(0, os.path.abspath(os.path.dirname(f))) - execfile(f, global_dict) - # restore original path - sys.path = oldpath - -# Prepend given directory to system module search path. +# Prepend given directory to system module search path. We may not +# need this anymore if we can structure our config library more like a +# Python package. def AddToPath(path): # if it's a relative path and we know what directory the current # python script is in, make the path relative to that directory. @@ -51,24 +57,236 @@ def AddToPath(path): # so place the new dir right after that. sys.path.insert(1, path) -# find the m5 compile options: must be specified as a dict in -# __main__.m5_build_env. -import __main__ -if not hasattr(__main__, 'm5_build_env'): - panic("__main__ must define m5_build_env") + +# The m5 module's pointer to the parsed options object +options = None + + +# User should call this function after calling parse_args() to pass +# parsed standard option values back into the m5 module for +# processing. +def setStandardOptions(_options): + # Set module global var + global options + options = _options + # tell C++ about output directory + cc_main.setOutputDir(options.outdir) + +# Callback to set trace flags. Not necessarily the best way to do +# things in the long run (particularly if we change how these global +# options are handled). 
+def setTraceFlags(option, opt_str, value, parser): + objects.Trace.flags = value + +def setTraceStart(option, opt_str, value, parser): + objects.Trace.start = value + +def setTraceFile(option, opt_str, value, parser): + objects.Trace.file = value + +def noPCSymbol(option, opt_str, value, parser): + objects.ExecutionTrace.pc_symbol = False + +def noPrintCycle(option, opt_str, value, parser): + objects.ExecutionTrace.print_cycle = False + +def noPrintOpclass(option, opt_str, value, parser): + objects.ExecutionTrace.print_opclass = False + +def noPrintThread(option, opt_str, value, parser): + objects.ExecutionTrace.print_thread = False + +def noPrintEA(option, opt_str, value, parser): + objects.ExecutionTrace.print_effaddr = False + +def noPrintData(option, opt_str, value, parser): + objects.ExecutionTrace.print_data = False + +def printFetchseq(option, opt_str, value, parser): + objects.ExecutionTrace.print_fetchseq = True + +def printCpseq(option, opt_str, value, parser): + objects.ExecutionTrace.print_cpseq = True + +def dumpOnExit(option, opt_str, value, parser): + objects.Trace.dump_on_exit = True + +def debugBreak(option, opt_str, value, parser): + objects.Debug.break_cycles = value + +def statsTextFile(option, opt_str, value, parser): + objects.Statistics.text_file = value + +# Standard optparse options. Need to be explicitly included by the +# user script when it calls optparse.OptionParser(). +standardOptions = [ + optparse.make_option("--outdir", type="string", default="."), + optparse.make_option("--traceflags", type="string", action="callback", + callback=setTraceFlags), + optparse.make_option("--tracestart", type="int", action="callback", + callback=setTraceStart), + optparse.make_option("--tracefile", type="string", action="callback", + callback=setTraceFile), + optparse.make_option("--nopcsymbol", + action="callback", callback=noPCSymbol, + help="Disable PC symbols in trace output"), + optparse.make_option("--noprintcycle", + action="callback", callback=noPrintCycle, + help="Don't print cycle numbers in trace output"), + optparse.make_option("--noprintopclass", + action="callback", callback=noPrintOpclass, + help="Don't print op class type in trace output"), + optparse.make_option("--noprintthread", + action="callback", callback=noPrintThread, + help="Don't print thread number in trace output"), + optparse.make_option("--noprinteffaddr", + action="callback", callback=noPrintEA, + help="Don't print effective address in trace output"), + optparse.make_option("--noprintdata", + action="callback", callback=noPrintData, + help="Don't print result data in trace output"), + optparse.make_option("--printfetchseq", + action="callback", callback=printFetchseq, + help="Print fetch sequence numbers in trace output"), + optparse.make_option("--printcpseq", + action="callback", callback=printCpseq, + help="Print correct path sequence numbers in trace output"), + optparse.make_option("--dumponexit", + action="callback", callback=dumpOnExit, + help="Dump trace buffer on exit"), + optparse.make_option("--debugbreak", type="int", metavar="CYCLE", + action="callback", callback=debugBreak, + help="Cycle to create a breakpoint"), + optparse.make_option("--statsfile", type="string", action="callback", + callback=statsTextFile, metavar="FILE", + help="Sets the output file for the statistics") + ] # make a SmartDict out of the build options for our local use import smartdict build_env = smartdict.SmartDict() -build_env.update(__main__.m5_build_env) +build_env.update(defines.m5_build_env) # make a 
SmartDict out of the OS environment too env = smartdict.SmartDict() env.update(os.environ) -# import the main m5 config code -from config import * -# import the built-in object definitions -from objects import * +# Function to provide to C++ so it can look up instances based on paths +def resolveSimObject(name): + obj = config.instanceDict[name] + return obj.getCCObject() + +# The final hook to generate .ini files. Called from the user script +# once the config is built. +def instantiate(root): + config.ticks_per_sec = float(root.clock.frequency) + # ugly temporary hack to get output to config.ini + sys.stdout = file(os.path.join(options.outdir, 'config.ini'), 'w') + root.print_ini() + sys.stdout.close() # close config.ini + sys.stdout = sys.__stdout__ # restore to original + cc_main.loadIniFile(resolveSimObject) # load config.ini into C++ + root.createCCObject() + root.connectPorts() + cc_main.finalInit() + noDot = True # temporary until we fix dot + if not noDot: + dot = pydot.Dot() + instance.outputDot(dot) + dot.orientation = "portrait" + dot.size = "8.5,11" + dot.ranksep="equally" + dot.rank="samerank" + dot.write("config.dot") + dot.write_ps("config.ps") + +# Export curTick to user script. +def curTick(): + return cc_main.cvar.curTick + +# register our C++ exit callback function with Python +atexit.register(cc_main.doExitCleanup) + +# This import allows user scripts to reference 'm5.objects.Foo' after +# just doing an 'import m5' (without an 'import m5.objects'). May not +# matter since most scripts will probably 'from m5.objects import *'. +import objects + +def doQuiesce(root): + quiesce = cc_main.createCountedQuiesce() + unready_objects = root.startQuiesce(quiesce, True) + # If we've got some objects that can't quiesce immediately, then simulate + if unready_objects > 0: + quiesce.setCount(unready_objects) + simulate() + cc_main.cleanupCountedQuiesce(quiesce) + +def resume(root): + root.resume() + +def checkpoint(root): + if not isinstance(root, objects.Root): + raise TypeError, "Object is not a root object. Checkpoint must be called on a root object." + doQuiesce(root) + print "Writing checkpoint" + cc_main.serializeAll() + resume(root) + +def restoreCheckpoint(root): + print "Restoring from checkpoint" + cc_main.unserializeAll() + +def changeToAtomic(system): + if not isinstance(system, objects.Root) and not isinstance(system, System): + raise TypeError, "Object is not a root or system object. Checkpoint must be " + "called on a root object." + doQuiesce(system) + print "Changing memory mode to atomic" + system.changeTiming(cc_main.SimObject.Atomic) + resume(system) + +def changeToTiming(system): + if not isinstance(system, objects.Root) and not isinstance(system, System): + raise TypeError, "Object is not a root or system object. Checkpoint must be " + "called on a root object." 
+ doQuiesce(system) + print "Changing memory mode to timing" + system.changeTiming(cc_main.SimObject.Timing) + resume(system) + +def switchCpus(cpuList): + if not isinstance(cpuList, list): + raise RuntimeError, "Must pass a list to this function" + for i in cpuList: + if not isinstance(i, tuple): + raise RuntimeError, "List must have tuples of (oldCPU,newCPU)" + + [old_cpus, new_cpus] = zip(*cpuList) + + for cpu in old_cpus: + if not isinstance(cpu, objects.BaseCPU): + raise TypeError, "%s is not of type BaseCPU", cpu + for cpu in new_cpus: + if not isinstance(cpu, objects.BaseCPU): + raise TypeError, "%s is not of type BaseCPU", cpu + # Quiesce all of the individual CPUs + quiesce = cc_main.createCountedQuiesce() + unready_cpus = 0 + for old_cpu in old_cpus: + unready_cpus += old_cpu.startQuiesce(quiesce, False) + # If we've got some objects that can't quiesce immediately, then simulate + if unready_cpus > 0: + quiesce.setCount(unready_cpus) + simulate() + cc_main.cleanupCountedQuiesce(quiesce) + # Now all of the CPUs are ready to be switched out + for old_cpu in old_cpus: + old_cpu._ccObject.switchOut() + index = 0 + print "Switching CPUs" + for new_cpu in new_cpus: + new_cpu.takeOverFrom(old_cpus[index]) + new_cpu._ccObject.resume() + index += 1 diff --git a/src/python/m5/config.py b/src/python/m5/config.py index 1e25e0d09..6f2873d40 100644 --- a/src/python/m5/config.py +++ b/src/python/m5/config.py @@ -1,4 +1,4 @@ -# Copyright (c) 2004-2005 The Regents of The University of Michigan +# Copyright (c) 2004-2006 The Regents of The University of Michigan # All rights reserved. # # Redistribution and use in source and binary forms, with or without @@ -23,12 +23,14 @@ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# Authors: Steve Reinhardt +# Nathan Binkert -from __future__ import generators -import os, re, sys, types, inspect +import os, re, sys, types, inspect, copy import m5 -panic = m5.panic +from m5 import panic, cc_main from convert import * from multidict import multidict @@ -82,60 +84,24 @@ class Singleton(type): # # Once a set of Python objects have been instantiated in a hierarchy, # calling 'instantiate(obj)' (where obj is the root of the hierarchy) -# will generate a .ini file. See simple-4cpu.py for an example -# (corresponding to m5-test/simple-4cpu.ini). +# will generate a .ini file. # ##################################################################### -##################################################################### -# -# ConfigNode/SimObject classes -# -# The Python class hierarchy rooted by ConfigNode (which is the base -# class of SimObject, which in turn is the base class of all other M5 -# SimObject classes) has special attribute behavior. In general, an -# object in this hierarchy has three categories of attribute-like -# things: -# -# 1. Regular Python methods and variables. These must start with an -# underscore to be treated normally. -# -# 2. SimObject parameters. These values are stored as normal Python -# attributes, but all assignments to these attributes are checked -# against the pre-defined set of parameters stored in the class's -# _params dictionary. Assignments to attributes that do not -# correspond to predefined parameters, or that are not of the correct -# type, incur runtime errors. 
+# dict to look up SimObjects based on path +instanceDict = {} + +############################# # -# 3. Hierarchy children. The child nodes of a ConfigNode are stored -# in the node's _children dictionary, but can be accessed using the -# Python attribute dot-notation (just as they are printed out by the -# simulator). Children cannot be created using attribute assigment; -# they must be added by specifying the parent node in the child's -# constructor or using the '+=' operator. - -# The SimObject parameters are the most complex, for a few reasons. -# First, both parameter descriptions and parameter values are -# inherited. Thus parameter description lookup must go up the -# inheritance chain like normal attribute lookup, but this behavior -# must be explicitly coded since the lookup occurs in each class's -# _params attribute. Second, because parameter values can be set -# on SimObject classes (to implement default values), the parameter -# checking behavior must be enforced on class attribute assignments as -# well as instance attribute assignments. Finally, because we allow -# class specialization via inheritance (e.g., see the L1Cache class in -# the simple-4cpu.py example), we must do parameter checking even on -# class instantiation. To provide all these features, we use a -# metaclass to define most of the SimObject parameter behavior for -# this class hierarchy. +# Utility methods # -##################################################################### +############################# def isSimObject(value): return isinstance(value, SimObject) -def isSimObjSequence(value): - if not isinstance(value, (list, tuple)): +def isSimObjectSequence(value): + if not isinstance(value, (list, tuple)) or len(value) == 0: return False for val in value: @@ -144,48 +110,76 @@ def isSimObjSequence(value): return True +def isSimObjectOrSequence(value): + return isSimObject(value) or isSimObjectSequence(value) + def isNullPointer(value): return isinstance(value, NullSimObject) -# The metaclass for ConfigNode (and thus for everything that derives -# from ConfigNode, including SimObject). This class controls how new -# classes that derive from ConfigNode are instantiated, and provides -# inherited class behavior (just like a class controls how instances -# of that class are instantiated, and provides inherited instance -# behavior). +# Apply method to object. +# applyMethod(obj, 'meth', <args>) is equivalent to obj.meth(<args>) +def applyMethod(obj, meth, *args, **kwargs): + return getattr(obj, meth)(*args, **kwargs) + +# If the first argument is an (non-sequence) object, apply the named +# method with the given arguments. If the first argument is a +# sequence, apply the method to each element of the sequence (a la +# 'map'). +def applyOrMap(objOrSeq, meth, *args, **kwargs): + if not isinstance(objOrSeq, (list, tuple)): + return applyMethod(objOrSeq, meth, *args, **kwargs) + else: + return [applyMethod(o, meth, *args, **kwargs) for o in objOrSeq] + + +# The metaclass for SimObject. This class controls how new classes +# that derive from SimObject are instantiated, and provides inherited +# class behavior (just like a class controls how instances of that +# class are instantiated, and provides inherited instance behavior). 
class MetaSimObject(type): # Attributes that can be set only at initialization time init_keywords = { 'abstract' : types.BooleanType, 'type' : types.StringType } # Attributes that can be set any time - keywords = { 'check' : types.FunctionType, - 'children' : types.ListType } + keywords = { 'check' : types.FunctionType } # __new__ is called before __init__, and is where the statements # in the body of the class definition get loaded into the class's - # __dict__. We intercept this to filter out parameter assignments + # __dict__. We intercept this to filter out parameter & port assignments # and only allow "private" attributes to be passed to the base # __new__ (starting with underscore). def __new__(mcls, name, bases, dict): - # Copy "private" attributes (including special methods such as __new__) - # to the official dict. Everything else goes in _init_dict to be + # Copy "private" attributes, functions, and classes to the + # official dict. Everything else goes in _init_dict to be # filtered in __init__. cls_dict = {} + value_dict = {} for key,val in dict.items(): - if key.startswith('_'): + if key.startswith('_') or isinstance(val, (types.FunctionType, + types.TypeType)): cls_dict[key] = val - del dict[key] - cls_dict['_init_dict'] = dict + else: + # must be a param/port setting + value_dict[key] = val + cls_dict['_value_dict'] = value_dict return super(MetaSimObject, mcls).__new__(mcls, name, bases, cls_dict) - # initialization + # subclass initialization def __init__(cls, name, bases, dict): + # calls type.__init__()... I think that's a no-op, but leave + # it here just in case it's not. super(MetaSimObject, cls).__init__(name, bases, dict) # initialize required attributes - cls._params = multidict() - cls._values = multidict() - cls._anon_subclass_counter = 0 + + # class-only attributes + cls._params = multidict() # param descriptions + cls._ports = multidict() # port descriptions + + # class or instance attributes + cls._values = multidict() # param values + cls._port_map = multidict() # port bindings + cls._instantiated = False # really instantiated, cloned, or subclassed # We don't support multiple inheritance. If you want to, you # must fix multidict to deal with it properly. @@ -194,35 +188,34 @@ class MetaSimObject(type): base = bases[0] + # Set up general inheritance via multidicts. A subclass will + # inherit all its settings from the base class. The only time + # the following is not true is when we define the SimObject + # class itself (in which case the multidicts have no parent). if isinstance(base, MetaSimObject): cls._params.parent = base._params + cls._ports.parent = base._ports cls._values.parent = base._values - - # If your parent has a value in it that's a config node, clone - # it. Do this now so if we update any of the values' - # attributes we are updating the clone and not the original. 
- for key,val in base._values.iteritems(): - - # don't clone if (1) we're about to overwrite it with - # a local setting or (2) we've already cloned a copy - # from an earlier (more derived) base - if cls._init_dict.has_key(key) or cls._values.has_key(key): - continue - - if isSimObject(val): - cls._values[key] = val() - elif isSimObjSequence(val) and len(val): - cls._values[key] = [ v() for v in val ] - - # now process remaining _init_dict items - for key,val in cls._init_dict.items(): - if isinstance(val, (types.FunctionType, types.TypeType)): - type.__setattr__(cls, key, val) - + cls._port_map.parent = base._port_map + # mark base as having been subclassed + base._instantiated = True + + # Now process the _value_dict items. They could be defining + # new (or overriding existing) parameters or ports, setting + # class keywords (e.g., 'abstract'), or setting parameter + # values or port bindings. The first 3 can only be set when + # the class is defined, so we handle them here. The others + # can be set later too, so just emulate that by calling + # setattr(). + for key,val in cls._value_dict.items(): # param descriptions - elif isinstance(val, ParamDesc): + if isinstance(val, ParamDesc): cls._new_param(key, val) + # port objects + elif isinstance(val, Port): + cls._ports[key] = val + # init-time-only keywords elif cls.init_keywords.has_key(key): cls._set_keyword(key, val, cls.init_keywords[key]) @@ -256,10 +249,19 @@ class MetaSimObject(type): cls._set_keyword(attr, value, cls.keywords[attr]) return - # must be SimObject param + if cls._ports.has_key(attr): + self._ports[attr].connect(self, attr, value) + return + + if isSimObjectOrSequence(value) and cls._instantiated: + raise RuntimeError, \ + "cannot set SimObject parameter '%s' after\n" \ + " class %s has been instantiated or subclassed" \ + % (attr, cls.__name__) + + # check for param param = cls._params.get(attr, None) if param: - # It's ok: set attribute by delegating to 'object' class. try: cls._values[attr] = param.convert(value) except Exception, e: @@ -267,12 +269,12 @@ class MetaSimObject(type): (e, cls.__name__, attr, value) e.args = (msg, ) raise - # I would love to get rid of this - elif isSimObject(value) or isSimObjSequence(value): - cls._values[attr] = value + elif isSimObjectOrSequence(value): + # if RHS is a SimObject, it's an implicit child assignment + cls._values[attr] = value else: raise AttributeError, \ - "Class %s has no parameter %s" % (cls.__name__, attr) + "Class %s has no parameter \'%s\'" % (cls.__name__, attr) def __getattr__(cls, attr): if cls._values.has_key(attr): @@ -281,7 +283,7 @@ class MetaSimObject(type): raise AttributeError, \ "object '%s' has no attribute '%s'" % (cls.__name__, attr) -# The ConfigNode class is the root of the special hierarchy. Most of +# The SimObject class is the root of the special hierarchy. Most of # the code in this class deals with the configuration hierarchy itself # (parent/child node relationships). class SimObject(object): @@ -289,29 +291,79 @@ class SimObject(object): # get this metaclass. __metaclass__ = MetaSimObject - def __init__(self, _value_parent = None, **kwargs): + # Initialize new instance. For objects with SimObject-valued + # children, we need to recursively clone the classes represented + # by those param values as well in a consistent "deep copy"-style + # fashion. 
That is, we want to make sure that each instance is + # cloned only once, and that if there are multiple references to + # the same original object, we end up with the corresponding + # cloned references all pointing to the same cloned instance. + def __init__(self, **kwargs): + ancestor = kwargs.get('_ancestor') + memo_dict = kwargs.get('_memo') + if memo_dict is None: + # prepare to memoize any recursively instantiated objects + memo_dict = {} + elif ancestor: + # memoize me now to avoid problems with recursive calls + memo_dict[ancestor] = self + + if not ancestor: + ancestor = self.__class__ + ancestor._instantiated = True + + # initialize required attributes + self._parent = None self._children = {} - if _value_parent and type(_value_parent) != type(self): - # this was called as a type conversion rather than a clone - raise TypeError, "Cannot convert %s to %s" % \ - (_value_parent.__class__.__name__, self.__class__.__name__) - if not _value_parent: - _value_parent = self.__class__ - # clone values - self._values = multidict(_value_parent._values) - for key,val in _value_parent._values.iteritems(): + self._ccObject = None # pointer to C++ object + self._instantiated = False # really "cloned" + + # Inherit parameter values from class using multidict so + # individual value settings can be overridden. + self._values = multidict(ancestor._values) + # clone SimObject-valued parameters + for key,val in ancestor._values.iteritems(): if isSimObject(val): - setattr(self, key, val()) - elif isSimObjSequence(val) and len(val): - setattr(self, key, [ v() for v in val ]) + setattr(self, key, val(_memo=memo_dict)) + elif isSimObjectSequence(val) and len(val): + setattr(self, key, [ v(_memo=memo_dict) for v in val ]) + # clone port references. no need to use a multidict here + # since we will be creating new references for all ports. + self._port_map = {} + for key,val in ancestor._port_map.iteritems(): + self._port_map[key] = applyOrMap(val, 'clone', memo_dict) # apply attribute assignments from keyword args, if any for key,val in kwargs.iteritems(): setattr(self, key, val) + # "Clone" the current instance by creating another instance of + # this instance's class, but that inherits its parameter values + # and port mappings from the current instance. If we're in a + # "deep copy" recursive clone, check the _memo dict to see if + # we've already cloned this instance. def __call__(self, **kwargs): - return self.__class__(_value_parent = self, **kwargs) + memo_dict = kwargs.get('_memo') + if memo_dict is None: + # no memo_dict: must be top-level clone operation. + # this is only allowed at the root of a hierarchy + if self._parent: + raise RuntimeError, "attempt to clone object %s " \ + "not at the root of a tree (parent = %s)" \ + % (self, self._parent) + # create a new dict and use that. 
+ memo_dict = {} + kwargs['_memo'] = memo_dict + elif memo_dict.has_key(self): + # clone already done & memoized + return memo_dict[self] + return self.__class__(_ancestor = self, **kwargs) def __getattr__(self, attr): + if self._ports.has_key(attr): + # return reference that can be assigned to another port + # via __setattr__ + return self._ports[attr].makeRef(self, attr) + if self._values.has_key(attr): return self._values[attr] @@ -326,10 +378,19 @@ class SimObject(object): object.__setattr__(self, attr, value) return + if self._ports.has_key(attr): + # set up port connection + self._ports[attr].connect(self, attr, value) + return + + if isSimObjectOrSequence(value) and self._instantiated: + raise RuntimeError, \ + "cannot set SimObject parameter '%s' after\n" \ + " instance been cloned %s" % (attr, `self`) + # must be SimObject param param = self._params.get(attr, None) if param: - # It's ok: set attribute by delegating to 'object' class. try: value = param.convert(value) except Exception, e: @@ -337,8 +398,7 @@ class SimObject(object): (e, self.__class__.__name__, attr, value) e.args = (msg, ) raise - # I would love to get rid of this - elif isSimObject(value) or isSimObjSequence(value): + elif isSimObjectOrSequence(value): pass else: raise AttributeError, "Class %s has no parameter %s" \ @@ -349,7 +409,7 @@ class SimObject(object): if isSimObject(value): value.set_path(self, attr) - elif isSimObjSequence(value): + elif isSimObjectSequence(value): value = SimObjVector(value) [v.set_path(self, "%s%d" % (attr, i)) for i,v in enumerate(value)] @@ -376,13 +436,13 @@ class SimObject(object): self._children[name] = value def set_path(self, parent, name): - if not hasattr(self, '_parent'): + if not self._parent: self._parent = parent self._name = name parent.add_child(name, self) def path(self): - if not hasattr(self, '_parent'): + if not self._parent: return 'root' ppath = self._parent.path() if ppath == 'root': @@ -423,6 +483,8 @@ class SimObject(object): def print_ini(self): print '[' + self.path() + ']' # .ini section header + instanceDict[self.path()] = self + if hasattr(self, 'type') and not isinstance(self, ParamContext): print 'type=%s' % self.type @@ -454,6 +516,60 @@ class SimObject(object): for child in child_names: self._children[child].print_ini() + # Call C++ to create C++ object corresponding to this object and + # (recursively) all its children + def createCCObject(self): + self.getCCObject() # force creation + for child in self._children.itervalues(): + child.createCCObject() + + # Get C++ object corresponding to this object, calling C++ if + # necessary to construct it. Does *not* recursively create + # children. 
+ def getCCObject(self): + if not self._ccObject: + self._ccObject = -1 # flag to catch cycles in recursion + self._ccObject = cc_main.createSimObject(self.path()) + elif self._ccObject == -1: + raise RuntimeError, "%s: recursive call to getCCObject()" \ + % self.path() + return self._ccObject + + # Create C++ port connections corresponding to the connections in + # _port_map (& recursively for all children) + def connectPorts(self): + for portRef in self._port_map.itervalues(): + applyOrMap(portRef, 'ccConnect') + for child in self._children.itervalues(): + child.connectPorts() + + def startQuiesce(self, quiesce_event, recursive): + count = 0 + # ParamContexts don't serialize + if isinstance(self, SimObject) and not isinstance(self, ParamContext): + if self._ccObject.quiesce(quiesce_event): + count = 1 + if recursive: + for child in self._children.itervalues(): + count += child.startQuiesce(quiesce_event, True) + return count + + def resume(self): + if isinstance(self, SimObject) and not isinstance(self, ParamContext): + self._ccObject.resume() + for child in self._children.itervalues(): + child.resume() + + def changeTiming(self, mode): + if isinstance(self, SimObject) and not isinstance(self, ParamContext): + self._ccObject.setMemoryMode(mode) + for child in self._children.itervalues(): + child.changeTiming(mode) + + def takeOverFrom(self, old_cpu): + cpu_ptr = cc_main.convertToBaseCPUPtr(old_cpu._ccObject) + self._ccObject.takeOverFrom(cpu_ptr) + # generate output file for 'dot' to display as a pretty graph. # this code is currently broken. def outputDot(self, dot): @@ -544,9 +660,9 @@ class BaseProxy(object): if self._search_up: while not done: - try: obj = obj._parent - except: break - + obj = obj._parent + if not obj: + break result, done = self.find(obj) if not done: @@ -662,16 +778,16 @@ Self = ProxyFactory(search_self = True, search_up = False) # # Parameter description classes # -# The _params dictionary in each class maps parameter names to -# either a Param or a VectorParam object. These objects contain the +# The _params dictionary in each class maps parameter names to either +# a Param or a VectorParam object. These objects contain the # parameter description string, the parameter type, and the default -# value (loaded from the PARAM section of the .odesc files). The -# _convert() method on these objects is used to force whatever value -# is assigned to the parameter to the appropriate type. +# value (if any). The convert() method on these objects is used to +# force whatever value is assigned to the parameter to the appropriate +# type. # # Note that the default values are loaded into the class's attribute # space when the parameter dictionary is initialized (in -# MetaConfigNode._setparams()); after that point they aren't used. +# MetaSimObject._new_param()); after that point they aren't used. 
# ##################################################################### @@ -725,7 +841,7 @@ class ParamDesc(object): def __getattr__(self, attr): if attr == 'ptype': try: - ptype = eval(self.ptype_str, m5.__dict__) + ptype = eval(self.ptype_str, m5.objects.__dict__) if not isinstance(ptype, type): panic("Param qualifier is not a type: %s" % self.ptype) self.ptype = ptype @@ -772,7 +888,7 @@ class VectorParamDesc(ParamDesc): if isinstance(value, (list, tuple)): # list: coerce each element into new list tmp_list = [ ParamDesc.convert(self, v) for v in value ] - if isSimObjSequence(tmp_list): + if isSimObjectSequence(tmp_list): return SimObjVector(tmp_list) else: return VectorParamValue(tmp_list) @@ -794,7 +910,7 @@ class ParamFactory(object): # E.g., Param.Int(5, "number of widgets") def __call__(self, *args, **kwargs): - caller_frame = inspect.stack()[1][0] + caller_frame = inspect.currentframe().f_back ptype = None try: ptype = eval(self.ptype_str, @@ -1066,7 +1182,10 @@ class EthernetAddr(ParamValue): def __str__(self): if self.value == NextEthernetAddr: - return self.addr + if hasattr(self, 'addr'): + return self.addr + else: + return "NextEthernetAddr (unresolved)" else: return self.value @@ -1285,24 +1404,108 @@ MaxAddr = Addr.max MaxTick = Tick.max AllMemory = AddrRange(0, MaxAddr) + +##################################################################### +# +# Port objects +# +# Ports are used to interconnect objects in the memory system. +# ##################################################################### -# The final hook to generate .ini files. Called from configuration -# script once config is built. -def instantiate(root): - global ticks_per_sec - ticks_per_sec = float(root.clock.frequency) - root.print_ini() - noDot = True # temporary until we fix dot - if not noDot: - dot = pydot.Dot() - instance.outputDot(dot) - dot.orientation = "portrait" - dot.size = "8.5,11" - dot.ranksep="equally" - dot.rank="samerank" - dot.write("config.dot") - dot.write_ps("config.ps") +# Port reference: encapsulates a reference to a particular port on a +# particular SimObject. +class PortRef(object): + def __init__(self, simobj, name, isVec): + assert(isSimObject(simobj)) + self.simobj = simobj + self.name = name + self.index = -1 + self.isVec = isVec # is this a vector port? + self.peer = None # not associated with another port yet + self.ccConnected = False # C++ port connection done? + + # Set peer port reference. Called via __setattr__ as a result of + # a port assignment, e.g., "obj1.port1 = obj2.port2". + def setPeer(self, other): + if self.isVec: + curMap = self.simobj._port_map.get(self.name, []) + self.index = len(curMap) + curMap.append(other) + else: + curMap = self.simobj._port_map.get(self.name) + if curMap and not self.isVec: + print "warning: overwriting port", self.simobj, self.name + curMap = other + self.simobj._port_map[self.name] = curMap + self.peer = other + + def clone(self, memo): + newRef = copy.copy(self) + assert(isSimObject(newRef.simobj)) + newRef.simobj = newRef.simobj(_memo=memo) + # Tricky: if I'm the *second* PortRef in the pair to be + # cloned, then my peer is still in the middle of its clone + # method, and thus hasn't returned to its owner's + # SimObject.__init__ to get installed in _port_map. As a + # result I have no way of finding the *new* peer object. So I + # mark myself as "waiting" for my peer, and I let the *first* + # PortRef clone call set up both peer pointers after I return. 
+ newPeer = newRef.simobj._port_map.get(self.name) + if newPeer: + if self.isVec: + assert(self.index != -1) + newPeer = newPeer[self.index] + # other guy is all set up except for his peer pointer + assert(newPeer.peer == -1) # peer must be waiting for handshake + newPeer.peer = newRef + newRef.peer = newPeer + else: + # other guy is in clone; just wait for him to do the work + newRef.peer = -1 # mark as waiting for handshake + return newRef + + # Call C++ to create corresponding port connection between C++ objects + def ccConnect(self): + if self.ccConnected: # already done this + return + peer = self.peer + cc_main.connectPorts(self.simobj.getCCObject(), self.name, self.index, + peer.simobj.getCCObject(), peer.name, peer.index) + self.ccConnected = True + peer.ccConnected = True + +# Port description object. Like a ParamDesc object, this represents a +# logical port in the SimObject class, not a particular port on a +# SimObject instance. The latter are represented by PortRef objects. +class Port(object): + def __init__(self, desc): + self.desc = desc + self.isVec = False + + # Generate a PortRef for this port on the given SimObject with the + # given name + def makeRef(self, simobj, name): + return PortRef(simobj, name, self.isVec) + + # Connect an instance of this port (on the given SimObject with + # the given name) with the port described by the supplied PortRef + def connect(self, simobj, name, ref): + if not isinstance(ref, PortRef): + raise TypeError, \ + "assigning non-port reference port '%s'" % name + myRef = self.makeRef(simobj, name) + myRef.setPeer(ref) + ref.setPeer(myRef) + +# VectorPort description object. Like Port, but represents a vector +# of connections (e.g., as on a Bus). +class VectorPort(Port): + def __init__(self, desc): + Port.__init__(self, desc) + self.isVec = True + +##################################################################### # __all__ defines the list of symbols that get exported when # 'from config import *' is invoked. Try to keep this reasonably @@ -1319,5 +1522,6 @@ __all__ = ['SimObject', 'ParamContext', 'Param', 'VectorParam', 'NetworkBandwidth', 'MemoryBandwidth', 'Range', 'AddrRange', 'MaxAddr', 'MaxTick', 'AllMemory', 'Null', 'NULL', - 'NextEthernetAddr', 'instantiate'] + 'NextEthernetAddr', + 'Port', 'VectorPort'] diff --git a/src/python/m5/convert.py b/src/python/m5/convert.py index 73181e985..580a579bc 100644 --- a/src/python/m5/convert.py +++ b/src/python/m5/convert.py @@ -23,6 +23,8 @@ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# Authors: Nathan Binkert # metric prefixes exa = 1.0e18 diff --git a/src/python/m5/multidict.py b/src/python/m5/multidict.py index fd40ebbbd..34fc3139b 100644 --- a/src/python/m5/multidict.py +++ b/src/python/m5/multidict.py @@ -23,13 +23,15 @@ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+# +# Authors: Nathan Binkert __all__ = [ 'multidict' ] class multidict(object): __nodefault = object() def __init__(self, parent = {}, **kwargs): - self.dict = dict(**kwargs) + self.local = dict(**kwargs) self.parent = parent self.deleted = {} @@ -40,11 +42,11 @@ class multidict(object): return `dict(self.items())` def __contains__(self, key): - return self.dict.has_key(key) or self.parent.has_key(key) + return self.local.has_key(key) or self.parent.has_key(key) def __delitem__(self, key): try: - del self.dict[key] + del self.local[key] except KeyError, e: if key in self.parent: self.deleted[key] = True @@ -53,11 +55,11 @@ class multidict(object): def __setitem__(self, key, value): self.deleted.pop(key, False) - self.dict[key] = value + self.local[key] = value def __getitem__(self, key): try: - return self.dict[key] + return self.local[key] except KeyError, e: if not self.deleted.get(key, False) and key in self.parent: return self.parent[key] @@ -65,15 +67,15 @@ class multidict(object): raise KeyError, e def __len__(self): - return len(self.dict) + len(self.parent) + return len(self.local) + len(self.parent) def next(self): - for key,value in self.dict.items(): + for key,value in self.local.items(): yield key,value if self.parent: for key,value in self.parent.next(): - if key not in self.dict and key not in self.deleted: + if key not in self.local and key not in self.deleted: yield key,value def has_key(self, key): @@ -114,22 +116,22 @@ class multidict(object): return self[key] except KeyError: self.deleted.pop(key, False) - self.dict[key] = default + self.local[key] = default return default def _dump(self): print 'multidict dump' node = self while isinstance(node, multidict): - print ' ', node.dict + print ' ', node.local node = node.parent def _dumpkey(self, key): values = [] node = self while isinstance(node, multidict): - if key in node.dict: - values.append(node.dict[key]) + if key in node.local: + values.append(node.local[key]) node = node.parent print key, values diff --git a/src/python/m5/objects/AlphaConsole.py b/src/python/m5/objects/AlphaConsole.py index 68e6089ab..329b8c5bd 100644 --- a/src/python/m5/objects/AlphaConsole.py +++ b/src/python/m5/objects/AlphaConsole.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * from Device import BasicPioDevice class AlphaConsole(BasicPioDevice): diff --git a/src/python/m5/objects/AlphaTLB.py b/src/python/m5/objects/AlphaTLB.py index 5edf8e13d..11c1792ee 100644 --- a/src/python/m5/objects/AlphaTLB.py +++ b/src/python/m5/objects/AlphaTLB.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * class AlphaTLB(SimObject): type = 'AlphaTLB' abstract = True diff --git a/src/python/m5/objects/BadDevice.py b/src/python/m5/objects/BadDevice.py index 9cb9a8f03..186b733fa 100644 --- a/src/python/m5/objects/BadDevice.py +++ b/src/python/m5/objects/BadDevice.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * from Device import BasicPioDevice class BadDevice(BasicPioDevice): diff --git a/src/python/m5/objects/BaseCPU.py b/src/python/m5/objects/BaseCPU.py index 49cb2a8f3..2e78578df 100644 --- a/src/python/m5/objects/BaseCPU.py +++ b/src/python/m5/objects/BaseCPU.py @@ -1,4 +1,6 @@ -from m5 import * +from m5 import build_env +from m5.config import * + class BaseCPU(SimObject): type = 'BaseCPU' abstract = True diff --git a/src/python/m5/objects/BaseCache.py b/src/python/m5/objects/BaseCache.py index 79d21572a..497b2b038 100644 --- a/src/python/m5/objects/BaseCache.py +++ b/src/python/m5/objects/BaseCache.py @@ -1,29 +1,26 @@ 
-from m5 import * -from BaseMem import BaseMem +from m5.config import * +from MemObject import MemObject class Prefetch(Enum): vals = ['none', 'tagged', 'stride', 'ghb'] -class BaseCache(BaseMem): +class BaseCache(MemObject): type = 'BaseCache' adaptive_compression = Param.Bool(False, "Use an adaptive compression scheme") assoc = Param.Int("associativity") block_size = Param.Int("block size in bytes") + latency = Param.Int("Latency") compressed_bus = Param.Bool(False, "This cache connects to a compressed memory") compression_latency = Param.Latency('0ns', "Latency in cycles of compression algorithm") do_copy = Param.Bool(False, "perform fast copies in the cache") hash_delay = Param.Int(1, "time in cycles of hash access") - in_bus = Param.Bus(NULL, "incoming bus object") lifo = Param.Bool(False, "whether this NIC partition should use LIFO repl. policy") max_miss_count = Param.Counter(0, "number of misses to handle before calling exit") - mem_trace = Param.MemTraceWriter(NULL, - "memory trace writer to record accesses") mshrs = Param.Int("number of MSHRs (max outstanding requests)") - out_bus = Param.Bus("outgoing bus object") prioritizeRequests = Param.Bool(False, "always service demand misses first") protocol = Param.CoherenceProtocol(NULL, "coherence protocol to use") @@ -63,3 +60,6 @@ class BaseCache(BaseMem): "Use the CPU ID to seperate calculations of prefetches") prefetch_data_accesses_only = Param.Bool(False, "Only prefetch on data not on instruction accesses") + hit_latency = Param.Int(1,"Hit Latency of the cache") + cpu_side = Port("Port on side closer to CPU") + mem_side = Port("Port on side closer to MEM") diff --git a/src/python/m5/objects/Bridge.py b/src/python/m5/objects/Bridge.py index ada715ce9..c9e673afb 100644 --- a/src/python/m5/objects/Bridge.py +++ b/src/python/m5/objects/Bridge.py @@ -1,8 +1,10 @@ -from m5 import * +from m5.config import * from MemObject import MemObject class Bridge(MemObject): type = 'Bridge' + side_a = Port('Side A port') + side_b = Port('Side B port') queue_size_a = Param.Int(16, "The number of requests to buffer") queue_size_b = Param.Int(16, "The number of requests to buffer") delay = Param.Latency('0ns', "The latency of this bridge") diff --git a/src/python/m5/objects/Bus.py b/src/python/m5/objects/Bus.py index 8c5397281..019e15034 100644 --- a/src/python/m5/objects/Bus.py +++ b/src/python/m5/objects/Bus.py @@ -1,6 +1,7 @@ -from m5 import * +from m5.config import * from MemObject import MemObject class Bus(MemObject): type = 'Bus' + port = VectorPort("vector port for connecting devices") bus_id = Param.Int(0, "blah") diff --git a/src/python/m5/objects/CoherenceProtocol.py b/src/python/m5/objects/CoherenceProtocol.py index 7013000d6..64b6cbacf 100644 --- a/src/python/m5/objects/CoherenceProtocol.py +++ b/src/python/m5/objects/CoherenceProtocol.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * class Coherence(Enum): vals = ['uni', 'msi', 'mesi', 'mosi', 'moesi'] class CoherenceProtocol(SimObject): diff --git a/src/python/m5/objects/Device.py b/src/python/m5/objects/Device.py index 2a71bbc65..222f750da 100644 --- a/src/python/m5/objects/Device.py +++ b/src/python/m5/objects/Device.py @@ -1,9 +1,10 @@ -from m5 import * +from m5.config import * from MemObject import MemObject class PioDevice(MemObject): type = 'PioDevice' abstract = True + pio = Port("Programmed I/O port") platform = Param.Platform(Parent.any, "Platform this device is part of") system = Param.System(Parent.any, "System this device is part of") @@ -16,3 +17,4 @@ class 
BasicPioDevice(PioDevice): class DmaDevice(PioDevice): type = 'DmaDevice' abstract = True + dma = Port("DMA port") diff --git a/src/python/m5/objects/DiskImage.py b/src/python/m5/objects/DiskImage.py index 0d55e9329..70d8b2e45 100644 --- a/src/python/m5/objects/DiskImage.py +++ b/src/python/m5/objects/DiskImage.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * class DiskImage(SimObject): type = 'DiskImage' abstract = True diff --git a/src/python/m5/objects/Ethernet.py b/src/python/m5/objects/Ethernet.py index 4286c71c8..418670592 100644 --- a/src/python/m5/objects/Ethernet.py +++ b/src/python/m5/objects/Ethernet.py @@ -1,4 +1,5 @@ -from m5 import * +from m5 import build_env +from m5.config import * from Device import DmaDevice from Pci import PciDevice diff --git a/src/python/m5/objects/FUPool.py b/src/python/m5/objects/FUPool.py new file mode 100644 index 000000000..cbf1089cf --- /dev/null +++ b/src/python/m5/objects/FUPool.py @@ -0,0 +1,5 @@ +from m5.config import * + +class FUPool(SimObject): + type = 'FUPool' + FUList = VectorParam.FUDesc("list of FU's for this pool") diff --git a/src/python/m5/objects/FuncUnit.py b/src/python/m5/objects/FuncUnit.py new file mode 100644 index 000000000..f61590ae9 --- /dev/null +++ b/src/python/m5/objects/FuncUnit.py @@ -0,0 +1,17 @@ +from m5.config import * + +class OpType(Enum): + vals = ['(null)', 'IntAlu', 'IntMult', 'IntDiv', 'FloatAdd', + 'FloatCmp', 'FloatCvt', 'FloatMult', 'FloatDiv', 'FloatSqrt', + 'MemRead', 'MemWrite', 'IprAccess', 'InstPrefetch'] + +class OpDesc(SimObject): + type = 'OpDesc' + issueLat = Param.Int(1, "cycles until another can be issued") + opClass = Param.OpType("type of operation") + opLat = Param.Int(1, "cycles until result is available") + +class FUDesc(SimObject): + type = 'FUDesc' + count = Param.Int("number of these FU's available") + opList = VectorParam.OpDesc("operation classes for this FU type") diff --git a/src/python/m5/objects/Ide.py b/src/python/m5/objects/Ide.py index 2403e6d36..9ee578177 100644 --- a/src/python/m5/objects/Ide.py +++ b/src/python/m5/objects/Ide.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * from Pci import PciDevice class IdeID(Enum): vals = ['master', 'slave'] diff --git a/src/python/m5/objects/IntrControl.py b/src/python/m5/objects/IntrControl.py index 66c82c182..514c3fc62 100644 --- a/src/python/m5/objects/IntrControl.py +++ b/src/python/m5/objects/IntrControl.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * class IntrControl(SimObject): type = 'IntrControl' cpu = Param.BaseCPU(Parent.any, "the cpu") diff --git a/src/python/m5/objects/MemObject.py b/src/python/m5/objects/MemObject.py index 4d68243e6..d957dae17 100644 --- a/src/python/m5/objects/MemObject.py +++ b/src/python/m5/objects/MemObject.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * class MemObject(SimObject): type = 'MemObject' diff --git a/src/python/m5/objects/MemTest.py b/src/python/m5/objects/MemTest.py index 34299faf0..9916d7cb4 100644 --- a/src/python/m5/objects/MemTest.py +++ b/src/python/m5/objects/MemTest.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * class MemTest(SimObject): type = 'MemTest' cache = Param.BaseCache("L1 cache") diff --git a/src/python/m5/objects/O3CPU.py b/src/python/m5/objects/O3CPU.py new file mode 100644 index 000000000..4ecfa8fbd --- /dev/null +++ b/src/python/m5/objects/O3CPU.py @@ -0,0 +1,98 @@ +from m5 import build_env +from m5.config import * +from BaseCPU import BaseCPU + +class DerivO3CPU(BaseCPU): + type = 'DerivO3CPU' + 
activity = Param.Unsigned("Initial count") + numThreads = Param.Unsigned("number of HW thread contexts") + + checker = Param.BaseCPU(NULL, "checker") + + cachePorts = Param.Unsigned("Cache Ports") + + decodeToFetchDelay = Param.Unsigned("Decode to fetch delay") + renameToFetchDelay = Param.Unsigned("Rename to fetch delay") + iewToFetchDelay = Param.Unsigned("Issue/Execute/Writeback to fetch " + "delay") + commitToFetchDelay = Param.Unsigned("Commit to fetch delay") + fetchWidth = Param.Unsigned("Fetch width") + + renameToDecodeDelay = Param.Unsigned("Rename to decode delay") + iewToDecodeDelay = Param.Unsigned("Issue/Execute/Writeback to decode " + "delay") + commitToDecodeDelay = Param.Unsigned("Commit to decode delay") + fetchToDecodeDelay = Param.Unsigned("Fetch to decode delay") + decodeWidth = Param.Unsigned("Decode width") + + iewToRenameDelay = Param.Unsigned("Issue/Execute/Writeback to rename " + "delay") + commitToRenameDelay = Param.Unsigned("Commit to rename delay") + decodeToRenameDelay = Param.Unsigned("Decode to rename delay") + renameWidth = Param.Unsigned("Rename width") + + commitToIEWDelay = Param.Unsigned("Commit to " + "Issue/Execute/Writeback delay") + renameToIEWDelay = Param.Unsigned("Rename to " + "Issue/Execute/Writeback delay") + issueToExecuteDelay = Param.Unsigned("Issue to execute delay (internal " + "to the IEW stage)") + issueWidth = Param.Unsigned("Issue width") + executeWidth = Param.Unsigned("Execute width") + executeIntWidth = Param.Unsigned("Integer execute width") + executeFloatWidth = Param.Unsigned("Floating point execute width") + executeBranchWidth = Param.Unsigned("Branch execute width") + executeMemoryWidth = Param.Unsigned("Memory execute width") + fuPool = Param.FUPool(NULL, "Functional Unit pool") + + iewToCommitDelay = Param.Unsigned("Issue/Execute/Writeback to commit " + "delay") + renameToROBDelay = Param.Unsigned("Rename to reorder buffer delay") + commitWidth = Param.Unsigned("Commit width") + squashWidth = Param.Unsigned("Squash width") + trapLatency = Param.Tick("Trap latency") + fetchTrapLatency = Param.Tick("Fetch trap latency") + + predType = Param.String("Branch predictor type ('local', 'tournament')") + localPredictorSize = Param.Unsigned("Size of local predictor") + localCtrBits = Param.Unsigned("Bits per counter") + localHistoryTableSize = Param.Unsigned("Size of local history table") + localHistoryBits = Param.Unsigned("Bits for the local history") + globalPredictorSize = Param.Unsigned("Size of global predictor") + globalCtrBits = Param.Unsigned("Bits per counter") + globalHistoryBits = Param.Unsigned("Bits of history") + choicePredictorSize = Param.Unsigned("Size of choice predictor") + choiceCtrBits = Param.Unsigned("Bits of choice counters") + + BTBEntries = Param.Unsigned("Number of BTB entries") + BTBTagSize = Param.Unsigned("Size of the BTB tags, in bits") + + RASSize = Param.Unsigned("RAS size") + + LQEntries = Param.Unsigned("Number of load queue entries") + SQEntries = Param.Unsigned("Number of store queue entries") + LFSTSize = Param.Unsigned("Last fetched store table size") + SSITSize = Param.Unsigned("Store set ID table size") + + numRobs = Param.Unsigned("Number of Reorder Buffers"); + + numPhysIntRegs = Param.Unsigned("Number of physical integer registers") + numPhysFloatRegs = Param.Unsigned("Number of physical floating point " + "registers") + numIQEntries = Param.Unsigned("Number of instruction queue entries") + numROBEntries = Param.Unsigned("Number of reorder buffer entries") + + instShiftAmt = 
Param.Unsigned("Number of bits to shift instructions by") + + function_trace = Param.Bool(False, "Enable function trace") + function_trace_start = Param.Tick(0, "Cycle to start function trace") + + smtNumFetchingThreads = Param.Unsigned("SMT Number of Fetching Threads") + smtFetchPolicy = Param.String("SMT Fetch policy") + smtLSQPolicy = Param.String("SMT LSQ Sharing Policy") + smtLSQThreshold = Param.String("SMT LSQ Threshold Sharing Parameter") + smtIQPolicy = Param.String("SMT IQ Sharing Policy") + smtIQThreshold = Param.String("SMT IQ Threshold Sharing Parameter") + smtROBPolicy = Param.String("SMT ROB Sharing Policy") + smtROBThreshold = Param.String("SMT ROB Threshold Sharing Parameter") + smtCommitPolicy = Param.String("SMT Commit Policy") diff --git a/src/python/m5/objects/OzoneCPU.py b/src/python/m5/objects/OzoneCPU.py new file mode 100644 index 000000000..8aff89203 --- /dev/null +++ b/src/python/m5/objects/OzoneCPU.py @@ -0,0 +1,88 @@ +from m5 import build_env +from m5.config import * +from BaseCPU import BaseCPU + +class DerivOzoneCPU(BaseCPU): + type = 'DerivOzoneCPU' + + numThreads = Param.Unsigned("number of HW thread contexts") + + checker = Param.BaseCPU("Checker CPU") + + width = Param.Unsigned("Width") + frontEndWidth = Param.Unsigned("Front end width") + backEndWidth = Param.Unsigned("Back end width") + backEndSquashLatency = Param.Unsigned("Back end squash latency") + backEndLatency = Param.Unsigned("Back end latency") + maxInstBufferSize = Param.Unsigned("Maximum instruction buffer size") + maxOutstandingMemOps = Param.Unsigned("Maximum number of outstanding memory operations") + decodeToFetchDelay = Param.Unsigned("Decode to fetch delay") + renameToFetchDelay = Param.Unsigned("Rename to fetch delay") + iewToFetchDelay = Param.Unsigned("Issue/Execute/Writeback to fetch " + "delay") + commitToFetchDelay = Param.Unsigned("Commit to fetch delay") + fetchWidth = Param.Unsigned("Fetch width") + + renameToDecodeDelay = Param.Unsigned("Rename to decode delay") + iewToDecodeDelay = Param.Unsigned("Issue/Execute/Writeback to decode " + "delay") + commitToDecodeDelay = Param.Unsigned("Commit to decode delay") + fetchToDecodeDelay = Param.Unsigned("Fetch to decode delay") + decodeWidth = Param.Unsigned("Decode width") + + iewToRenameDelay = Param.Unsigned("Issue/Execute/Writeback to rename " + "delay") + commitToRenameDelay = Param.Unsigned("Commit to rename delay") + decodeToRenameDelay = Param.Unsigned("Decode to rename delay") + renameWidth = Param.Unsigned("Rename width") + + commitToIEWDelay = Param.Unsigned("Commit to " + "Issue/Execute/Writeback delay") + renameToIEWDelay = Param.Unsigned("Rename to " + "Issue/Execute/Writeback delay") + issueToExecuteDelay = Param.Unsigned("Issue to execute delay (internal " + "to the IEW stage)") + issueWidth = Param.Unsigned("Issue width") + executeWidth = Param.Unsigned("Execute width") + executeIntWidth = Param.Unsigned("Integer execute width") + executeFloatWidth = Param.Unsigned("Floating point execute width") + executeBranchWidth = Param.Unsigned("Branch execute width") + executeMemoryWidth = Param.Unsigned("Memory execute width") + + iewToCommitDelay = Param.Unsigned("Issue/Execute/Writeback to commit " + "delay") + renameToROBDelay = Param.Unsigned("Rename to reorder buffer delay") + commitWidth = Param.Unsigned("Commit width") + squashWidth = Param.Unsigned("Squash width") + + predType = Param.String("Type of branch predictor ('local', 'tournament')") + localPredictorSize = Param.Unsigned("Size of local predictor") + 
+    localCtrBits = Param.Unsigned("Bits per counter")
+    localHistoryTableSize = Param.Unsigned("Size of local history table")
+    localHistoryBits = Param.Unsigned("Bits for the local history")
+    globalPredictorSize = Param.Unsigned("Size of global predictor")
+    globalCtrBits = Param.Unsigned("Bits per counter")
+    globalHistoryBits = Param.Unsigned("Bits of history")
+    choicePredictorSize = Param.Unsigned("Size of choice predictor")
+    choiceCtrBits = Param.Unsigned("Bits of choice counters")
+
+    BTBEntries = Param.Unsigned("Number of BTB entries")
+    BTBTagSize = Param.Unsigned("Size of the BTB tags, in bits")
+
+    RASSize = Param.Unsigned("RAS size")
+
+    LQEntries = Param.Unsigned("Number of load queue entries")
+    SQEntries = Param.Unsigned("Number of store queue entries")
+    LFSTSize = Param.Unsigned("Last fetched store table size")
+    SSITSize = Param.Unsigned("Store set ID table size")
+
+    numPhysIntRegs = Param.Unsigned("Number of physical integer registers")
+    numPhysFloatRegs = Param.Unsigned("Number of physical floating point "
+                                      "registers")
+    numIQEntries = Param.Unsigned("Number of instruction queue entries")
+    numROBEntries = Param.Unsigned("Number of reorder buffer entries")
+
+    instShiftAmt = Param.Unsigned("Number of bits to shift instructions by")
+
+    function_trace = Param.Bool(False, "Enable function trace")
+    function_trace_start = Param.Tick(0, "Cycle to start function trace")
diff --git a/src/python/m5/objects/Pci.py b/src/python/m5/objects/Pci.py
index 85cefcd44..9e1e91b13 100644
--- a/src/python/m5/objects/Pci.py
+++ b/src/python/m5/objects/Pci.py
@@ -1,4 +1,4 @@
-from m5 import *
+from m5.config import *
from Device import BasicPioDevice, DmaDevice

class PciConfigData(SimObject):
diff --git a/src/python/m5/objects/PhysicalMemory.py b/src/python/m5/objects/PhysicalMemory.py
index c59910093..9cc7510a2 100644
--- a/src/python/m5/objects/PhysicalMemory.py
+++ b/src/python/m5/objects/PhysicalMemory.py
@@ -1,8 +1,9 @@
-from m5 import *
+from m5.config import *
from MemObject import *

class PhysicalMemory(MemObject):
    type = 'PhysicalMemory'
+    port = Port("the access port")
    range = Param.AddrRange("Device Address")
    file = Param.String('', "memory mapped file")
    latency = Param.Latency(Parent.clock, "latency of an access")
diff --git a/src/python/m5/objects/Platform.py b/src/python/m5/objects/Platform.py
index 4da0ffab4..89fee9991 100644
--- a/src/python/m5/objects/Platform.py
+++ b/src/python/m5/objects/Platform.py
@@ -1,4 +1,4 @@
-from m5 import *
+from m5.config import *
class Platform(SimObject):
    type = 'Platform'
    abstract = True
diff --git a/src/python/m5/objects/Process.py b/src/python/m5/objects/Process.py
index 60b00229e..0091d8654 100644
--- a/src/python/m5/objects/Process.py
+++ b/src/python/m5/objects/Process.py
@@ -1,4 +1,4 @@
-from m5 import *
+from m5.config import *
class Process(SimObject):
    type = 'Process'
    abstract = True
diff --git a/src/python/m5/objects/Repl.py b/src/python/m5/objects/Repl.py
index afd256082..8e9f1094f 100644
--- a/src/python/m5/objects/Repl.py
+++ b/src/python/m5/objects/Repl.py
@@ -1,4 +1,4 @@
-from m5 import *
+from m5.config import *
class Repl(SimObject):
    type = 'Repl'
    abstract = True
diff --git a/src/python/m5/objects/Root.py b/src/python/m5/objects/Root.py
index 205a93c76..373475a7a 100644
--- a/src/python/m5/objects/Root.py
+++ b/src/python/m5/objects/Root.py
@@ -1,4 +1,4 @@
-from m5 import *
+from m5.config import *
from Serialize import Serialize
from Statistics import Statistics
from Trace import Trace
diff --git a/src/python/m5/objects/SimConsole.py b/src/python/m5/objects/SimConsole.py
index df3061908..9e1452c6d 100644
--- a/src/python/m5/objects/SimConsole.py
+++ b/src/python/m5/objects/SimConsole.py
@@ -1,4 +1,4 @@
-from m5 import *
+from m5.config import *
class ConsoleListener(SimObject):
    type = 'ConsoleListener'
    port = Param.TcpPort(3456, "listen port")
diff --git a/src/python/m5/objects/SimpleDisk.py b/src/python/m5/objects/SimpleDisk.py
index e34155ace..44ef709af 100644
--- a/src/python/m5/objects/SimpleDisk.py
+++ b/src/python/m5/objects/SimpleDisk.py
@@ -1,4 +1,4 @@
-from m5 import *
+from m5.config import *
class SimpleDisk(SimObject):
    type = 'SimpleDisk'
    disk = Param.DiskImage("Disk Image")
diff --git a/src/python/m5/objects/AlphaFullCPU.py b/src/python/m5/objects/SimpleOzoneCPU.py
index 48989d057..5d968cab0 100644
--- a/src/python/m5/objects/AlphaFullCPU.py
+++ b/src/python/m5/objects/SimpleOzoneCPU.py
@@ -1,14 +1,21 @@
-from m5 import *
+from m5 import build_env
+from m5.config import *
from BaseCPU import BaseCPU

-class DerivAlphaFullCPU(BaseCPU):
-    type = 'DerivAlphaFullCPU'
+class SimpleOzoneCPU(BaseCPU):
+    type = 'SimpleOzoneCPU'

    numThreads = Param.Unsigned("number of HW thread contexts")

    if not build_env['FULL_SYSTEM']:
        mem = Param.FunctionalMemory(NULL, "memory")

+    width = Param.Unsigned("Width")
+    frontEndWidth = Param.Unsigned("Front end width")
+    backEndWidth = Param.Unsigned("Back end width")
+    backEndSquashLatency = Param.Unsigned("Back end squash latency")
+    backEndLatency = Param.Unsigned("Back end latency")
+    maxInstBufferSize = Param.Unsigned("Maximum instruction buffer size")
    decodeToFetchDelay = Param.Unsigned("Decode to fetch delay")
    renameToFetchDelay = Param.Unsigned("Rename to fetch delay")
    iewToFetchDelay = Param.Unsigned("Issue/Execute/Writeback to fetch "
@@ -48,15 +55,15 @@ class DerivAlphaFullCPU(BaseCPU):
    commitWidth = Param.Unsigned("Commit width")
    squashWidth = Param.Unsigned("Squash width")

-    local_predictor_size = Param.Unsigned("Size of local predictor")
-    local_ctr_bits = Param.Unsigned("Bits per counter")
-    local_history_table_size = Param.Unsigned("Size of local history table")
-    local_history_bits = Param.Unsigned("Bits for the local history")
-    global_predictor_size = Param.Unsigned("Size of global predictor")
-    global_ctr_bits = Param.Unsigned("Bits per counter")
-    global_history_bits = Param.Unsigned("Bits of history")
-    choice_predictor_size = Param.Unsigned("Size of choice predictor")
-    choice_ctr_bits = Param.Unsigned("Bits of choice counters")
+    localPredictorSize = Param.Unsigned("Size of local predictor")
+    localCtrBits = Param.Unsigned("Bits per counter")
+    localHistoryTableSize = Param.Unsigned("Size of local history table")
+    localHistoryBits = Param.Unsigned("Bits for the local history")
+    globalPredictorSize = Param.Unsigned("Size of global predictor")
+    globalCtrBits = Param.Unsigned("Bits per counter")
+    globalHistoryBits = Param.Unsigned("Bits of history")
+    choicePredictorSize = Param.Unsigned("Size of choice predictor")
+    choiceCtrBits = Param.Unsigned("Bits of choice counters")

    BTBEntries = Param.Unsigned("Number of BTB entries")
    BTBTagSize = Param.Unsigned("Size of the BTB tags, in bits")
diff --git a/src/python/m5/objects/System.py b/src/python/m5/objects/System.py
index 622b5a870..9a1e1d690 100644
--- a/src/python/m5/objects/System.py
+++ b/src/python/m5/objects/System.py
@@ -1,4 +1,5 @@
-from m5 import *
+from m5 import build_env
+from m5.config import *

class System(SimObject):
    type = 'System'
@@ -7,8 +8,6 @@ class System(SimObject):
    boot_cpu_frequency = Param.Frequency(Self.cpu[0].clock.frequency,
                                         "boot processor frequency")
    init_param = Param.UInt64(0, "numerical value to pass into simulator")
-    bin = Param.Bool(False, "is this system binned")
-    binned_fns = VectorParam.String([], "functions broken down and binned")
    boot_osflags = Param.String("a", "boot flags to pass to the kernel")
    kernel = Param.String("file that contains the kernel code")
    readfile = Param.String("", "file to read startup script from")
diff --git a/src/python/m5/objects/Tsunami.py b/src/python/m5/objects/Tsunami.py
index 27ea0bce8..4613571d8 100644
--- a/src/python/m5/objects/Tsunami.py
+++ b/src/python/m5/objects/Tsunami.py
@@ -1,4 +1,4 @@
-from m5 import *
+from m5.config import *
from Device import BasicPioDevice
from Platform import Platform

diff --git a/src/python/m5/objects/Uart.py b/src/python/m5/objects/Uart.py
index 54754aeb9..8e1fd1a37 100644
--- a/src/python/m5/objects/Uart.py
+++ b/src/python/m5/objects/Uart.py
@@ -1,4 +1,5 @@
-from m5 import *
+from m5 import build_env
+from m5.config import *
from Device import BasicPioDevice

class Uart(BasicPioDevice):
diff --git a/src/python/m5/smartdict.py b/src/python/m5/smartdict.py
index cd38d7326..d85dbd517 100644
--- a/src/python/m5/smartdict.py
+++ b/src/python/m5/smartdict.py
@@ -23,6 +23,8 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# Authors: Nathan Binkert

# The SmartDict class fixes a couple of issues with using the content
# of os.environ or similar dicts of strings as Python variables: