author    | Gabe Black <gblack@eecs.umich.edu> | 2006-06-12 00:49:24 -0400
committer | Gabe Black <gblack@eecs.umich.edu> | 2006-06-12 00:49:24 -0400
commit    | 15a8f050605919579e81b6abb98a0b596334216d (patch)
tree      | 583b0b30f13d9874a69015d58d041ce2d7352960 /src/python
parent    | 60a734e1750f3c051fe92fea6f12158db6e2dcb9 (diff)
parent    | df4b4f001e4db902297acf3b75480e4886e4e882 (diff)
download  | gem5-15a8f050605919579e81b6abb98a0b596334216d.tar.xz
Merge m5.eecs.umich.edu:/bk/newmem
into ewok.(none):/home/gblack/m5/newmem
src/arch/sparc/regfile.hh:
Hand Merge
--HG--
extra : convert_revision : c47202689202069892524a7d71962082469996ee
Diffstat (limited to 'src/python')
36 files changed, 580 insertions, 314 deletions
diff --git a/src/python/SConscript b/src/python/SConscript index 4407e403d..7b0f591eb 100644 --- a/src/python/SConscript +++ b/src/python/SConscript @@ -25,183 +25,82 @@ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# Authors: Steve Reinhardt +# Nathan Binkert import os, os.path, re, sys +from zipfile import PyZipFile -Import('env') +# handy function for path joins +def join(*args): + return os.path.normpath(os.path.join(*args)) -import scons_helper - -def WriteEmbeddedPyFile(target, source, path, name, ext, filename): - if isinstance(source, str): - source = file(source, 'r') - - if isinstance(target, str): - target = file(target, 'w') - - print >>target, "AddModule(%s, %s, %s, %s, '''\\" % \ - (`path`, `name`, `ext`, `filename`) - - for line in source: - line = line - # escape existing backslashes - line = line.replace('\\', '\\\\') - # escape existing triple quotes - line = line.replace("'''", r"\'\'\'") - - print >>target, line, - - print >>target, "''')" - print >>target - -def WriteCFile(target, source, name): - if isinstance(source, str): - source = file(source, 'r') - - if isinstance(target, str): - target = file(target, 'w') - - print >>target, 'const char %s_string[] = {' % name - - count = 0 - from array import array - try: - while True: - foo = array('B') - foo.fromfile(source, 10000) - l = [ str(i) for i in foo.tolist() ] - count += len(l) - for i in xrange(0,9999,20): - print >>target, ','.join(l[i:i+20]) + ',' - except EOFError: - l = [ str(i) for i in foo.tolist() ] - count += len(l) - for i in xrange(0,len(l),20): - print >>target, ','.join(l[i:i+20]) + ',' - print >>target, ','.join(l[i:]) + ',' - - print >>target, '};' - print >>target, 'const int %s_length = %d;' % (name, count) - print >>target - -def splitpath(path): - dir,file = os.path.split(path) - path = [] - assert(file) - while dir: - dir,base = os.path.split(dir) - path.insert(0, base) - return path, file - -def MakeEmbeddedPyFile(target, source, env): - target = file(str(target[0]), 'w') - - tree = {} - for src in source: - src = str(src) - path,pyfile = splitpath(src) - node = tree - for dir in path: - if not node.has_key(dir): - node[dir] = { } - node = node[dir] - - name,ext = pyfile.split('.') - if name == '__init__': - node['.hasinit'] = True - node[pyfile] = (src,name,ext,src) - - done = False - while not done: - done = True - for name,entry in tree.items(): - if not isinstance(entry, dict): continue - if entry.has_key('.hasinit'): continue - - done = False - del tree[name] - for key,val in entry.iteritems(): - if tree.has_key(key): - raise NameError, \ - "dir already has %s can't add it again" % key - tree[key] = val - - files = [] - def populate(node, path = []): - names = node.keys() - names.sort() - for name in names: - if name == '.hasinit': - continue - - entry = node[name] - if isinstance(entry, dict): - if not entry.has_key('.hasinit'): - raise NameError, 'package directory missing __init__.py' - populate(entry, path + [ name ]) - else: - pyfile,name,ext,filename = entry - files.append((pyfile, path, name, ext, filename)) - populate(tree) - - for pyfile, path, name, ext, filename in files: - WriteEmbeddedPyFile(target, pyfile, path, name, ext, filename) +Import('env') +# This SConscript is in charge of collecting .py files and generating +# a zip archive that is appended to the m5 binary. 
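For context on the new approach: once the zip archive is generated and appended to (or shipped alongside) the m5 binary, Python's standard zipimport machinery can load packages straight out of it. A minimal sketch, assuming an archive named m5py.zip next to the running interpreter; the path handling is illustrative, not the actual m5 startup code:

    import os.path, sys

    # A zip file on sys.path behaves like a directory of packages, so the
    # m5 package stored inside m5py.zip becomes importable without
    # unpacking anything to disk.
    archive = os.path.join(os.path.dirname(sys.executable), 'm5py.zip')
    sys.path.insert(0, archive)

    import m5    # resolved from inside the archive, assuming it holds an m5 package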
+ +# List of files & directories to include in the zip file. To include +# a package, list only the root directory of the package, not any +# internal .py files (else they will get the path stripped off when +# they are imported into the zip file). +pyzip_files = [] + +# List of additional files on which the zip archive depends, but which +# are not included in pyzip_files... i.e. individual .py files within +# a package. +pyzip_dep_files = [] + +# Add the specified package to the zip archive. Adds the directory to +# pyzip_files and all included .py files to pyzip_dep_files. +def addPkg(pkgdir): + pyzip_files.append(pkgdir) + origdir = os.getcwd() + srcdir = join(Dir('.').srcnode().abspath, pkgdir) + os.chdir(srcdir) + for path, dirs, files in os.walk('.'): + for i,dir in enumerate(dirs): + if dir == 'SCCS': + del dirs[i] + break + + for f in files: + if f.endswith('.py'): + pyzip_dep_files.append(join(pkgdir, path, f)) + + os.chdir(origdir) + +# Generate Python file that contains a dict specifying the current +# build_env flags. def MakeDefinesPyFile(target, source, env): f = file(str(target[0]), 'w') - print >>f, "import __main__" - print >>f, "__main__.m5_build_env = ", + print >>f, "m5_build_env = ", print >>f, source[0] f.close() -CFileCounter = 0 -def MakePythonCFile(target, source, env): - global CFileCounter - target = file(str(target[0]), 'w') - - print >>target, '''\ -#include "base/embedfile.hh" - -namespace { -''' - for src in source: - src = str(src) - fname = os.path.basename(src) - name = 'embedded_file%d' % CFileCounter - CFileCounter += 1 - WriteCFile(target, src, name) - print >>target, '''\ -EmbedMap %(name)s("%(fname)s", - %(name)s_string, %(name)s_length); - -''' % locals() - print >>target, '''\ - -/* namespace */ } -''' - -# base list of .py files to embed -embedded_py_files = [ os.path.join(env['ROOT'], 'util/pbs/jobfile.py') ] -# add all .py files in python/m5 -objpath = os.path.join(env['SRCDIR'], 'python', 'm5') -for root, dirs, files in os.walk(objpath, topdown=True): - for i,dir in enumerate(dirs): - if dir == 'SCCS': - del dirs[i] - break - - assert(root.startswith(objpath)) - for f in files: - if f.endswith('.py'): - embedded_py_files.append(os.path.join(root, f)) - -embedfile_hh = os.path.join(env['SRCDIR'], 'base/embedfile.hh') - optionDict = dict([(opt, env[opt]) for opt in env.ExportOptions]) -env.Command('defines.py', Value(optionDict), MakeDefinesPyFile) - -env.Command('embedded_py.py', embedded_py_files, MakeEmbeddedPyFile) -env.Depends('embedded_py.cc', embedfile_hh) -env.Command('embedded_py.cc', - ['string_importer.py', 'defines.py', 'embedded_py.py'], - MakePythonCFile) +env.Command('m5/defines.py', Value(optionDict), MakeDefinesPyFile) + +# Now specify the packages & files for the zip archive. +addPkg('m5') +pyzip_files.append('m5/defines.py') +pyzip_files.append(join(env['ROOT'], 'util/pbs/jobfile.py')) + +env.Command(['swig/main_wrap.cc', 'm5/main.py'], + 'swig/main.i', + '$SWIG $SWIGFLAGS -outdir ${TARGETS[1].dir} ' + '-o ${TARGETS[0]} $SOURCES') + +pyzip_dep_files.append('m5/main.py') + +# Action function to build the zip archive. Uses the PyZipFile module +# included in the standard Python library. +def buildPyZip(target, source, env): + pzf = PyZipFile(str(target[0]), 'w') + for s in source: + pzf.writepy(str(s)) + +# Add the zip file target to the environment. 
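The buildPyZip action above leans entirely on PyZipFile from the standard library. A standalone sketch of the same operation outside SCons, assuming an m5 package directory and a loose jobfile.py exist in the current directory (both names are placeholders here):

    from zipfile import PyZipFile

    pzf = PyZipFile('m5py.zip', 'w')
    pzf.writepy('m5')            # package directory: its .py files are compiled
                                 # and stored under the package name
    pzf.writepy('jobfile.py')    # single module: compiled and stored at the
                                 # top level of the archive
    pzf.close()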
+env.Command('m5py.zip', pyzip_files, buildPyZip) +env.Depends('m5py.zip', pyzip_dep_files) diff --git a/src/python/m5/__init__.py b/src/python/m5/__init__.py index 9bb68a090..60a61d66e 100644 --- a/src/python/m5/__init__.py +++ b/src/python/m5/__init__.py @@ -23,24 +23,30 @@ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# Authors: Nathan Binkert +# Steve Reinhardt + +import sys, os, time, atexit, optparse -import sys, os +# import the SWIG-wrapped main C++ functions +import main +# import a few SWIG-wrapped items (those that are likely to be used +# directly by user scripts) completely into this module for +# convenience +from main import simulate, SimLoopExitEvent + +# import the m5 compile options +import defines # define this here so we can use it right away if necessary def panic(string): print >>sys.stderr, 'panic:', string sys.exit(1) -def m5execfile(f, global_dict): - # copy current sys.path - oldpath = sys.path[:] - # push file's directory onto front of path - sys.path.insert(0, os.path.abspath(os.path.dirname(f))) - execfile(f, global_dict) - # restore original path - sys.path = oldpath - -# Prepend given directory to system module search path. +# Prepend given directory to system module search path. We may not +# need this anymore if we can structure our config library more like a +# Python package. def AddToPath(path): # if it's a relative path and we know what directory the current # python script is in, make the path relative to that directory. @@ -51,24 +57,58 @@ def AddToPath(path): # so place the new dir right after that. sys.path.insert(1, path) -# find the m5 compile options: must be specified as a dict in -# __main__.m5_build_env. -import __main__ -if not hasattr(__main__, 'm5_build_env'): - panic("__main__ must define m5_build_env") + +# Callback to set trace flags. Not necessarily the best way to do +# things in the long run (particularly if we change how these global +# options are handled). +def setTraceFlags(option, opt_str, value, parser): + objects.Trace.flags = value + +# Standard optparse options. Need to be explicitly included by the +# user script when it calls optparse.OptionParser(). +standardOptions = [ + optparse.make_option("--traceflags", type="string", action="callback", + callback=setTraceFlags) + ] # make a SmartDict out of the build options for our local use import smartdict build_env = smartdict.SmartDict() -build_env.update(__main__.m5_build_env) +build_env.update(defines.m5_build_env) # make a SmartDict out of the OS environment too env = smartdict.SmartDict() env.update(os.environ) -# import the main m5 config code -from config import * +# The final hook to generate .ini files. Called from the user script +# once the config is built. +def instantiate(root): + config.ticks_per_sec = float(root.clock.frequency) + # ugly temporary hack to get output to config.ini + sys.stdout = file('config.ini', 'w') + root.print_ini() + sys.stdout.close() # close config.ini + sys.stdout = sys.__stdout__ # restore to original + main.initialize() # load config.ini into C++ and process it + noDot = True # temporary until we fix dot + if not noDot: + dot = pydot.Dot() + instance.outputDot(dot) + dot.orientation = "portrait" + dot.size = "8.5,11" + dot.ranksep="equally" + dot.rank="samerank" + dot.write("config.dot") + dot.write_ps("config.ps") + +# Export curTick to user script. 
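The instantiate() hook defined above is what a user script calls once its config tree is built, and simulate() is the SWIG-wrapped entry point imported from main at the top of this file. A hedged sketch of the intended flow; the Root parameter shown is a placeholder, not a complete working configuration:

    import m5
    from m5.objects import *

    root = Root(clock='200MHz')    # hypothetical; a real script builds a full tree
    m5.instantiate(root)           # dump config.ini and hand it to the C++ side
    exit_event = m5.simulate()     # run until the simulation loop exits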
+def curTick(): + return main.cvar.curTick -# import the built-in object definitions -from objects import * +# register our C++ exit callback function with Python +atexit.register(main.doExitCleanup) +# This import allows user scripts to reference 'm5.objects.Foo' after +# just doing an 'import m5' (without an 'import m5.objects'). May not +# matter since most scripts will probably 'from m5.objects import *'. +import objects diff --git a/src/python/m5/config.py b/src/python/m5/config.py index 1e25e0d09..97e13c900 100644 --- a/src/python/m5/config.py +++ b/src/python/m5/config.py @@ -23,12 +23,14 @@ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# Authors: Steve Reinhardt +# Nathan Binkert -from __future__ import generators import os, re, sys, types, inspect import m5 -panic = m5.panic +from m5 import panic from convert import * from multidict import multidict @@ -134,8 +136,15 @@ class Singleton(type): def isSimObject(value): return isinstance(value, SimObject) -def isSimObjSequence(value): - if not isinstance(value, (list, tuple)): +def isSimObjectClass(value): + try: + return issubclass(value, SimObject) + except TypeError: + # happens if value is not a class at all + return False + +def isSimObjectSequence(value): + if not isinstance(value, (list, tuple)) or len(value) == 0: return False for val in value: @@ -144,9 +153,41 @@ def isSimObjSequence(value): return True +def isSimObjectClassSequence(value): + if not isinstance(value, (list, tuple)) or len(value) == 0: + return False + + for val in value: + if not isNullPointer(val) and not isSimObjectClass(val): + return False + + return True + +def isSimObjectOrSequence(value): + return isSimObject(value) or isSimObjectSequence(value) + +def isSimObjectClassOrSequence(value): + return isSimObjectClass(value) or isSimObjectClassSequence(value) + def isNullPointer(value): return isinstance(value, NullSimObject) +# Apply method to object. +# applyMethod(obj, 'meth', <args>) is equivalent to obj.meth(<args>) +def applyMethod(obj, meth, *args, **kwargs): + return getattr(obj, meth)(*args, **kwargs) + +# If the first argument is an (non-sequence) object, apply the named +# method with the given arguments. If the first argument is a +# sequence, apply the method to each element of the sequence (a la +# 'map'). +def applyOrMap(objOrSeq, meth, *args, **kwargs): + if not isinstance(objOrSeq, (list, tuple)): + return applyMethod(objOrSeq, meth, *args, **kwargs) + else: + return [applyMethod(o, meth, *args, **kwargs) for o in objOrSeq] + + # The metaclass for ConfigNode (and thus for everything that derives # from ConfigNode, including SimObject). This class controls how new # classes that derive from ConfigNode are instantiated, and provides @@ -167,24 +208,32 @@ class MetaSimObject(type): # and only allow "private" attributes to be passed to the base # __new__ (starting with underscore). def __new__(mcls, name, bases, dict): - # Copy "private" attributes (including special methods such as __new__) - # to the official dict. Everything else goes in _init_dict to be - # filtered in __init__. - cls_dict = {} - for key,val in dict.items(): - if key.startswith('_'): - cls_dict[key] = val - del dict[key] - cls_dict['_init_dict'] = dict + if dict.has_key('_init_dict'): + # must have been called from makeSubclass() rather than + # via Python class declaration; bypass filtering process. 
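The applyMethod/applyOrMap helpers above are small, but they carry the scalar-or-sequence handling used by the value-promotion code that follows. A quick illustration with a throwaway class, not m5 source:

    class Widget:
        def label(self, tag):
            return 'widget-' + tag

    applyOrMap(Widget(), 'label', 'a')                # -> 'widget-a'
    applyOrMap([Widget(), Widget()], 'label', 'b')    # -> ['widget-b', 'widget-b']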
+ cls_dict = dict + else: + # Copy "private" attributes (including special methods + # such as __new__) to the official dict. Everything else + # goes in _init_dict to be filtered in __init__. + cls_dict = {} + for key,val in dict.items(): + if key.startswith('_'): + cls_dict[key] = val + del dict[key] + cls_dict['_init_dict'] = dict return super(MetaSimObject, mcls).__new__(mcls, name, bases, cls_dict) - # initialization + # subclass initialization def __init__(cls, name, bases, dict): + # calls type.__init__()... I think that's a no-op, but leave + # it here just in case it's not. super(MetaSimObject, cls).__init__(name, bases, dict) # initialize required attributes cls._params = multidict() cls._values = multidict() + cls._instantiated = False # really instantiated or subclassed cls._anon_subclass_counter = 0 # We don't support multiple inheritance. If you want to, you @@ -194,27 +243,14 @@ class MetaSimObject(type): base = bases[0] + # the only time the following is not true is when we define + # the SimObject class itself if isinstance(base, MetaSimObject): cls._params.parent = base._params cls._values.parent = base._values + base._instantiated = True - # If your parent has a value in it that's a config node, clone - # it. Do this now so if we update any of the values' - # attributes we are updating the clone and not the original. - for key,val in base._values.iteritems(): - - # don't clone if (1) we're about to overwrite it with - # a local setting or (2) we've already cloned a copy - # from an earlier (more derived) base - if cls._init_dict.has_key(key) or cls._values.has_key(key): - continue - - if isSimObject(val): - cls._values[key] = val() - elif isSimObjSequence(val) and len(val): - cls._values[key] = [ v() for v in val ] - - # now process remaining _init_dict items + # now process the _init_dict items for key,val in cls._init_dict.items(): if isinstance(val, (types.FunctionType, types.TypeType)): type.__setattr__(cls, key, val) @@ -231,6 +267,27 @@ class MetaSimObject(type): else: setattr(cls, key, val) + # Pull the deep-copy memoization dict out of the class dict if + # it's there... + memo = cls.__dict__.get('_memo', {}) + + # Handle SimObject values + for key,val in cls._values.iteritems(): + # SimObject instances need to be promoted to classes. + # Existing classes should not have any instance values, so + # these can only occur at the lowest level dict (the + # parameters just being set in this class definition). + if isSimObjectOrSequence(val): + assert(val == cls._values.local[key]) + cls._values[key] = applyOrMap(val, 'makeClass', memo) + # SimObject classes need to be subclassed so that + # parameters that get set at this level only affect this + # level and derivatives. + elif isSimObjectClassOrSequence(val): + assert(not cls._values.local.has_key(key)) + cls._values[key] = applyOrMap(val, 'makeSubclass', {}, memo) + + def _set_keyword(cls, keyword, val, kwtype): if not isinstance(val, kwtype): raise TypeError, 'keyword %s has bad type %s (expecting %s)' % \ @@ -260,6 +317,11 @@ class MetaSimObject(type): param = cls._params.get(attr, None) if param: # It's ok: set attribute by delegating to 'object' class. 
+ if isSimObjectOrSequence(value) and cls._instantiated: + raise AttributeError, \ + "Cannot set SimObject parameter '%s' after\n" \ + " class %s has been instantiated or subclassed" \ + % (attr, cls.__name__) try: cls._values[attr] = param.convert(value) except Exception, e: @@ -268,7 +330,7 @@ class MetaSimObject(type): e.args = (msg, ) raise # I would love to get rid of this - elif isSimObject(value) or isSimObjSequence(value): + elif isSimObjectOrSequence(value): cls._values[attr] = value else: raise AttributeError, \ @@ -281,6 +343,22 @@ class MetaSimObject(type): raise AttributeError, \ "object '%s' has no attribute '%s'" % (cls.__name__, attr) + # Create a subclass of this class. Basically a function interface + # to the standard Python class definition mechanism, primarily for + # internal use. 'memo' dict param supports "deep copy" (really + # "deep subclass") operations... within a given operation, + # multiple references to a class should result in a single + # subclass object with multiple references to it (as opposed to + # mutiple unique subclasses). + def makeSubclass(cls, init_dict, memo = {}): + subcls = memo.get(cls) + if not subcls: + name = cls.__name__ + '_' + str(cls._anon_subclass_counter) + cls._anon_subclass_counter += 1 + subcls = MetaSimObject(name, (cls,), + { '_init_dict': init_dict, '_memo': memo }) + return subcls + # The ConfigNode class is the root of the special hierarchy. Most of # the code in this class deals with the configuration hierarchy itself # (parent/child node relationships). @@ -289,27 +367,80 @@ class SimObject(object): # get this metaclass. __metaclass__ = MetaSimObject - def __init__(self, _value_parent = None, **kwargs): + # __new__ operator allocates new instances of the class. We + # override it here just to support "deep instantiation" operation + # via the _memo dict. When recursively instantiating an object + # hierarchy we want to make sure that each class is instantiated + # only once, and that if there are multiple references to the same + # original class, we end up with the corresponding instantiated + # references all pointing to the same instance. + def __new__(cls, _memo = None, **kwargs): + if _memo is not None and _memo.has_key(cls): + # return previously instantiated object + assert(len(kwargs) == 0) + return _memo[cls] + else: + # Need a new one... if it needs to be memoized, this will + # happen in __init__. We defer the insertion until then + # so __init__ can use the memo dict to tell whether or not + # to perform the initialization. + return super(SimObject, cls).__new__(cls, **kwargs) + + # Initialize new instance previously allocated by __new__. For + # objects with SimObject-valued params, we need to recursively + # instantiate the classes represented by those param values as + # well (in a consistent "deep copy"-style fashion; see comment + # above). + def __init__(self, _memo = None, **kwargs): + if _memo is not None: + # We're inside a "deep instantiation" + assert(isinstance(_memo, dict)) + assert(len(kwargs) == 0) + if _memo.has_key(self.__class__): + # __new__ returned an existing, already initialized + # instance, so there's nothing to do here + assert(_memo[self.__class__] == self) + return + # no pre-existing object, so remember this one here + _memo[self.__class__] = self + else: + # This is a new top-level instantiation... don't memoize + # this objcet, but prepare to memoize any recursively + # instantiated objects. 
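The _memo dict threaded through __new__ and __init__ here gives a deep-copy-style guarantee: within one top-level instantiation, every reference to a given class resolves to a single shared instance. A stripped-down sketch of that pattern with hypothetical classes, not the m5 code itself:

    class Node(object):
        def __new__(cls, _memo=None):
            if _memo is not None and _memo.has_key(cls):
                return _memo[cls]              # reuse the earlier instance
            return super(Node, cls).__new__(cls)

        def __init__(self, _memo=None):
            if _memo is None:
                _memo = {}                     # top-level instantiation
            elif _memo.has_key(self.__class__):
                return                         # __new__ returned an existing instance
            _memo[self.__class__] = self

    memo = {}
    a, b = Node(memo), Node(memo)
    assert a is b    # one instance per class within a single memoized instantiation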
+ _memo = {} + + self.__class__._instantiated = True + self._children = {} - if _value_parent and type(_value_parent) != type(self): - # this was called as a type conversion rather than a clone - raise TypeError, "Cannot convert %s to %s" % \ - (_value_parent.__class__.__name__, self.__class__.__name__) - if not _value_parent: - _value_parent = self.__class__ - # clone values - self._values = multidict(_value_parent._values) - for key,val in _value_parent._values.iteritems(): - if isSimObject(val): - setattr(self, key, val()) - elif isSimObjSequence(val) and len(val): - setattr(self, key, [ v() for v in val ]) + # Inherit parameter values from class using multidict so + # individual value settings can be overridden. + self._values = multidict(self.__class__._values) + # For SimObject-valued parameters, the class should have + # classes (not instances) for the values. We need to + # instantiate these classes rather than just inheriting the + # class object. + for key,val in self.__class__._values.iteritems(): + if isSimObjectClass(val): + setattr(self, key, val(_memo)) + elif isSimObjectClassSequence(val) and len(val): + setattr(self, key, [ v(_memo) for v in val ]) # apply attribute assignments from keyword args, if any for key,val in kwargs.iteritems(): setattr(self, key, val) + # Use this instance as a template to create a new class. + def makeClass(self, memo = {}): + cls = memo.get(self) + if not cls: + cls = self.__class__.makeSubclass(self._values.local) + memo[self] = cls + return cls + + # Direct instantiation of instances (cloning) is no longer + # allowed; must generate class from instance first. def __call__(self, **kwargs): - return self.__class__(_value_parent = self, **kwargs) + raise TypeError, "cannot instantiate SimObject; "\ + "use makeClass() to make class first" def __getattr__(self, attr): if self._values.has_key(attr): @@ -338,7 +469,7 @@ class SimObject(object): e.args = (msg, ) raise # I would love to get rid of this - elif isSimObject(value) or isSimObjSequence(value): + elif isSimObjectOrSequence(value): pass else: raise AttributeError, "Class %s has no parameter %s" \ @@ -349,7 +480,7 @@ class SimObject(object): if isSimObject(value): value.set_path(self, attr) - elif isSimObjSequence(value): + elif isSimObjectSequence(value): value = SimObjVector(value) [v.set_path(self, "%s%d" % (attr, i)) for i,v in enumerate(value)] @@ -725,7 +856,7 @@ class ParamDesc(object): def __getattr__(self, attr): if attr == 'ptype': try: - ptype = eval(self.ptype_str, m5.__dict__) + ptype = eval(self.ptype_str, m5.objects.__dict__) if not isinstance(ptype, type): panic("Param qualifier is not a type: %s" % self.ptype) self.ptype = ptype @@ -772,7 +903,7 @@ class VectorParamDesc(ParamDesc): if isinstance(value, (list, tuple)): # list: coerce each element into new list tmp_list = [ ParamDesc.convert(self, v) for v in value ] - if isSimObjSequence(tmp_list): + if isSimObjectSequence(tmp_list): return SimObjVector(tmp_list) else: return VectorParamValue(tmp_list) @@ -794,7 +925,7 @@ class ParamFactory(object): # E.g., Param.Int(5, "number of widgets") def __call__(self, *args, **kwargs): - caller_frame = inspect.stack()[1][0] + caller_frame = inspect.currentframe().f_back ptype = None try: ptype = eval(self.ptype_str, @@ -1066,7 +1197,10 @@ class EthernetAddr(ParamValue): def __str__(self): if self.value == NextEthernetAddr: - return self.addr + if hasattr(self, 'addr'): + return self.addr + else: + return "NextEthernetAddr (unresolved)" else: return self.value @@ -1287,23 +1421,6 @@ 
AllMemory = AddrRange(0, MaxAddr) ##################################################################### -# The final hook to generate .ini files. Called from configuration -# script once config is built. -def instantiate(root): - global ticks_per_sec - ticks_per_sec = float(root.clock.frequency) - root.print_ini() - noDot = True # temporary until we fix dot - if not noDot: - dot = pydot.Dot() - instance.outputDot(dot) - dot.orientation = "portrait" - dot.size = "8.5,11" - dot.ranksep="equally" - dot.rank="samerank" - dot.write("config.dot") - dot.write_ps("config.ps") - # __all__ defines the list of symbols that get exported when # 'from config import *' is invoked. Try to keep this reasonably # short to avoid polluting other namespaces. @@ -1319,5 +1436,5 @@ __all__ = ['SimObject', 'ParamContext', 'Param', 'VectorParam', 'NetworkBandwidth', 'MemoryBandwidth', 'Range', 'AddrRange', 'MaxAddr', 'MaxTick', 'AllMemory', 'Null', 'NULL', - 'NextEthernetAddr', 'instantiate'] + 'NextEthernetAddr'] diff --git a/src/python/m5/convert.py b/src/python/m5/convert.py index 73181e985..580a579bc 100644 --- a/src/python/m5/convert.py +++ b/src/python/m5/convert.py @@ -23,6 +23,8 @@ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# Authors: Nathan Binkert # metric prefixes exa = 1.0e18 diff --git a/src/python/m5/multidict.py b/src/python/m5/multidict.py index fd40ebbbd..34fc3139b 100644 --- a/src/python/m5/multidict.py +++ b/src/python/m5/multidict.py @@ -23,13 +23,15 @@ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
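The multidict hunks below rename the instance's own storage from self.dict to self.local, making the local-versus-inherited split explicit (this is what cls._values.local relies on in config.py above). A small usage sketch of the layered lookup, assuming the multidict module is importable on its own:

    from multidict import multidict

    base = multidict()
    base['width'] = 4

    derived = multidict(base)    # base acts as the fallback parent
    derived['width'] = 8         # stored in derived.local, shadowing the parent
    derived['depth'] = 2

    base['width']                # -> 4, unchanged
    derived['width']             # -> 8
    base.has_key('depth')        # -> False: local entries never leak upward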
+# +# Authors: Nathan Binkert __all__ = [ 'multidict' ] class multidict(object): __nodefault = object() def __init__(self, parent = {}, **kwargs): - self.dict = dict(**kwargs) + self.local = dict(**kwargs) self.parent = parent self.deleted = {} @@ -40,11 +42,11 @@ class multidict(object): return `dict(self.items())` def __contains__(self, key): - return self.dict.has_key(key) or self.parent.has_key(key) + return self.local.has_key(key) or self.parent.has_key(key) def __delitem__(self, key): try: - del self.dict[key] + del self.local[key] except KeyError, e: if key in self.parent: self.deleted[key] = True @@ -53,11 +55,11 @@ class multidict(object): def __setitem__(self, key, value): self.deleted.pop(key, False) - self.dict[key] = value + self.local[key] = value def __getitem__(self, key): try: - return self.dict[key] + return self.local[key] except KeyError, e: if not self.deleted.get(key, False) and key in self.parent: return self.parent[key] @@ -65,15 +67,15 @@ class multidict(object): raise KeyError, e def __len__(self): - return len(self.dict) + len(self.parent) + return len(self.local) + len(self.parent) def next(self): - for key,value in self.dict.items(): + for key,value in self.local.items(): yield key,value if self.parent: for key,value in self.parent.next(): - if key not in self.dict and key not in self.deleted: + if key not in self.local and key not in self.deleted: yield key,value def has_key(self, key): @@ -114,22 +116,22 @@ class multidict(object): return self[key] except KeyError: self.deleted.pop(key, False) - self.dict[key] = default + self.local[key] = default return default def _dump(self): print 'multidict dump' node = self while isinstance(node, multidict): - print ' ', node.dict + print ' ', node.local node = node.parent def _dumpkey(self, key): values = [] node = self while isinstance(node, multidict): - if key in node.dict: - values.append(node.dict[key]) + if key in node.local: + values.append(node.local[key]) node = node.parent print key, values diff --git a/src/python/m5/objects/AlphaConsole.py b/src/python/m5/objects/AlphaConsole.py index 68e6089ab..329b8c5bd 100644 --- a/src/python/m5/objects/AlphaConsole.py +++ b/src/python/m5/objects/AlphaConsole.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * from Device import BasicPioDevice class AlphaConsole(BasicPioDevice): diff --git a/src/python/m5/objects/AlphaFullCPU.py b/src/python/m5/objects/AlphaFullCPU.py index 48989d057..2988305d3 100644 --- a/src/python/m5/objects/AlphaFullCPU.py +++ b/src/python/m5/objects/AlphaFullCPU.py @@ -1,13 +1,15 @@ -from m5 import * +from m5 import build_env +from m5.config import * from BaseCPU import BaseCPU class DerivAlphaFullCPU(BaseCPU): type = 'DerivAlphaFullCPU' - + activity = Param.Unsigned("Initial count") numThreads = Param.Unsigned("number of HW thread contexts") - if not build_env['FULL_SYSTEM']: - mem = Param.FunctionalMemory(NULL, "memory") + checker = Param.BaseCPU(NULL, "checker") + + cachePorts = Param.Unsigned("Cache Ports") decodeToFetchDelay = Param.Unsigned("Decode to fetch delay") renameToFetchDelay = Param.Unsigned("Rename to fetch delay") @@ -41,22 +43,26 @@ class DerivAlphaFullCPU(BaseCPU): executeFloatWidth = Param.Unsigned("Floating point execute width") executeBranchWidth = Param.Unsigned("Branch execute width") executeMemoryWidth = Param.Unsigned("Memory execute width") + fuPool = Param.FUPool(NULL, "Functional Unit pool") iewToCommitDelay = Param.Unsigned("Issue/Execute/Writeback to commit " "delay") renameToROBDelay = Param.Unsigned("Rename 
to reorder buffer delay") commitWidth = Param.Unsigned("Commit width") squashWidth = Param.Unsigned("Squash width") + trapLatency = Param.Tick("Trap latency") + fetchTrapLatency = Param.Tick("Fetch trap latency") - local_predictor_size = Param.Unsigned("Size of local predictor") - local_ctr_bits = Param.Unsigned("Bits per counter") - local_history_table_size = Param.Unsigned("Size of local history table") - local_history_bits = Param.Unsigned("Bits for the local history") - global_predictor_size = Param.Unsigned("Size of global predictor") - global_ctr_bits = Param.Unsigned("Bits per counter") - global_history_bits = Param.Unsigned("Bits of history") - choice_predictor_size = Param.Unsigned("Size of choice predictor") - choice_ctr_bits = Param.Unsigned("Bits of choice counters") + predType = Param.String("Branch predictor type ('local', 'tournament')") + localPredictorSize = Param.Unsigned("Size of local predictor") + localCtrBits = Param.Unsigned("Bits per counter") + localHistoryTableSize = Param.Unsigned("Size of local history table") + localHistoryBits = Param.Unsigned("Bits for the local history") + globalPredictorSize = Param.Unsigned("Size of global predictor") + globalCtrBits = Param.Unsigned("Bits per counter") + globalHistoryBits = Param.Unsigned("Bits of history") + choicePredictorSize = Param.Unsigned("Size of choice predictor") + choiceCtrBits = Param.Unsigned("Bits of choice counters") BTBEntries = Param.Unsigned("Number of BTB entries") BTBTagSize = Param.Unsigned("Size of the BTB tags, in bits") @@ -68,6 +74,8 @@ class DerivAlphaFullCPU(BaseCPU): LFSTSize = Param.Unsigned("Last fetched store table size") SSITSize = Param.Unsigned("Store set ID table size") + numRobs = Param.Unsigned("Number of Reorder Buffers"); + numPhysIntRegs = Param.Unsigned("Number of physical integer registers") numPhysFloatRegs = Param.Unsigned("Number of physical floating point " "registers") @@ -78,3 +86,13 @@ class DerivAlphaFullCPU(BaseCPU): function_trace = Param.Bool(False, "Enable function trace") function_trace_start = Param.Tick(0, "Cycle to start function trace") + + smtNumFetchingThreads = Param.Unsigned("SMT Number of Fetching Threads") + smtFetchPolicy = Param.String("SMT Fetch policy") + smtLSQPolicy = Param.String("SMT LSQ Sharing Policy") + smtLSQThreshold = Param.String("SMT LSQ Threshold Sharing Parameter") + smtIQPolicy = Param.String("SMT IQ Sharing Policy") + smtIQThreshold = Param.String("SMT IQ Threshold Sharing Parameter") + smtROBPolicy = Param.String("SMT ROB Sharing Policy") + smtROBThreshold = Param.String("SMT ROB Threshold Sharing Parameter") + smtCommitPolicy = Param.String("SMT Commit Policy") diff --git a/src/python/m5/objects/AlphaTLB.py b/src/python/m5/objects/AlphaTLB.py index 5edf8e13d..11c1792ee 100644 --- a/src/python/m5/objects/AlphaTLB.py +++ b/src/python/m5/objects/AlphaTLB.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * class AlphaTLB(SimObject): type = 'AlphaTLB' abstract = True diff --git a/src/python/m5/objects/BadDevice.py b/src/python/m5/objects/BadDevice.py index 9cb9a8f03..186b733fa 100644 --- a/src/python/m5/objects/BadDevice.py +++ b/src/python/m5/objects/BadDevice.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * from Device import BasicPioDevice class BadDevice(BasicPioDevice): diff --git a/src/python/m5/objects/BaseCPU.py b/src/python/m5/objects/BaseCPU.py index 49cb2a8f3..2e78578df 100644 --- a/src/python/m5/objects/BaseCPU.py +++ b/src/python/m5/objects/BaseCPU.py @@ -1,4 +1,6 @@ -from m5 import * +from m5 import 
build_env +from m5.config import * + class BaseCPU(SimObject): type = 'BaseCPU' abstract = True diff --git a/src/python/m5/objects/BaseCache.py b/src/python/m5/objects/BaseCache.py index 79d21572a..33f44759b 100644 --- a/src/python/m5/objects/BaseCache.py +++ b/src/python/m5/objects/BaseCache.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * from BaseMem import BaseMem class Prefetch(Enum): vals = ['none', 'tagged', 'stride', 'ghb'] diff --git a/src/python/m5/objects/Bridge.py b/src/python/m5/objects/Bridge.py index ada715ce9..880535755 100644 --- a/src/python/m5/objects/Bridge.py +++ b/src/python/m5/objects/Bridge.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * from MemObject import MemObject class Bridge(MemObject): diff --git a/src/python/m5/objects/Bus.py b/src/python/m5/objects/Bus.py index 8c5397281..c37dab438 100644 --- a/src/python/m5/objects/Bus.py +++ b/src/python/m5/objects/Bus.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * from MemObject import MemObject class Bus(MemObject): diff --git a/src/python/m5/objects/CoherenceProtocol.py b/src/python/m5/objects/CoherenceProtocol.py index 7013000d6..64b6cbacf 100644 --- a/src/python/m5/objects/CoherenceProtocol.py +++ b/src/python/m5/objects/CoherenceProtocol.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * class Coherence(Enum): vals = ['uni', 'msi', 'mesi', 'mosi', 'moesi'] class CoherenceProtocol(SimObject): diff --git a/src/python/m5/objects/Device.py b/src/python/m5/objects/Device.py index 2a71bbc65..7798f5f04 100644 --- a/src/python/m5/objects/Device.py +++ b/src/python/m5/objects/Device.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * from MemObject import MemObject class PioDevice(MemObject): diff --git a/src/python/m5/objects/DiskImage.py b/src/python/m5/objects/DiskImage.py index 0d55e9329..70d8b2e45 100644 --- a/src/python/m5/objects/DiskImage.py +++ b/src/python/m5/objects/DiskImage.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * class DiskImage(SimObject): type = 'DiskImage' abstract = True diff --git a/src/python/m5/objects/Ethernet.py b/src/python/m5/objects/Ethernet.py index 4286c71c8..418670592 100644 --- a/src/python/m5/objects/Ethernet.py +++ b/src/python/m5/objects/Ethernet.py @@ -1,4 +1,5 @@ -from m5 import * +from m5 import build_env +from m5.config import * from Device import DmaDevice from Pci import PciDevice diff --git a/src/python/m5/objects/FUPool.py b/src/python/m5/objects/FUPool.py new file mode 100644 index 000000000..cbf1089cf --- /dev/null +++ b/src/python/m5/objects/FUPool.py @@ -0,0 +1,5 @@ +from m5.config import * + +class FUPool(SimObject): + type = 'FUPool' + FUList = VectorParam.FUDesc("list of FU's for this pool") diff --git a/src/python/m5/objects/Ide.py b/src/python/m5/objects/Ide.py index 2403e6d36..9ee578177 100644 --- a/src/python/m5/objects/Ide.py +++ b/src/python/m5/objects/Ide.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * from Pci import PciDevice class IdeID(Enum): vals = ['master', 'slave'] diff --git a/src/python/m5/objects/IntrControl.py b/src/python/m5/objects/IntrControl.py index 66c82c182..514c3fc62 100644 --- a/src/python/m5/objects/IntrControl.py +++ b/src/python/m5/objects/IntrControl.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * class IntrControl(SimObject): type = 'IntrControl' cpu = Param.BaseCPU(Parent.any, "the cpu") diff --git a/src/python/m5/objects/MemObject.py b/src/python/m5/objects/MemObject.py index 4d68243e6..d957dae17 100644 --- 
a/src/python/m5/objects/MemObject.py +++ b/src/python/m5/objects/MemObject.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * class MemObject(SimObject): type = 'MemObject' diff --git a/src/python/m5/objects/MemTest.py b/src/python/m5/objects/MemTest.py index 34299faf0..9916d7cb4 100644 --- a/src/python/m5/objects/MemTest.py +++ b/src/python/m5/objects/MemTest.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * class MemTest(SimObject): type = 'MemTest' cache = Param.BaseCache("L1 cache") diff --git a/src/python/m5/objects/OzoneCPU.py b/src/python/m5/objects/OzoneCPU.py new file mode 100644 index 000000000..f2d9aea84 --- /dev/null +++ b/src/python/m5/objects/OzoneCPU.py @@ -0,0 +1,91 @@ +from m5 import build_env +from m5.config import * +from BaseCPU import BaseCPU + +class DerivOzoneCPU(BaseCPU): + type = 'DerivOzoneCPU' + + numThreads = Param.Unsigned("number of HW thread contexts") + + if not build_env['FULL_SYSTEM']: + mem = Param.FunctionalMemory(NULL, "memory") + + checker = Param.BaseCPU("Checker CPU") + + width = Param.Unsigned("Width") + frontEndWidth = Param.Unsigned("Front end width") + backEndWidth = Param.Unsigned("Back end width") + backEndSquashLatency = Param.Unsigned("Back end squash latency") + backEndLatency = Param.Unsigned("Back end latency") + maxInstBufferSize = Param.Unsigned("Maximum instruction buffer size") + maxOutstandingMemOps = Param.Unsigned("Maximum number of outstanding memory operations") + decodeToFetchDelay = Param.Unsigned("Decode to fetch delay") + renameToFetchDelay = Param.Unsigned("Rename to fetch delay") + iewToFetchDelay = Param.Unsigned("Issue/Execute/Writeback to fetch " + "delay") + commitToFetchDelay = Param.Unsigned("Commit to fetch delay") + fetchWidth = Param.Unsigned("Fetch width") + + renameToDecodeDelay = Param.Unsigned("Rename to decode delay") + iewToDecodeDelay = Param.Unsigned("Issue/Execute/Writeback to decode " + "delay") + commitToDecodeDelay = Param.Unsigned("Commit to decode delay") + fetchToDecodeDelay = Param.Unsigned("Fetch to decode delay") + decodeWidth = Param.Unsigned("Decode width") + + iewToRenameDelay = Param.Unsigned("Issue/Execute/Writeback to rename " + "delay") + commitToRenameDelay = Param.Unsigned("Commit to rename delay") + decodeToRenameDelay = Param.Unsigned("Decode to rename delay") + renameWidth = Param.Unsigned("Rename width") + + commitToIEWDelay = Param.Unsigned("Commit to " + "Issue/Execute/Writeback delay") + renameToIEWDelay = Param.Unsigned("Rename to " + "Issue/Execute/Writeback delay") + issueToExecuteDelay = Param.Unsigned("Issue to execute delay (internal " + "to the IEW stage)") + issueWidth = Param.Unsigned("Issue width") + executeWidth = Param.Unsigned("Execute width") + executeIntWidth = Param.Unsigned("Integer execute width") + executeFloatWidth = Param.Unsigned("Floating point execute width") + executeBranchWidth = Param.Unsigned("Branch execute width") + executeMemoryWidth = Param.Unsigned("Memory execute width") + + iewToCommitDelay = Param.Unsigned("Issue/Execute/Writeback to commit " + "delay") + renameToROBDelay = Param.Unsigned("Rename to reorder buffer delay") + commitWidth = Param.Unsigned("Commit width") + squashWidth = Param.Unsigned("Squash width") + + predType = Param.String("Type of branch predictor ('local', 'tournament')") + localPredictorSize = Param.Unsigned("Size of local predictor") + localCtrBits = Param.Unsigned("Bits per counter") + localHistoryTableSize = Param.Unsigned("Size of local history table") + localHistoryBits = Param.Unsigned("Bits for 
the local history") + globalPredictorSize = Param.Unsigned("Size of global predictor") + globalCtrBits = Param.Unsigned("Bits per counter") + globalHistoryBits = Param.Unsigned("Bits of history") + choicePredictorSize = Param.Unsigned("Size of choice predictor") + choiceCtrBits = Param.Unsigned("Bits of choice counters") + + BTBEntries = Param.Unsigned("Number of BTB entries") + BTBTagSize = Param.Unsigned("Size of the BTB tags, in bits") + + RASSize = Param.Unsigned("RAS size") + + LQEntries = Param.Unsigned("Number of load queue entries") + SQEntries = Param.Unsigned("Number of store queue entries") + LFSTSize = Param.Unsigned("Last fetched store table size") + SSITSize = Param.Unsigned("Store set ID table size") + + numPhysIntRegs = Param.Unsigned("Number of physical integer registers") + numPhysFloatRegs = Param.Unsigned("Number of physical floating point " + "registers") + numIQEntries = Param.Unsigned("Number of instruction queue entries") + numROBEntries = Param.Unsigned("Number of reorder buffer entries") + + instShiftAmt = Param.Unsigned("Number of bits to shift instructions by") + + function_trace = Param.Bool(False, "Enable function trace") + function_trace_start = Param.Tick(0, "Cycle to start function trace") diff --git a/src/python/m5/objects/Pci.py b/src/python/m5/objects/Pci.py index 85cefcd44..9e1e91b13 100644 --- a/src/python/m5/objects/Pci.py +++ b/src/python/m5/objects/Pci.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * from Device import BasicPioDevice, DmaDevice class PciConfigData(SimObject): diff --git a/src/python/m5/objects/PhysicalMemory.py b/src/python/m5/objects/PhysicalMemory.py index c59910093..bed90d555 100644 --- a/src/python/m5/objects/PhysicalMemory.py +++ b/src/python/m5/objects/PhysicalMemory.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * from MemObject import * class PhysicalMemory(MemObject): diff --git a/src/python/m5/objects/Platform.py b/src/python/m5/objects/Platform.py index 4da0ffab4..89fee9991 100644 --- a/src/python/m5/objects/Platform.py +++ b/src/python/m5/objects/Platform.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * class Platform(SimObject): type = 'Platform' abstract = True diff --git a/src/python/m5/objects/Process.py b/src/python/m5/objects/Process.py index 60b00229e..0091d8654 100644 --- a/src/python/m5/objects/Process.py +++ b/src/python/m5/objects/Process.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * class Process(SimObject): type = 'Process' abstract = True diff --git a/src/python/m5/objects/Repl.py b/src/python/m5/objects/Repl.py index afd256082..8e9f1094f 100644 --- a/src/python/m5/objects/Repl.py +++ b/src/python/m5/objects/Repl.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * class Repl(SimObject): type = 'Repl' abstract = True diff --git a/src/python/m5/objects/Root.py b/src/python/m5/objects/Root.py index 205a93c76..373475a7a 100644 --- a/src/python/m5/objects/Root.py +++ b/src/python/m5/objects/Root.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * from Serialize import Serialize from Statistics import Statistics from Trace import Trace diff --git a/src/python/m5/objects/SimConsole.py b/src/python/m5/objects/SimConsole.py index df3061908..9e1452c6d 100644 --- a/src/python/m5/objects/SimConsole.py +++ b/src/python/m5/objects/SimConsole.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * class ConsoleListener(SimObject): type = 'ConsoleListener' port = Param.TcpPort(3456, "listen port") diff --git 
a/src/python/m5/objects/SimpleDisk.py b/src/python/m5/objects/SimpleDisk.py index e34155ace..44ef709af 100644 --- a/src/python/m5/objects/SimpleDisk.py +++ b/src/python/m5/objects/SimpleDisk.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * class SimpleDisk(SimObject): type = 'SimpleDisk' disk = Param.DiskImage("Disk Image") diff --git a/src/python/m5/objects/SimpleOzoneCPU.py b/src/python/m5/objects/SimpleOzoneCPU.py new file mode 100644 index 000000000..5d968cab0 --- /dev/null +++ b/src/python/m5/objects/SimpleOzoneCPU.py @@ -0,0 +1,87 @@ +from m5 import build_env +from m5.config import * +from BaseCPU import BaseCPU + +class SimpleOzoneCPU(BaseCPU): + type = 'SimpleOzoneCPU' + + numThreads = Param.Unsigned("number of HW thread contexts") + + if not build_env['FULL_SYSTEM']: + mem = Param.FunctionalMemory(NULL, "memory") + + width = Param.Unsigned("Width") + frontEndWidth = Param.Unsigned("Front end width") + backEndWidth = Param.Unsigned("Back end width") + backEndSquashLatency = Param.Unsigned("Back end squash latency") + backEndLatency = Param.Unsigned("Back end latency") + maxInstBufferSize = Param.Unsigned("Maximum instruction buffer size") + decodeToFetchDelay = Param.Unsigned("Decode to fetch delay") + renameToFetchDelay = Param.Unsigned("Rename to fetch delay") + iewToFetchDelay = Param.Unsigned("Issue/Execute/Writeback to fetch " + "delay") + commitToFetchDelay = Param.Unsigned("Commit to fetch delay") + fetchWidth = Param.Unsigned("Fetch width") + + renameToDecodeDelay = Param.Unsigned("Rename to decode delay") + iewToDecodeDelay = Param.Unsigned("Issue/Execute/Writeback to decode " + "delay") + commitToDecodeDelay = Param.Unsigned("Commit to decode delay") + fetchToDecodeDelay = Param.Unsigned("Fetch to decode delay") + decodeWidth = Param.Unsigned("Decode width") + + iewToRenameDelay = Param.Unsigned("Issue/Execute/Writeback to rename " + "delay") + commitToRenameDelay = Param.Unsigned("Commit to rename delay") + decodeToRenameDelay = Param.Unsigned("Decode to rename delay") + renameWidth = Param.Unsigned("Rename width") + + commitToIEWDelay = Param.Unsigned("Commit to " + "Issue/Execute/Writeback delay") + renameToIEWDelay = Param.Unsigned("Rename to " + "Issue/Execute/Writeback delay") + issueToExecuteDelay = Param.Unsigned("Issue to execute delay (internal " + "to the IEW stage)") + issueWidth = Param.Unsigned("Issue width") + executeWidth = Param.Unsigned("Execute width") + executeIntWidth = Param.Unsigned("Integer execute width") + executeFloatWidth = Param.Unsigned("Floating point execute width") + executeBranchWidth = Param.Unsigned("Branch execute width") + executeMemoryWidth = Param.Unsigned("Memory execute width") + + iewToCommitDelay = Param.Unsigned("Issue/Execute/Writeback to commit " + "delay") + renameToROBDelay = Param.Unsigned("Rename to reorder buffer delay") + commitWidth = Param.Unsigned("Commit width") + squashWidth = Param.Unsigned("Squash width") + + localPredictorSize = Param.Unsigned("Size of local predictor") + localCtrBits = Param.Unsigned("Bits per counter") + localHistoryTableSize = Param.Unsigned("Size of local history table") + localHistoryBits = Param.Unsigned("Bits for the local history") + globalPredictorSize = Param.Unsigned("Size of global predictor") + globalCtrBits = Param.Unsigned("Bits per counter") + globalHistoryBits = Param.Unsigned("Bits of history") + choicePredictorSize = Param.Unsigned("Size of choice predictor") + choiceCtrBits = Param.Unsigned("Bits of choice counters") + + BTBEntries = Param.Unsigned("Number of BTB 
entries") + BTBTagSize = Param.Unsigned("Size of the BTB tags, in bits") + + RASSize = Param.Unsigned("RAS size") + + LQEntries = Param.Unsigned("Number of load queue entries") + SQEntries = Param.Unsigned("Number of store queue entries") + LFSTSize = Param.Unsigned("Last fetched store table size") + SSITSize = Param.Unsigned("Store set ID table size") + + numPhysIntRegs = Param.Unsigned("Number of physical integer registers") + numPhysFloatRegs = Param.Unsigned("Number of physical floating point " + "registers") + numIQEntries = Param.Unsigned("Number of instruction queue entries") + numROBEntries = Param.Unsigned("Number of reorder buffer entries") + + instShiftAmt = Param.Unsigned("Number of bits to shift instructions by") + + function_trace = Param.Bool(False, "Enable function trace") + function_trace_start = Param.Tick(0, "Cycle to start function trace") diff --git a/src/python/m5/objects/System.py b/src/python/m5/objects/System.py index 622b5a870..9a1e1d690 100644 --- a/src/python/m5/objects/System.py +++ b/src/python/m5/objects/System.py @@ -1,4 +1,5 @@ -from m5 import * +from m5 import build_env +from m5.config import * class System(SimObject): type = 'System' @@ -7,8 +8,6 @@ class System(SimObject): boot_cpu_frequency = Param.Frequency(Self.cpu[0].clock.frequency, "boot processor frequency") init_param = Param.UInt64(0, "numerical value to pass into simulator") - bin = Param.Bool(False, "is this system binned") - binned_fns = VectorParam.String([], "functions broken down and binned") boot_osflags = Param.String("a", "boot flags to pass to the kernel") kernel = Param.String("file that contains the kernel code") readfile = Param.String("", "file to read startup script from") diff --git a/src/python/m5/objects/Tsunami.py b/src/python/m5/objects/Tsunami.py index 27ea0bce8..4613571d8 100644 --- a/src/python/m5/objects/Tsunami.py +++ b/src/python/m5/objects/Tsunami.py @@ -1,4 +1,4 @@ -from m5 import * +from m5.config import * from Device import BasicPioDevice from Platform import Platform diff --git a/src/python/m5/objects/Uart.py b/src/python/m5/objects/Uart.py index 54754aeb9..8e1fd1a37 100644 --- a/src/python/m5/objects/Uart.py +++ b/src/python/m5/objects/Uart.py @@ -1,4 +1,5 @@ -from m5 import * +from m5 import build_env +from m5.config import * from Device import BasicPioDevice class Uart(BasicPioDevice): diff --git a/src/python/m5/smartdict.py b/src/python/m5/smartdict.py index cd38d7326..d85dbd517 100644 --- a/src/python/m5/smartdict.py +++ b/src/python/m5/smartdict.py @@ -23,6 +23,8 @@ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# Authors: Nathan Binkert # The SmartDict class fixes a couple of issues with using the content # of os.environ or similar dicts of strings as Python variables: |