author     Joel Hestness <jthestness@gmail.com>    2015-08-14 00:19:37 -0500
committer  Joel Hestness <jthestness@gmail.com>    2015-08-14 00:19:37 -0500
commit     9567c839fecfdb29a59f9da50cf706fcb22a2bb1 (patch)
tree       395d523bf8b5ffa038f2be1c9f60923b91d26bfa /src/mem/ruby/structures
parent     c58bee829f88e7360b631771852efa097388a5da (diff)
download   gem5-9567c839fecfdb29a59f9da50cf706fcb22a2bb1.tar.xz
ruby: Remove the RubyCache/CacheMemory latency
The RubyCache (CacheMemory) latency parameter is only used for top-level caches instantiated for Ruby coherence protocols. However, the top-level cache hit latency is assessed by the Sequencer as accesses flow through to the cache hierarchy. Further, protocol state machines should be enforcing these cache hit latencies, but RubyCaches do not expose their latency to any existing state machines through the SLICC/C++ interface. Thus, the RubyCache latency parameter is superfluous for all caches, and it is confusing for users.

As a step toward pushing L0/L1 cache hit latency into the top-level cache controllers, move these latencies out of the RubyCache declarations and over to their Sequencers. Eventually, these Sequencer parameters should be exposed as parameters to the top-level cache controllers, which should assess the latency.

NOTE: Assessing these latencies in the cache controllers will require modifying each controller to eliminate instantaneous Ruby hit callbacks in transitions that finish accesses, which is likely a large undertaking.
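For illustration, a minimal sketch of a protocol configuration after this change, with the hit latency supplied to the Sequencer instead of the RubyCache. The Sequencer parameter names (icache_hit_latency, dcache_hit_latency) are assumptions about the Sequencer-side follow-up and are not shown in this diff:

    from m5.objects import RubyCache, RubySequencer

    # After this patch, RubyCache no longer accepts a 'latency' parameter.
    l1i_cache = RubyCache(size = '32kB', assoc = 2)
    l1d_cache = RubyCache(size = '32kB', assoc = 8)

    # The hit latency is passed to the Sequencer instead. The parameter
    # names below are assumptions, not confirmed by this changeset.
    sequencer = RubySequencer(icache = l1i_cache,
                              dcache = l1d_cache,
                              icache_hit_latency = 2,
                              dcache_hit_latency = 2)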
Diffstat (limited to 'src/mem/ruby/structures')
-rw-r--r--  src/mem/ruby/structures/Cache.py       | 1 -
-rw-r--r--  src/mem/ruby/structures/CacheMemory.cc | 1 -
-rw-r--r--  src/mem/ruby/structures/CacheMemory.hh | 3 ---
3 files changed, 0 insertions(+), 5 deletions(-)
diff --git a/src/mem/ruby/structures/Cache.py b/src/mem/ruby/structures/Cache.py
index 7f26e659f..4eb87ac74 100644
--- a/src/mem/ruby/structures/Cache.py
+++ b/src/mem/ruby/structures/Cache.py
@@ -37,7 +37,6 @@ class RubyCache(SimObject):
     cxx_class = 'CacheMemory'
     cxx_header = "mem/ruby/structures/CacheMemory.hh"
     size = Param.MemorySize("capacity in bytes");
-    latency = Param.Cycles("");
     assoc = Param.Int("");
     replacement_policy = Param.ReplacementPolicy(PseudoLRUReplacementPolicy(),
         "")
diff --git a/src/mem/ruby/structures/CacheMemory.cc b/src/mem/ruby/structures/CacheMemory.cc
index e444ae09c..64a8e9e8a 100644
--- a/src/mem/ruby/structures/CacheMemory.cc
+++ b/src/mem/ruby/structures/CacheMemory.cc
@@ -60,7 +60,6 @@ CacheMemory::CacheMemory(const Params *p)
              p->start_index_bit, p->ruby_system)
 {
     m_cache_size = p->size;
-    m_latency = p->latency;
     m_cache_assoc = p->assoc;
     m_replacementPolicy_ptr = p->replacement_policy;
     m_replacementPolicy_ptr->setCache(this);
diff --git a/src/mem/ruby/structures/CacheMemory.hh b/src/mem/ruby/structures/CacheMemory.hh
index 57f2885b6..792d8fd93 100644
--- a/src/mem/ruby/structures/CacheMemory.hh
+++ b/src/mem/ruby/structures/CacheMemory.hh
@@ -96,7 +96,6 @@ class CacheMemory : public SimObject
     AbstractCacheEntry* lookup(const Address& address);
     const AbstractCacheEntry* lookup(const Address& address) const;
-    Cycles getLatency() const { return m_latency; }
     Cycles getTagLatency() const { return tagArray.getLatency(); }
     Cycles getDataLatency() const { return dataArray.getLatency(); }
@@ -159,8 +158,6 @@ class CacheMemory : public SimObject
     CacheMemory& operator=(const CacheMemory& obj);
   private:
-    Cycles m_latency;
-
     // Data Members (m_prefix)
     bool m_is_instruction_only_cache;
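The Sequencer-side counterpart of this change lives outside 'src/mem/ruby/structures' and therefore does not appear in the diffstat above. As a hypothetical sketch only (file location, parameter names, and defaults are assumptions, not taken from this patch), the latency removed from RubyCache would reappear on the Sequencer roughly as:

    # Hypothetical additions to src/mem/ruby/system/Sequencer.py; names and
    # defaults are assumptions for illustration, not part of this diff.
    from m5.params import Param
    from RubyPort import RubyPort

    class RubySequencer(RubyPort):
        type = 'RubySequencer'
        cxx_class = 'Sequencer'
        cxx_header = "mem/ruby/system/Sequencer.hh"
        dcache_hit_latency = Param.Cycles(1, "L1 dcache hit latency")
        icache_hit_latency = Param.Cycles(1, "L1 icache hit latency")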