author      Brad Beckmann <Brad.Beckmann@amd.com>    2010-08-20 11:46:12 -0700
committer   Brad Beckmann <Brad.Beckmann@amd.com>    2010-08-20 11:46:12 -0700
commit      54d76f0ce5d721ad3b4de168db98054844e634cc (patch)
tree        19b74bf031e5aa9ecae18b7a1a0d36b5e0fc466c
parent      a3b4b9b3e3f8a1462b34d758199312d33af4b0c7 (diff)
download    gem5-54d76f0ce5d721ad3b4de168db98054844e634cc.tar.xz
ruby: Fixed L2 cache miss profiling
Fixed L2 cache miss profiling for the MOESI_CMP_token protocol
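Background for the change: the L2 controller's uu_profileMiss action had been commented out (the old profile_L2Cache_miss() call was marked "AccessModeType not implemented"), so L2 demand misses were never counted. With this patch the L1 controller profiles a miss on the CacheMemory that owns the block (I- or D-cache) via profileMiss(), and the L2 controller profiles it via a new CacheMemory::profileGenericRequest() hook that takes the GenericRequestType (GETS or GETX) converted from the incoming coherence request. CacheProfiler keeps the two kinds of samples in separate vectors, indexed by CacheRequestType and GenericRequestType respectively, and merges them only when it prints per-type percentages; the per-miss message-size histogram is dropped. The snippet below is a minimal, self-contained C++ sketch of that bookkeeping scheme, not the gem5 sources; the enum values and the MiniCacheProfiler class are illustrative stand-ins.

// Illustrative stand-ins; the real enums live in the generated Ruby headers.
#include <cassert>
#include <iostream>
#include <vector>

enum CacheRequestType   { CacheRequestType_LD, CacheRequestType_ST, CacheRequestType_NUM };
enum GenericRequestType { GenericRequestType_GETS, GenericRequestType_GETX, GenericRequestType_NUM };

class MiniCacheProfiler
{
  public:
    // L1 path: a miss typed by the request type carried in the CacheMsg.
    void addCacheStatSample(CacheRequestType t)
    {
        m_cacheRequestType[t]++;
        m_misses++;
    }

    // L2 path: a miss typed by the protocol-independent generic request type.
    void addGenericStatSample(GenericRequestType t)
    {
        m_genericRequestType[t]++;
        m_misses++;
    }

    void printStats(std::ostream& out) const
    {
        long requests = 0;
        for (long n : m_cacheRequestType)
            requests += n;
        for (long n : m_genericRequestType)
            requests += n;
        assert(m_misses == requests);   // the same cross-check the patch asserts
        out << "total_misses: " << requests << std::endl;
    }

  private:
    std::vector<long> m_cacheRequestType = std::vector<long>(CacheRequestType_NUM, 0);
    std::vector<long> m_genericRequestType = std::vector<long>(GenericRequestType_NUM, 0);
    long m_misses = 0;
};

int main()
{
    MiniCacheProfiler profiler;
    profiler.addCacheStatSample(CacheRequestType_LD);        // e.g. an L1 demand load miss
    profiler.addGenericStatSample(GenericRequestType_GETS);  // e.g. an L2 miss on a local GETS
    profiler.printStats(std::cout);                          // prints "total_misses: 2"
    return 0;
}

Keeping the two counters separate avoids mapping protocol-specific coherence requests back onto CacheRequestType, while the assert preserves the invariant that every recorded miss is attributed to exactly one request type.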
-rw-r--r--   src/mem/protocol/MOESI_CMP_token-L1cache.sm                   |  6
-rw-r--r--   src/mem/protocol/MOESI_CMP_token-L2cache.sm                   | 38
-rw-r--r--   src/mem/protocol/MOESI_CMP_token-msg.sm                       | 11
-rw-r--r--   src/mem/protocol/RubySlicc_Profiler.sm                        |  1
-rw-r--r--   src/mem/protocol/RubySlicc_Types.sm                           |  5
-rw-r--r--   src/mem/ruby/profiler/CacheProfiler.cc                        | 56
-rw-r--r--   src/mem/ruby/profiler/CacheProfiler.hh                        | 15
-rw-r--r--   src/mem/ruby/slicc_interface/RubySlicc_Profiler_interface.hh |  2
-rw-r--r--   src/mem/ruby/system/CacheMemory.cc                            | 15
-rw-r--r--   src/mem/ruby/system/CacheMemory.hh                            |  5

10 files changed, 107 insertions(+), 47 deletions(-)
diff --git a/src/mem/protocol/MOESI_CMP_token-L1cache.sm b/src/mem/protocol/MOESI_CMP_token-L1cache.sm
index 381debce5..4af3338d8 100644
--- a/src/mem/protocol/MOESI_CMP_token-L1cache.sm
+++ b/src/mem/protocol/MOESI_CMP_token-L1cache.sm
@@ -1357,7 +1357,11 @@ machine(L1Cache, "Token protocol")
   action(uu_profileMiss, "\u", desc="Profile the demand miss") {
     peek(mandatoryQueue_in, CacheMsg) {
-      // profile_miss(in_msg, id);
+      if (L1DcacheMemory.isTagPresent(address)) {
+        L1DcacheMemory.profileMiss(in_msg);
+      } else {
+        L1IcacheMemory.profileMiss(in_msg);
+      }
     }
   }
diff --git a/src/mem/protocol/MOESI_CMP_token-L2cache.sm b/src/mem/protocol/MOESI_CMP_token-L2cache.sm
index 706b286fa..fcc91f223 100644
--- a/src/mem/protocol/MOESI_CMP_token-L2cache.sm
+++ b/src/mem/protocol/MOESI_CMP_token-L2cache.sm
@@ -304,6 +304,17 @@ machine(L2Cache, "Token protocol")
     }
   }
 
+  GenericRequestType convertToGenericType(CoherenceRequestType type) {
+    if(type == CoherenceRequestType:GETS) {
+      return GenericRequestType:GETS;
+    } else if(type == CoherenceRequestType:GETX) {
+      return GenericRequestType:GETX;
+    } else {
+      DEBUG_EXPR(type);
+      error("invalid CoherenceRequestType");
+    }
+  }
+
   // ** OUT_PORTS **
   out_port(globalRequestNetwork_out, RequestMsg, GlobalRequestFromL2Cache);
   out_port(localRequestNetwork_out, RequestMsg, L1RequestFromL2Cache);
@@ -918,12 +929,13 @@ machine(L2Cache, "Token protocol")
     L2cacheMemory.deallocate(address);
   }
 
-  //action(uu_profileMiss, "\u", desc="Profile the demand miss") {
-  //  peek(L1requestNetwork_in, RequestMsg) {
-       // AccessModeType not implemented
-       //profile_L2Cache_miss(convertToGenericType(in_msg.Type), in_msg.AccessMode, MessageSizeTypeToInt(in_msg.MessageSize), in_msg.Prefetch, machineIDToNodeID(in_msg.Requestor));
-  //  }
-  //}
+  action(uu_profileMiss, "\u", desc="Profile the demand miss") {
+    peek(L1requestNetwork_in, RequestMsg) {
+      L2cacheMemory.profileGenericRequest(convertToGenericType(in_msg.Type),
+                                          in_msg.AccessMode,
+                                          in_msg.Prefetch);
+    }
+  }
 
   action(w_assertIncomingDataAndCacheDataMatch, "w",
          desc="Assert that the incoming data and the data in the cache match") {
@@ -961,7 +973,7 @@ machine(L2Cache, "Token protocol")
   transition(NP, {L1_GETS, L1_GETX}) {
     a_broadcastLocalRequest;
     r_markNewSharer;
-    //uu_profileMiss;
+    uu_profileMiss;
     o_popL1RequestQueue;
   }
@@ -1012,7 +1024,7 @@ machine(L2Cache, "Token protocol")
     a_broadcastLocalRequest;
     tt_sendLocalAckWithCollectedTokens;  // send any tokens we have collected
     r_markNewSharer;
-    //uu_profileMiss;
+    uu_profileMiss;
     o_popL1RequestQueue;
   }
@@ -1020,7 +1032,7 @@ machine(L2Cache, "Token protocol")
     a_broadcastLocalRequest;
     tt_sendLocalAckWithCollectedTokens;  // send any tokens we have collected
     r_markNewSharer;
-    //uu_profileMiss;
+    uu_profileMiss;
     o_popL1RequestQueue;
   }
@@ -1181,7 +1193,7 @@ machine(L2Cache, "Token protocol")
     tt_sendLocalAckWithCollectedTokens;
     r_markNewSharer;
     r_setMRU;
-    //uu_profileMiss;
+    uu_profileMiss;
     o_popL1RequestQueue;
   }
@@ -1294,7 +1306,7 @@ machine(L2Cache, "Token protocol")
     k_dataAndAllTokensFromL2CacheToL1Requestor;
     r_markNewSharer;
     r_setMRU;
-    //uu_profileMiss;
+    uu_profileMiss;
     o_popL1RequestQueue;
   }
@@ -1382,7 +1394,7 @@ machine(L2Cache, "Token protocol")
   transition(I_L, {L1_GETX, L1_GETS}) {
     a_broadcastLocalRequest;
     r_markNewSharer;
-    //uu_profileMiss;
+    uu_profileMiss;
     o_popL1RequestQueue;
   }
@@ -1391,7 +1403,7 @@ machine(L2Cache, "Token protocol")
     a_broadcastLocalRequest;
     tt_sendLocalAckWithCollectedTokens;
     r_markNewSharer;
     r_setMRU;
-    //uu_profileMiss;
+    uu_profileMiss;
     o_popL1RequestQueue;
   }
diff --git a/src/mem/protocol/MOESI_CMP_token-msg.sm b/src/mem/protocol/MOESI_CMP_token-msg.sm
index 40c16b5e1..5bc0b81ab 100644
--- a/src/mem/protocol/MOESI_CMP_token-msg.sm
+++ b/src/mem/protocol/MOESI_CMP_token-msg.sm
@@ -144,14 +144,3 @@ structure(DMAResponseMsg, desc="...", interface="NetworkMessage") {
   DataBlock DataBlk, desc="DataBlk attached to this request";
   MessageSizeType MessageSize, desc="size category of the message";
 }
-
-//GenericRequestType convertToGenericType(CoherenceRequestType type) {
-//  if(type == CoherenceRequestType:GETS) {
-//    return GenericRequestType:GETS;
-//  } else if(type == CoherenceRequestType:GETX) {
-//    return GenericRequestType:GETX;
-//  } else {
-//    DEBUG_EXPR(type);
-//    error("invalid CoherenceRequestType");
-//  }
-//}
diff --git a/src/mem/protocol/RubySlicc_Profiler.sm b/src/mem/protocol/RubySlicc_Profiler.sm
index ce1183e22..ed6b10d8e 100644
--- a/src/mem/protocol/RubySlicc_Profiler.sm
+++ b/src/mem/protocol/RubySlicc_Profiler.sm
@@ -40,7 +40,6 @@ void profile_miss(CacheMsg msg);
 void profile_L1Cache_miss(CacheMsg msg, NodeID l1cacheID);
 
 // used by CMP protocols
-void profile_L2Cache_miss(GenericRequestType requestType, AccessModeType type, int msgSize, PrefetchBit pfBit, NodeID l2cacheID);
 void profile_request(std::string L1CacheStateStr, std::string L2CacheStateStr, std::string directoryStateStr, std::string requestTypeStr);
 void profileMessageReordering(bool wasReordered);
diff --git a/src/mem/protocol/RubySlicc_Types.sm b/src/mem/protocol/RubySlicc_Types.sm
index 7fc817600..8dcdabeb8 100644
--- a/src/mem/protocol/RubySlicc_Types.sm
+++ b/src/mem/protocol/RubySlicc_Types.sm
@@ -126,6 +126,11 @@ external_type(CacheMemory) {
   void changePermission(Address, AccessPermission);
   bool isTagPresent(Address);
   void profileMiss(CacheMsg);
+
+  void profileGenericRequest(GenericRequestType,
+                             AccessModeType,
+                             PrefetchBit);
+
   void setMRU(Address);
 }
diff --git a/src/mem/ruby/profiler/CacheProfiler.cc b/src/mem/ruby/profiler/CacheProfiler.cc
index 006617190..a969b9074 100644
--- a/src/mem/ruby/profiler/CacheProfiler.cc
+++ b/src/mem/ruby/profiler/CacheProfiler.cc
@@ -33,7 +33,7 @@ using namespace std;
 
 CacheProfiler::CacheProfiler(const string& description)
-    : m_requestTypeVec(int(CacheRequestType_NUM))
+    : m_cacheRequestType(int(CacheRequestType_NUM)), m_genericRequestType(int(GenericRequestType_NUM))
 {
     m_description = description;
@@ -60,18 +60,33 @@ CacheProfiler::printStats(ostream& out) const
     int requests = 0;
     for (int i = 0; i < int(CacheRequestType_NUM); i++) {
-        requests += m_requestTypeVec[i];
+        requests += m_cacheRequestType[i];
+    }
+
+    for (int i = 0; i < int(GenericRequestType_NUM); i++) {
+        requests += m_genericRequestType[i];
     }
 
     assert(m_misses == requests);
 
     if (requests > 0) {
         for (int i = 0; i < int(CacheRequestType_NUM); i++) {
-            if (m_requestTypeVec[i] > 0) {
+            if (m_cacheRequestType[i] > 0) {
                 out << description << "_request_type_"
                     << CacheRequestType_to_string(CacheRequestType(i))
                     << ": "
-                    << 100.0 * (double)m_requestTypeVec[i] /
+                    << 100.0 * (double)m_cacheRequestType[i] /
                        (double)requests
                     << "%" << endl;
             }
         }
+
+        for (int i = 0; i < int(GenericRequestType_NUM); i++) {
+            if (m_genericRequestType[i] > 0) {
+                out << description << "_request_type_"
+                    << GenericRequestType_to_string(GenericRequestType(i))
+                    << ": "
+                    << 100.0 * (double)m_genericRequestType[i] /
+                       (double)requests
+                    << "%" << endl;
+            }
+        }
@@ -90,7 +105,6 @@ CacheProfiler::printStats(ostream& out) const
         }
     }
 
-    out << description << "_request_size: " << m_requestSize << endl;
     out << endl;
 }
@@ -98,9 +112,11 @@ void
 CacheProfiler::clearStats()
 {
     for (int i = 0; i < int(CacheRequestType_NUM); i++) {
-        m_requestTypeVec[i] = 0;
+        m_cacheRequestType[i] = 0;
+    }
+    for (int i = 0; i < int(GenericRequestType_NUM); i++) {
+        m_genericRequestType[i] = 0;
     }
-    m_requestSize.clear();
     m_misses = 0;
     m_demand_misses = 0;
     m_prefetches = 0;
@@ -112,16 +128,30 @@ CacheProfiler::clearStats()
 }
 
 void
-CacheProfiler::addStatSample(CacheRequestType requestType,
-                             AccessModeType type, int msgSize,
+CacheProfiler::addCacheStatSample(CacheRequestType requestType,
+                                  AccessModeType accessType,
+                                  PrefetchBit pfBit)
+{
+    m_cacheRequestType[requestType]++;
+    addStatSample(accessType, pfBit);
+}
+
+void
+CacheProfiler::addGenericStatSample(GenericRequestType requestType,
+                                    AccessModeType accessType,
+                                    PrefetchBit pfBit)
+{
+    m_genericRequestType[requestType]++;
+    addStatSample(accessType, pfBit);
+}
+
+void
+CacheProfiler::addStatSample(AccessModeType accessType,
                              PrefetchBit pfBit)
 {
     m_misses++;
-    m_requestTypeVec[requestType]++;
-
-    m_accessModeTypeHistogram[type]++;
-    m_requestSize.add(msgSize);
+    m_accessModeTypeHistogram[accessType]++;
 
     if (pfBit == PrefetchBit_No) {
         m_demand_misses++;
     } else if (pfBit == PrefetchBit_Yes) {
diff --git a/src/mem/ruby/profiler/CacheProfiler.hh b/src/mem/ruby/profiler/CacheProfiler.hh
index fad60711b..2e59c9d82 100644
--- a/src/mem/ruby/profiler/CacheProfiler.hh
+++ b/src/mem/ruby/profiler/CacheProfiler.hh
@@ -35,6 +35,7 @@
 #include "mem/protocol/AccessModeType.hh"
 #include "mem/protocol/CacheRequestType.hh"
+#include "mem/protocol/GenericRequestType.hh"
 #include "mem/protocol/PrefetchBit.hh"
 #include "mem/ruby/common/Global.hh"
 #include "mem/ruby/common/Histogram.hh"
@@ -49,8 +50,13 @@ class CacheProfiler
     void printStats(std::ostream& out) const;
     void clearStats();
 
-    void addStatSample(CacheRequestType requestType, AccessModeType type,
-                       int msgSize, PrefetchBit pfBit);
+    void addCacheStatSample(CacheRequestType requestType,
+                            AccessModeType type,
+                            PrefetchBit pfBit);
+
+    void addGenericStatSample(GenericRequestType requestType,
+                              AccessModeType type,
+                              PrefetchBit pfBit);
 
     void print(std::ostream& out) const;
 
@@ -58,9 +64,9 @@ class CacheProfiler
     // Private copy constructor and assignment operator
     CacheProfiler(const CacheProfiler& obj);
    CacheProfiler& operator=(const CacheProfiler& obj);
+    void addStatSample(AccessModeType type, PrefetchBit pfBit);
 
     std::string m_description;
-    Histogram m_requestSize;
     int64 m_misses;
     int64 m_demand_misses;
     int64 m_prefetches;
@@ -68,7 +74,8 @@ class CacheProfiler
     int64 m_hw_prefetches;
 
     int64 m_accessModeTypeHistogram[AccessModeType_NUM];
-    std::vector<int> m_requestTypeVec;
+    std::vector<int> m_cacheRequestType;
+    std::vector<int> m_genericRequestType;
 };
 
 inline std::ostream&
diff --git a/src/mem/ruby/slicc_interface/RubySlicc_Profiler_interface.hh b/src/mem/ruby/slicc_interface/RubySlicc_Profiler_interface.hh
index 10d1c0e5b..cabba286a 100644
--- a/src/mem/ruby/slicc_interface/RubySlicc_Profiler_interface.hh
+++ b/src/mem/ruby/slicc_interface/RubySlicc_Profiler_interface.hh
@@ -58,8 +58,6 @@ void profile_request(const std::string& L1CacheStateStr,
                      const std::string& requestTypeStr);
 void profile_miss(const CacheMsg& msg, NodeID id);
 void profile_L1Cache_miss(const CacheMsg& msg, NodeID id);
-void profile_L2Cache_miss(GenericRequestType requestType, AccessModeType type,
-                          int msgSize, PrefetchBit pfBit, NodeID l2cacheID);
 void profile_token_retry(const Address& addr, AccessType type, int count);
 void profile_filter_action(int action);
 void profile_persistent_prediction(const Address& addr, AccessType type);
diff --git a/src/mem/ruby/system/CacheMemory.cc b/src/mem/ruby/system/CacheMemory.cc
index c9de85961..9102d1963 100644
--- a/src/mem/ruby/system/CacheMemory.cc
+++ b/src/mem/ruby/system/CacheMemory.cc
@@ -375,8 +375,19 @@ CacheMemory::setMRU(const Address& address)
 void
 CacheMemory::profileMiss(const CacheMsg& msg)
 {
-    m_profiler_ptr->addStatSample(msg.getType(), msg.getAccessMode(),
-                                  msg.getSize(), msg.getPrefetch());
+    m_profiler_ptr->addCacheStatSample(msg.getType(),
+                                       msg.getAccessMode(),
+                                       msg.getPrefetch());
+}
+
+void
+CacheMemory::profileGenericRequest(GenericRequestType requestType,
+                                   AccessModeType accessType,
+                                   PrefetchBit pfBit)
+{
+    m_profiler_ptr->addGenericStatSample(requestType,
+                                         accessType,
+                                         pfBit);
 }
 
 void
diff --git a/src/mem/ruby/system/CacheMemory.hh b/src/mem/ruby/system/CacheMemory.hh
index f004b8310..c1d49f784 100644
--- a/src/mem/ruby/system/CacheMemory.hh
+++ b/src/mem/ruby/system/CacheMemory.hh
@@ -37,6 +37,7 @@
 #include "mem/protocol/AccessPermission.hh"
 #include "mem/protocol/CacheMsg.hh"
 #include "mem/protocol/CacheRequestType.hh"
+#include "mem/protocol/GenericRequestType.hh"
 #include "mem/protocol/MachineType.hh"
 #include "mem/ruby/common/Address.hh"
 #include "mem/ruby/common/DataBlock.hh"
@@ -112,6 +113,10 @@ class CacheMemory : public SimObject
 
     void profileMiss(const CacheMsg & msg);
 
+    void profileGenericRequest(GenericRequestType requestType,
+                               AccessModeType accessType,
+                               PrefetchBit pfBit);
+
     void getMemoryValue(const Address& addr, char* value,
                         unsigned int size_in_bytes);
     void setMemoryValue(const Address& addr, char* value,
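Seen end to end, the profiling path this patch wires up is: a SLICC action calls CacheMemory::profileMiss() (L1, passing the CacheMsg) or CacheMemory::profileGenericRequest() (L2), CacheMemory forwards to CacheProfiler::addCacheStatSample() or addGenericStatSample(), and both of those funnel the shared bookkeeping (total misses, access-mode histogram, demand vs. prefetch counts) through the new private addStatSample(AccessModeType, PrefetchBit) helper. The sketch below illustrates only that forwarding shape in self-contained C++; the MiniProfiler class and the enum definitions are stand-ins, not the Ruby headers.

#include <iostream>

// Illustrative stand-in types; not the gem5/Ruby definitions.
enum class AccessModeType     { SupervisorMode, UserMode };
enum class PrefetchBit        { No, Yes };
enum class CacheRequestType   { LD, ST, IFETCH };
enum class GenericRequestType { GETS, GETX };

class MiniProfiler
{
  public:
    // L1 path: typed by the request type taken from the CacheMsg.
    void addCacheStatSample(CacheRequestType, AccessModeType mode, PrefetchBit pf)
    {
        addStatSample(mode, pf);
    }

    // L2 path: typed by the protocol-independent generic request type.
    void addGenericStatSample(GenericRequestType, AccessModeType mode, PrefetchBit pf)
    {
        addStatSample(mode, pf);
    }

    long demandMisses() const { return m_demand_misses; }

  private:
    // Shared bookkeeping, mirroring the new private
    // CacheProfiler::addStatSample(AccessModeType, PrefetchBit).
    void addStatSample(AccessModeType, PrefetchBit pf)
    {
        m_misses++;
        if (pf == PrefetchBit::No)
            m_demand_misses++;
        else
            m_prefetches++;
    }

    long m_misses = 0;
    long m_demand_misses = 0;
    long m_prefetches = 0;
};

int main()
{
    MiniProfiler p;
    // An L1 data-cache demand load miss:
    p.addCacheStatSample(CacheRequestType::LD, AccessModeType::UserMode, PrefetchBit::No);
    // An L2 miss triggered by a local GETS:
    p.addGenericStatSample(GenericRequestType::GETS, AccessModeType::UserMode, PrefetchBit::No);
    std::cout << "demand misses: " << p.demandMisses() << std::endl;  // prints 2
    return 0;
}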