author     Gabe Black <gblack@eecs.umich.edu>   2009-04-19 04:25:01 -0700
committer  Gabe Black <gblack@eecs.umich.edu>   2009-04-19 04:25:01 -0700
commit     3e5f4876630169e92b3ad736d747bcba1b79c062 (patch)
tree       cc6f7aa2f13331839567c1b5844ea2d8412df163 /src/mem
parent     ca8598147835cc3bf4cb6125b4f32cbd941f1ae7 (diff)
download   gem5-3e5f4876630169e92b3ad736d747bcba1b79c062.tar.xz
Memory: Rename LOCKED for load locked store conditional to LLSC.
Diffstat (limited to 'src/mem')
-rw-r--r--  src/mem/cache/blk.hh          4
-rw-r--r--  src/mem/cache/cache_impl.hh   4
-rw-r--r--  src/mem/packet.cc             6
-rw-r--r--  src/mem/packet.hh             6
-rw-r--r--  src/mem/physical.cc          12
-rw-r--r--  src/mem/physical.hh           6
-rw-r--r--  src/mem/request.hh            4
7 files changed, 21 insertions(+), 21 deletions(-)
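
For context, the renamed predicate gates load-locked/store-conditional (LL/SC) handling throughout the memory system: a load locked records a reservation, and a later store conditional succeeds only if that reservation is still live. The following standalone sketch (simplified types and names such as ToyRequest and Reservation are assumptions for illustration, not gem5's real classes) shows the policy that blk.hh and physical.cc implement and that the diffs below touch.

```cpp
// Standalone sketch, not gem5 code: simplified illustration of the LL/SC
// policy behind the renamed isLlsc()/LLSC flag.
#include <cstdint>
#include <iostream>
#include <list>

struct ToyRequest {
    uint64_t paddr;       // physical address of the access
    int      contextId;   // hardware context issuing the access
    bool     llsc;        // true for LL/SC accesses (was "locked" pre-rename)
    uint64_t extraData;   // SC result reported back: 1 = success, 0 = failure
};

struct Reservation {
    uint64_t addr;
    int      contextId;
};

// Record a reservation on a load locked (the role of trackLoadLocked below).
void trackLoadLocked(std::list<Reservation> &res, const ToyRequest &req) {
    res.push_front({req.paddr, req.contextId});
}

// Decide whether a store may proceed (the policy of checkLockedAddrList):
// ordinary stores always succeed; an SC succeeds only if this context still
// holds a reservation; any store to the address clears matching reservations.
bool checkWrite(std::list<Reservation> &res, ToyRequest &req) {
    bool success = !req.llsc;
    for (auto i = res.begin(); i != res.end(); ) {
        if (i->addr == req.paddr) {
            if (req.llsc && i->contextId == req.contextId)
                success = true;
            i = res.erase(i);   // the store invalidates this reservation
        } else {
            ++i;
        }
    }
    if (req.llsc)
        req.extraData = success ? 1 : 0;
    return success;
}

int main() {
    std::list<Reservation> res;
    ToyRequest ll{0x1000, 0, true, 0};
    trackLoadLocked(res, ll);

    ToyRequest sc{0x1000, 0, true, 0};
    std::cout << "first SC:  " << checkWrite(res, sc) << "\n";  // 1: succeeds
    std::cout << "second SC: " << checkWrite(res, sc) << "\n";  // 0: reservation gone
}
```
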
diff --git a/src/mem/cache/blk.hh b/src/mem/cache/blk.hh
index fe65672d6..acb117f6c 100644
--- a/src/mem/cache/blk.hh
+++ b/src/mem/cache/blk.hh
@@ -218,7 +218,7 @@ class CacheBlk
*/
void trackLoadLocked(PacketPtr pkt)
{
- assert(pkt->isLocked());
+ assert(pkt->isLlsc());
lockList.push_front(Lock(pkt->req));
}
@@ -236,7 +236,7 @@ class CacheBlk
bool checkWrite(PacketPtr pkt)
{
Request *req = pkt->req;
- if (pkt->isLocked()) {
+ if (pkt->isLlsc()) {
// it's a store conditional... have to check for matching
// load locked.
bool success = false;
diff --git a/src/mem/cache/cache_impl.hh b/src/mem/cache/cache_impl.hh
index a78fd3637..f98d6ac34 100644
--- a/src/mem/cache/cache_impl.hh
+++ b/src/mem/cache/cache_impl.hh
@@ -180,7 +180,7 @@ Cache<TagStore>::satisfyCpuSideRequest(PacketPtr pkt, BlkType *blk)
pkt->writeDataToBlock(blk->data, blkSize);
}
} else if (pkt->isRead()) {
- if (pkt->isLocked()) {
+ if (pkt->isLlsc()) {
blk->trackLoadLocked(pkt);
}
pkt->setDataFromBlock(blk->data, blkSize);
@@ -317,7 +317,7 @@ Cache<TagStore>::access(PacketPtr pkt, BlkType *&blk,
incMissCount(pkt);
- if (blk == NULL && pkt->isLocked() && pkt->isWrite()) {
+ if (blk == NULL && pkt->isLlsc() && pkt->isWrite()) {
// complete miss on store conditional... just give up now
pkt->req->setExtraData(0);
return true;
diff --git a/src/mem/packet.cc b/src/mem/packet.cc
index 38b8879e5..2f84e4414 100644
--- a/src/mem/packet.cc
+++ b/src/mem/packet.cc
@@ -105,14 +105,14 @@ MemCmd::commandInfo[] =
InvalidCmd, "ReadExResp" },
/* LoadLockedReq: note that we use plain ReadResp as response, so that
* we can also use ReadRespWithInvalidate when needed */
- { SET4(IsRead, IsLocked, IsRequest, NeedsResponse),
+ { SET4(IsRead, IsLlsc, IsRequest, NeedsResponse),
ReadResp, "LoadLockedReq" },
/* StoreCondReq */
- { SET6(IsWrite, NeedsExclusive, IsLocked,
+ { SET6(IsWrite, NeedsExclusive, IsLlsc,
IsRequest, NeedsResponse, HasData),
StoreCondResp, "StoreCondReq" },
/* StoreCondResp */
- { SET4(IsWrite, NeedsExclusive, IsLocked, IsResponse),
+ { SET4(IsWrite, NeedsExclusive, IsLlsc, IsResponse),
InvalidCmd, "StoreCondResp" },
/* SwapReq -- for Swap ldstub type operations */
{ SET6(IsRead, IsWrite, NeedsExclusive, IsRequest, HasData, NeedsResponse),
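
The SET4/SET6 entries above populate a per-command attribute set; packet.hh's isLlsc() then simply tests the IsLlsc bit. A minimal sketch of that pattern follows, using a simplified ToyMemCmd class with std::bitset as an assumption; the real MemCmd table has many more commands and attributes.

```cpp
// Sketch of the command-attribute pattern, not the real MemCmd class.
#include <bitset>
#include <initializer_list>
#include <iostream>

class ToyMemCmd {
  public:
    enum Attribute { IsRead, IsWrite, NeedsExclusive, IsLlsc,  // renamed from IsLocked
                     IsRequest, NeedsResponse, HasData, NUM_ATTRS };

    explicit ToyMemCmd(std::initializer_list<Attribute> attrs) {
        for (Attribute a : attrs)
            attributes.set(a);
    }

    bool testCmdAttrib(Attribute a) const { return attributes.test(a); }
    bool isRead() const  { return testCmdAttrib(IsRead); }
    bool isWrite() const { return testCmdAttrib(IsWrite); }
    bool isLlsc() const  { return testCmdAttrib(IsLlsc); }

  private:
    std::bitset<NUM_ATTRS> attributes;
};

int main() {
    // Analogous to the LoadLockedReq and StoreCondReq rows above.
    ToyMemCmd loadLockedReq{ToyMemCmd::IsRead, ToyMemCmd::IsLlsc,
                            ToyMemCmd::IsRequest, ToyMemCmd::NeedsResponse};
    ToyMemCmd storeCondReq{ToyMemCmd::IsWrite, ToyMemCmd::NeedsExclusive,
                           ToyMemCmd::IsLlsc, ToyMemCmd::IsRequest,
                           ToyMemCmd::NeedsResponse, ToyMemCmd::HasData};

    std::cout << loadLockedReq.isLlsc() << " " << storeCondReq.isLlsc() << "\n";  // 1 1
}
```
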
diff --git a/src/mem/packet.hh b/src/mem/packet.hh
index 41f599fa0..965482c02 100644
--- a/src/mem/packet.hh
+++ b/src/mem/packet.hh
@@ -120,7 +120,7 @@ class MemCmd
NeedsResponse, //!< Requester needs response from target
IsSWPrefetch,
IsHWPrefetch,
- IsLocked, //!< Alpha/MIPS LL or SC access
+ IsLlsc, //!< Alpha/MIPS LL or SC access
HasData, //!< There is an associated payload
IsError, //!< Error response
IsPrint, //!< Print state matching address (for debugging)
@@ -166,7 +166,7 @@ class MemCmd
bool isInvalidate() const { return testCmdAttrib(IsInvalidate); }
bool hasData() const { return testCmdAttrib(HasData); }
bool isReadWrite() const { return isRead() && isWrite(); }
- bool isLocked() const { return testCmdAttrib(IsLocked); }
+ bool isLlsc() const { return testCmdAttrib(IsLlsc); }
bool isError() const { return testCmdAttrib(IsError); }
bool isPrint() const { return testCmdAttrib(IsPrint); }
@@ -401,7 +401,7 @@ class Packet : public FastAlloc, public Printable
bool isInvalidate() const { return cmd.isInvalidate(); }
bool hasData() const { return cmd.hasData(); }
bool isReadWrite() const { return cmd.isReadWrite(); }
- bool isLocked() const { return cmd.isLocked(); }
+ bool isLlsc() const { return cmd.isLlsc(); }
bool isError() const { return cmd.isError(); }
bool isPrint() const { return cmd.isPrint(); }
diff --git a/src/mem/physical.cc b/src/mem/physical.cc
index 16ff3de6d..86ecb506f 100644
--- a/src/mem/physical.cc
+++ b/src/mem/physical.cc
@@ -125,7 +125,7 @@ PhysicalMemory::calculateLatency(PacketPtr pkt)
// Add load-locked to tracking list. Should only be called if the
-// operation is a load and the LOCKED flag is set.
+// operation is a load and the LLSC flag is set.
void
PhysicalMemory::trackLoadLocked(PacketPtr pkt)
{
@@ -162,12 +162,12 @@ PhysicalMemory::checkLockedAddrList(PacketPtr pkt)
{
Request *req = pkt->req;
Addr paddr = LockedAddr::mask(req->getPaddr());
- bool isLocked = pkt->isLocked();
+ bool isLlsc = pkt->isLlsc();
// Initialize return value. Non-conditional stores always
// succeed. Assume conditional stores will fail until proven
// otherwise.
- bool success = !isLocked;
+ bool success = !isLlsc;
// Iterate over list. Note that there could be multiple matching
// records, as more than one context could have done a load locked
@@ -179,7 +179,7 @@ PhysicalMemory::checkLockedAddrList(PacketPtr pkt)
if (i->addr == paddr) {
// we have a matching address
- if (isLocked && i->matchesContext(req)) {
+ if (isLlsc && i->matchesContext(req)) {
// it's a store conditional, and as far as the memory
// system can tell, the requesting context's lock is
// still valid.
@@ -199,7 +199,7 @@ PhysicalMemory::checkLockedAddrList(PacketPtr pkt)
}
}
- if (isLocked) {
+ if (isLlsc) {
req->setExtraData(success ? 1 : 0);
}
@@ -284,7 +284,7 @@ PhysicalMemory::doAtomicAccess(PacketPtr pkt)
TRACE_PACKET("Read/Write");
} else if (pkt->isRead()) {
assert(!pkt->isWrite());
- if (pkt->isLocked()) {
+ if (pkt->isLlsc()) {
trackLoadLocked(pkt);
}
if (pmemAddr)
diff --git a/src/mem/physical.hh b/src/mem/physical.hh
index d18138ecd..2a3bea7a5 100644
--- a/src/mem/physical.hh
+++ b/src/mem/physical.hh
@@ -129,11 +129,11 @@ class PhysicalMemory : public MemObject
Request *req = pkt->req;
if (lockedAddrList.empty()) {
// no locked addrs: nothing to check, store_conditional fails
- bool isLocked = pkt->isLocked();
- if (isLocked) {
+ bool isLlsc = pkt->isLlsc();
+ if (isLlsc) {
req->setExtraData(0);
}
- return !isLocked; // only do write if not an sc
+ return !isLlsc; // only do write if not an sc
} else {
// iterate over list...
return checkLockedAddrList(pkt);
diff --git a/src/mem/request.hh b/src/mem/request.hh
index ee62ce771..9e8208260 100644
--- a/src/mem/request.hh
+++ b/src/mem/request.hh
@@ -62,7 +62,7 @@ class Request : public FastAlloc
/** ASI information for this request if it exists. */
static const FlagsType ASI_BITS = 0x000000FF;
/** The request is a Load locked/store conditional. */
- static const FlagsType LOCKED = 0x00000100;
+ static const FlagsType LLSC = 0x00000100;
/** The virtual address is also the physical address. */
static const FlagsType PHYSICAL = 0x00000200;
/** The request is an ALPHA VPTE pal access (hw_ld). */
@@ -448,7 +448,7 @@ class Request : public FastAlloc
/** Accessor Function to Check Cacheability. */
bool isUncacheable() const { return flags.isSet(UNCACHEABLE); }
bool isInstRead() const { return flags.isSet(INST_READ); }
- bool isLocked() const { return flags.isSet(LOCKED); }
+ bool isLlsc() const { return flags.isSet(LLSC); }
bool isSwap() const { return flags.isSet(MEM_SWAP|MEM_SWAP_COND); }
bool isCondSwap() const { return flags.isSet(MEM_SWAP_COND); }
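
At the Request level the rename is just a flag-bit constant plus its accessor: set the LLSC bit when building the request, and isLlsc() reports it. A minimal sketch of that flags pattern, assuming a plain uint32_t flags word rather than gem5's Flags helper class:

```cpp
// Sketch of the request-flag pattern; ToyRequest is an assumption, not gem5's Request.
#include <cstdint>
#include <iostream>

class ToyRequest {
  public:
    typedef uint32_t FlagsType;

    // Bit assignments mirroring the constants above.
    static const FlagsType LLSC     = 0x00000100;  // renamed from LOCKED
    static const FlagsType PHYSICAL = 0x00000200;

    void setFlags(FlagsType f)    { flags |= f; }
    bool isSet(FlagsType f) const { return (flags & f) != 0; }
    bool isLlsc() const           { return isSet(LLSC); }

  private:
    FlagsType flags = 0;
};

int main() {
    ToyRequest req;
    req.setFlags(ToyRequest::LLSC);
    std::cout << req.isLlsc() << "\n";  // 1
}
```
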