Diffstat (limited to 'configs')
-rw-r--r--   configs/common/Caches.py     | 3 ---
-rw-r--r--   configs/common/O3_ARM_v7a.py | 3 ---
-rw-r--r--   configs/example/memcheck.py  | 3 +--
-rw-r--r--   configs/example/memtest.py   | 3 +--
4 files changed, 2 insertions, 10 deletions
diff --git a/configs/common/Caches.py b/configs/common/Caches.py
index 2bdffc6c7..cfb873b5e 100644
--- a/configs/common/Caches.py
+++ b/configs/common/Caches.py
@@ -52,7 +52,6 @@ class L1Cache(BaseCache):
     response_latency = 2
     mshrs = 4
     tgts_per_mshr = 20
-    is_top_level = True
 
 class L1_ICache(L1Cache):
     is_read_only = True
@@ -76,7 +75,6 @@ class IOCache(BaseCache):
     size = '1kB'
     tgts_per_mshr = 12
     forward_snoops = False
-    is_top_level = True
 
 class PageTableWalkerCache(BaseCache):
     assoc = 2
@@ -86,7 +84,6 @@ class PageTableWalkerCache(BaseCache):
     size = '1kB'
     tgts_per_mshr = 12
     forward_snoops = False
-    is_top_level = True
     # the x86 table walker actually writes to the table-walker cache
     if buildEnv['TARGET_ISA'] == 'x86':
         is_read_only = False
diff --git a/configs/common/O3_ARM_v7a.py b/configs/common/O3_ARM_v7a.py
index b4b66df9c..dbfdf6c41 100644
--- a/configs/common/O3_ARM_v7a.py
+++ b/configs/common/O3_ARM_v7a.py
@@ -149,7 +149,6 @@ class O3_ARM_v7a_ICache(BaseCache):
     tgts_per_mshr = 8
     size = '32kB'
     assoc = 2
-    is_top_level = True
     forward_snoops = False
     is_read_only = True
 
@@ -162,7 +161,6 @@ class O3_ARM_v7a_DCache(BaseCache):
     size = '32kB'
     assoc = 2
     write_buffers = 16
-    is_top_level = True
 
 # TLB Cache
 # Use a cache as a L2 TLB
@@ -174,7 +172,6 @@ class O3_ARM_v7aWalkCache(BaseCache):
     size = '1kB'
     assoc = 8
     write_buffers = 16
-    is_top_level = True
     forward_snoops = False
     is_read_only = True
 
diff --git a/configs/example/memcheck.py b/configs/example/memcheck.py
index f0bc26e32..ca2659ed0 100644
--- a/configs/example/memcheck.py
+++ b/configs/example/memcheck.py
@@ -154,7 +154,7 @@ for t, m in zip(testerspec, multiplier):
 # Define a prototype L1 cache that we scale for all successive levels
 proto_l1 = BaseCache(size = '32kB', assoc = 4,
                      hit_latency = 1, response_latency = 1,
-                     tgts_per_mshr = 8, is_top_level = True)
+                     tgts_per_mshr = 8)
 
 if options.blocking:
     proto_l1.mshrs = 1
@@ -179,7 +179,6 @@ for scale in cachespec[:-1]:
     next.response_latency = prev.response_latency * 10
     next.assoc = prev.assoc * scale
     next.mshrs = prev.mshrs * scale
-    next.is_top_level = False
     cache_proto.insert(0, next)
 
 # Create a config to be used by all the traffic generators
diff --git a/configs/example/memtest.py b/configs/example/memtest.py
index 0df9f766f..a51bd2796 100644
--- a/configs/example/memtest.py
+++ b/configs/example/memtest.py
@@ -177,7 +177,7 @@ else:
 # Define a prototype L1 cache that we scale for all successive levels
 proto_l1 = BaseCache(size = '32kB', assoc = 4,
                      hit_latency = 1, response_latency = 1,
-                     tgts_per_mshr = 8, is_top_level = True)
+                     tgts_per_mshr = 8)
 
 if options.blocking:
     proto_l1.mshrs = 1
@@ -197,7 +197,6 @@ for scale in cachespec[:-1]:
     next.response_latency = prev.response_latency * 10
     next.assoc = prev.assoc * scale
     next.mshrs = prev.mshrs * scale
-    next.is_top_level = False
     cache_proto.insert(0, next)
 
 # Make a prototype for the tester to be used throughout
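For context, the loop touched in memcheck.py and memtest.py builds the whole cache hierarchy by repeatedly scaling the L1 prototype. The sketch below illustrates that pattern after this change; it assumes gem5's m5.objects import and its clone-by-call idiom for SimObjects, and the cachespec value, the mshrs setting, and the size/hit_latency scaling lines are illustrative stand-ins rather than lines taken from the diff above.

# Illustrative sketch of the prototype-scaling pattern used by
# memcheck.py/memtest.py once is_top_level is dropped.
from m5.objects import BaseCache

# Assumed example: scale factors per extra cache level; the real scripts
# derive this list from a command-line option.
cachespec = [64, 8]

# L1 prototype, no longer carrying the removed is_top_level parameter.
proto_l1 = BaseCache(size = '32kB', assoc = 4,
                     hit_latency = 1, response_latency = 1,
                     tgts_per_mshr = 8)
proto_l1.mshrs = 4  # the scripts pick 1 (blocking) or 4 via options

# cache_proto[-1] stays the L1 prototype; each insert(0, ...) prepends a
# larger, slower level, so cache_proto[0] ends up as the outermost cache.
cache_proto = [proto_l1]

for scale in cachespec[:-1]:
    prev = cache_proto[0]
    next = prev()  # clone-by-call creates a new SimObject from prev
    next.size = prev.size * scale              # assumed, not in the hunk
    next.hit_latency = prev.hit_latency * 10   # assumed, not in the hunk
    next.response_latency = prev.response_latency * 10
    next.assoc = prev.assoc * scale
    next.mshrs = prev.mshrs * scale
    cache_proto.insert(0, next)

With is_top_level gone, the loop body no longer needs to flip next.is_top_level to False for the derived levels, which is the only behavioural difference in these two example scripts.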