author | Ali Saidi <Ali.Saidi@ARM.com> | 2014-01-24 15:29:33 -0600 |
---|---|---|
committer | Ali Saidi <Ali.Saidi@ARM.com> | 2014-01-24 15:29:33 -0600 |
commit | f3585c841e964c98911784a187fc4f081a02a0a6 (patch) | |
tree | 2a5a3edeaeb0ffe37ca3a04b884f8f66c7538bbf /tests/quick | |
parent | cfc4a999828a5b51f4c514e3a7c47b4eebc450b9 (diff) | |
download | gem5-f3585c841e964c98911784a187fc4f081a02a0a6.tar.xz | |
stats: update stats for cache occupancy and clock domain changes
Diffstat (limited to 'tests/quick')
276 files changed, 6290 insertions, 11964 deletions
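The updated reference outputs reflect the changes named in the commit message: per-domain clock and voltage statistics (system.clk_domain.clock, system.cpu_clk_domain.clock, system.voltage_domain.voltage), new per-task-id cache occupancy and tag/data access counters (occ_task_id_blocks, age_task_id_blocks, tag_accesses, data_accesses), and the sequential_access cache parameter now dumped into config.ini. As a minimal sketch only, the snippet below shows how the clock/voltage domains and cache parameters seen in these config.ini dumps might be declared in a gem5 Python configuration of this era; the values (1000/500-tick clocks, 1 V, 32 kB L1s, 4 MB L2, sequential_access=False) come from the dumped config.ini, while the helper class names and the tgts_per_mshr value are assumptions, not the actual tests/run.py used to regenerate these references.

```python
# Illustrative gem5 config sketch, not the test harness used for these refs.
from m5.objects import *

system = System(cache_line_size=64, mem_mode='atomic',
                mem_ranges=[AddrRange('128MB')])

# One voltage domain plus a 1 GHz system clock domain and a 2 GHz CPU clock
# domain (1000 and 500 ticks at 10^12 ticks/s). These objects are what the
# new system.voltage_domain.voltage and system.*clk_domain.clock stats report.
system.voltage_domain = VoltageDomain(voltage='1V')
system.clk_domain = SrcClockDomain(clock='1GHz',
                                   voltage_domain=system.voltage_domain)
system.cpu_clk_domain = SrcClockDomain(clock='2GHz',
                                       voltage_domain=system.voltage_domain)

class TestL1Cache(BaseCache):        # hypothetical helper class
    size = '32kB'
    assoc = 4
    hit_latency = 2
    response_latency = 2
    mshrs = 4
    tgts_per_mshr = 20               # value assumed, not in the dump above
    sequential_access = False        # parameter newly dumped into config.ini

class TestL2Cache(TestL1Cache):      # hypothetical helper class
    size = '4MB'
    assoc = 8
    hit_latency = 20
    response_latency = 20
    mshrs = 20
```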
diff --git a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic-dual/config.ini b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic-dual/config.ini index 730b05e22..982acdd3c 100644 --- a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic-dual/config.ini +++ b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic-dual/config.ini @@ -15,16 +15,16 @@ boot_cpu_frequency=500 boot_osflags=root=/dev/hda1 console=ttyS0 cache_line_size=64 clk_domain=system.clk_domain -console=/scratch/nilay/GEM5/system/binaries/console +console=/dist/binaries/console eventq_index=0 init_param=0 -kernel=/scratch/nilay/GEM5/system/binaries/vmlinux +kernel=/dist/binaries/vmlinux load_addr_mask=1099511627775 mem_mode=atomic mem_ranges=0:134217727 memories=system.physmem num_work_ids=16 -pal=/scratch/nilay/GEM5/system/binaries/ts_osfpal +pal=/dist/binaries/ts_osfpal readfile=tests/halt.sh symbolfile= system_rev=1024 @@ -108,6 +108,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu0.dcache.tags @@ -124,6 +125,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu0.dtb] @@ -146,6 +148,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu0.icache.tags @@ -162,6 +165,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu0.interrupts] @@ -171,6 +175,7 @@ eventq_index=0 [system.cpu0.isa] type=AlphaISA eventq_index=0 +system=system [system.cpu0.itb] type=AlphaTLB @@ -234,6 +239,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu1.dcache.tags @@ -250,6 +256,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu1.dtb] @@ -272,6 +279,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu1.icache.tags @@ -288,6 +296,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu1.interrupts] @@ -297,6 +306,7 @@ eventq_index=0 [system.cpu1.isa] type=AlphaISA eventq_index=0 +system=system [system.cpu1.itb] type=AlphaTLB @@ -333,7 +343,7 @@ table_size=65536 [system.disk0.image.child] type=RawDiskImage eventq_index=0 -image_file=/scratch/nilay/GEM5/system/disks/linux-latest.img +image_file=/dist/disks/linux-latest.img read_only=true [system.disk2] @@ -356,7 +366,7 @@ table_size=65536 [system.disk2.image.child] type=RawDiskImage eventq_index=0 -image_file=/scratch/nilay/GEM5/system/disks/linux-bigswap2.img +image_file=/dist/disks/linux-bigswap2.img read_only=true [system.intrctrl] @@ -390,6 +400,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=50 +sequential_access=false size=1024 system=system tags=system.iocache.tags @@ -406,6 +417,7 @@ block_size=64 clk_domain=system.clk_domain eventq_index=0 hit_latency=50 +sequential_access=false size=1024 [system.l2c] @@ -423,6 +435,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=4194304 system=system tags=system.l2c.tags @@ -439,6 +452,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=20 
+sequential_access=false size=4194304 [system.membus] @@ -495,7 +509,7 @@ system=system [system.simple_disk.disk] type=RawDiskImage eventq_index=0 -image_file=/scratch/nilay/GEM5/system/disks/linux-latest.img +image_file=/dist/disks/linux-latest.img read_only=true [system.terminal] diff --git a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic-dual/simerr b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic-dual/simerr index 0bcb6e870..20fe2d682 100755 --- a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic-dual/simerr +++ b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic-dual/simerr @@ -2,4 +2,3 @@ warn: Sockets disabled, not accepting terminal connections warn: Sockets disabled, not accepting gdb connections warn: Prefetch instructions in Alpha do not do anything warn: Prefetch instructions in Alpha do not do anything -hack: be nice to actually delete the event here diff --git a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic-dual/simout b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic-dual/simout index 410351310..3fdb0e076 100755 --- a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic-dual/simout +++ b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic-dual/simout @@ -1,15 +1,13 @@ -Redirecting stdout to build/ALPHA/tests/opt/quick/fs/10.linux-boot/alpha/linux/tsunami-simple-atomic-dual/simout -Redirecting stderr to build/ALPHA/tests/opt/quick/fs/10.linux-boot/alpha/linux/tsunami-simple-atomic-dual/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 24 2013 03:08:53 -gem5 started Sep 28 2013 03:05:50 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:27:55 +gem5 started Jan 22 2014 17:24:41 +gem5 executing on u200540-lin command line: build/ALPHA/gem5.opt -d build/ALPHA/tests/opt/quick/fs/10.linux-boot/alpha/linux/tsunami-simple-atomic-dual -re tests/run.py build/ALPHA/tests/opt/quick/fs/10.linux-boot/alpha/linux/tsunami-simple-atomic-dual Global frequency set at 1000000000000 ticks per second -info: kernel located at: /dist/m5/system/binaries/vmlinux +info: kernel located at: /dist/binaries/vmlinux 0: system.tsunami.io.rtc: Real-time clock set to Thu Jan 1 00:00:00 2009 info: Entering event queue @ 0. Starting simulation... 
info: Launching CPU 1 @ 97861500 -Exiting @ tick 1870335643500 because m5_exit instruction encountered +Exiting @ tick 1870335522500 because m5_exit instruction encountered diff --git a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic-dual/stats.txt b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic-dual/stats.txt index e7342cf46..42e3976c4 100644 --- a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic-dual/stats.txt +++ b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic-dual/stats.txt @@ -4,13 +4,15 @@ sim_seconds 1.870336 # Nu sim_ticks 1870335522500 # Number of ticks simulated final_tick 1870335522500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 1806360 # Simulator instruction rate (inst/s) -host_op_rate 1806359 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 53496127424 # Simulator tick rate (ticks/s) -host_mem_usage 353980 # Number of bytes of host memory used -host_seconds 34.96 # Real time elapsed on the host +host_inst_rate 3158607 # Simulator instruction rate (inst/s) +host_op_rate 3158605 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 93543458564 # Simulator tick rate (ticks/s) +host_mem_usage 309852 # Number of bytes of host memory used +host_seconds 19.99 # Real time elapsed on the host sim_insts 63154034 # Number of instructions simulated sim_ops 63154034 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu0.inst 761216 # Number of bytes read from this memory system.physmem.bytes_read::cpu0.data 66693056 # Number of bytes read from this memory system.physmem.bytes_read::tsunami.ide 2649600 # Number of bytes read from this memory @@ -51,6 +53,7 @@ system.physmem.bw_total::total 42102084 # To system.membus.throughput 42160248 # Throughput (bytes/s) system.membus.data_through_bus 78853810 # Total data (bytes) system.membus.snoop_data_through_bus 0 # Total snoop data (bytes) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.l2c.tags.replacements 1000626 # number of replacements system.l2c.tags.tagsinuse 65381.922680 # Cycle average of tags in use system.l2c.tags.total_refs 2464737 # Total number of references to valid blocks. 
@@ -68,6 +71,15 @@ system.l2c.tags.occ_percent::cpu0.data 0.063089 # Av system.l2c.tags.occ_percent::cpu1.inst 0.002661 # Average percentage of cache occupancy system.l2c.tags.occ_percent::cpu1.data 0.000305 # Average percentage of cache occupancy system.l2c.tags.occ_percent::total 0.997649 # Average percentage of cache occupancy +system.l2c.tags.occ_task_id_blocks::1024 65142 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::0 769 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::1 3264 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::2 6912 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::3 6232 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::4 47965 # Occupied blocks per task id +system.l2c.tags.occ_task_id_percent::1024 0.993988 # Percentage of cache occupancy per task id +system.l2c.tags.tag_accesses 32109442 # Number of tag accesses +system.l2c.tags.data_accesses 32109442 # Number of data accesses system.l2c.ReadReq_hits::cpu0.inst 873086 # number of ReadReq hits system.l2c.ReadReq_hits::cpu0.data 763077 # number of ReadReq hits system.l2c.ReadReq_hits::cpu1.inst 101896 # number of ReadReq hits @@ -188,6 +200,11 @@ system.iocache.tags.warmup_cycle 1685787165017 # C system.iocache.tags.occ_blocks::tsunami.ide 0.435437 # Average occupied blocks per requestor system.iocache.tags.occ_percent::tsunami.ide 0.027215 # Average percentage of cache occupancy system.iocache.tags.occ_percent::total 0.027215 # Average percentage of cache occupancy +system.iocache.tags.occ_task_id_blocks::1023 16 # Occupied blocks per task id +system.iocache.tags.age_task_id_blocks_1023::2 16 # Occupied blocks per task id +system.iocache.tags.occ_task_id_percent::1023 1 # Percentage of cache occupancy per task id +system.iocache.tags.tag_accesses 375543 # Number of tag accesses +system.iocache.tags.data_accesses 375543 # Number of data accesses system.iocache.ReadReq_misses::tsunami.ide 175 # number of ReadReq misses system.iocache.ReadReq_misses::total 175 # number of ReadReq misses system.iocache.WriteReq_misses::tsunami.ide 41552 # number of WriteReq misses @@ -424,6 +441,13 @@ system.cpu0.icache.tags.warmup_cycle 9786576500 # Cy system.cpu0.icache.tags.occ_blocks::cpu0.inst 511.244754 # Average occupied blocks per requestor system.cpu0.icache.tags.occ_percent::cpu0.inst 0.998525 # Average percentage of cache occupancy system.cpu0.icache.tags.occ_percent::total 0.998525 # Average percentage of cache occupancy +system.cpu0.icache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::0 59 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::1 108 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::2 345 # Occupied blocks per task id +system.cpu0.icache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu0.icache.tags.tag_accesses 58115132 # Number of tag accesses +system.cpu0.icache.tags.data_accesses 58115132 # Number of data accesses system.cpu0.icache.ReadReq_hits::cpu0.inst 56345132 # number of ReadReq hits system.cpu0.icache.ReadReq_hits::total 56345132 # number of ReadReq hits system.cpu0.icache.demand_hits::cpu0.inst 56345132 # number of demand (read+write) hits @@ -466,6 +490,13 @@ system.cpu0.dcache.tags.warmup_cycle 10840000 # Cy system.cpu0.dcache.tags.occ_blocks::cpu0.data 507.129778 # Average occupied blocks per requestor 
system.cpu0.dcache.tags.occ_percent::cpu0.data 0.990488 # Average percentage of cache occupancy system.cpu0.dcache.tags.occ_percent::total 0.990488 # Average percentage of cache occupancy +system.cpu0.dcache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu0.dcache.tags.age_task_id_blocks_1024::0 443 # Occupied blocks per task id +system.cpu0.dcache.tags.age_task_id_blocks_1024::1 65 # Occupied blocks per task id +system.cpu0.dcache.tags.age_task_id_blocks_1024::2 4 # Occupied blocks per task id +system.cpu0.dcache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu0.dcache.tags.tag_accesses 62404072 # Number of tag accesses +system.cpu0.dcache.tags.data_accesses 62404072 # Number of data accesses system.cpu0.dcache.ReadReq_hits::cpu0.data 7298337 # number of ReadReq hits system.cpu0.dcache.ReadReq_hits::total 7298337 # number of ReadReq hits system.cpu0.dcache.WriteReq_hits::cpu0.data 5462263 # number of WriteReq hits @@ -661,6 +692,11 @@ system.cpu1.icache.tags.warmup_cycle 1868933059000 # C system.cpu1.icache.tags.occ_blocks::cpu1.inst 427.126317 # Average occupied blocks per requestor system.cpu1.icache.tags.occ_percent::cpu1.inst 0.834231 # Average percentage of cache occupancy system.cpu1.icache.tags.occ_percent::total 0.834231 # Average percentage of cache occupancy +system.cpu1.icache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu1.icache.tags.age_task_id_blocks_1024::2 512 # Occupied blocks per task id +system.cpu1.icache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu1.icache.tags.tag_accesses 6039396 # Number of tag accesses +system.cpu1.icache.tags.data_accesses 6039396 # Number of data accesses system.cpu1.icache.ReadReq_hits::cpu1.inst 5832136 # number of ReadReq hits system.cpu1.icache.ReadReq_hits::total 5832136 # number of ReadReq hits system.cpu1.icache.demand_hits::cpu1.inst 5832136 # number of demand (read+write) hits @@ -703,6 +739,12 @@ system.cpu1.dcache.tags.warmup_cycle 1851115552500 # C system.cpu1.dcache.tags.occ_blocks::cpu1.data 421.562730 # Average occupied blocks per requestor system.cpu1.dcache.tags.occ_percent::cpu1.data 0.823365 # Average percentage of cache occupancy system.cpu1.dcache.tags.occ_percent::total 0.823365 # Average percentage of cache occupancy +system.cpu1.dcache.tags.occ_task_id_blocks::1024 338 # Occupied blocks per task id +system.cpu1.dcache.tags.age_task_id_blocks_1024::2 337 # Occupied blocks per task id +system.cpu1.dcache.tags.age_task_id_blocks_1024::3 1 # Occupied blocks per task id +system.cpu1.dcache.tags.occ_task_id_percent::1024 0.660156 # Percentage of cache occupancy per task id +system.cpu1.dcache.tags.tag_accesses 7735310 # Number of tag accesses +system.cpu1.dcache.tags.data_accesses 7735310 # Number of data accesses system.cpu1.dcache.ReadReq_hits::cpu1.data 1109521 # number of ReadReq hits system.cpu1.dcache.ReadReq_hits::total 1109521 # number of ReadReq hits system.cpu1.dcache.WriteReq_hits::cpu1.data 707457 # number of WriteReq hits diff --git a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic/config.ini b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic/config.ini index 7f56b644f..b586addc4 100644 --- a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic/config.ini +++ b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic/config.ini @@ -15,16 +15,16 @@ boot_cpu_frequency=500 boot_osflags=root=/dev/hda1 console=ttyS0 
cache_line_size=64 clk_domain=system.clk_domain -console=/scratch/nilay/GEM5/system/binaries/console +console=/dist/binaries/console eventq_index=0 init_param=0 -kernel=/scratch/nilay/GEM5/system/binaries/vmlinux +kernel=/dist/binaries/vmlinux load_addr_mask=1099511627775 mem_mode=atomic mem_ranges=0:134217727 memories=system.physmem num_work_ids=16 -pal=/scratch/nilay/GEM5/system/binaries/ts_osfpal +pal=/dist/binaries/ts_osfpal readfile=tests/halt.sh symbolfile= system_rev=1024 @@ -108,6 +108,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu.dcache.tags @@ -124,6 +125,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu.dtb] @@ -146,6 +148,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu.icache.tags @@ -162,6 +165,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu.interrupts] @@ -171,6 +175,7 @@ eventq_index=0 [system.cpu.isa] type=AlphaISA eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB @@ -192,6 +197,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=4194304 system=system tags=system.cpu.l2cache.tags @@ -208,6 +214,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=20 +sequential_access=false size=4194304 [system.cpu.toL2Bus] @@ -251,7 +258,7 @@ table_size=65536 [system.disk0.image.child] type=RawDiskImage eventq_index=0 -image_file=/scratch/nilay/GEM5/system/disks/linux-latest.img +image_file=/dist/disks/linux-latest.img read_only=true [system.disk2] @@ -274,7 +281,7 @@ table_size=65536 [system.disk2.image.child] type=RawDiskImage eventq_index=0 -image_file=/scratch/nilay/GEM5/system/disks/linux-bigswap2.img +image_file=/dist/disks/linux-bigswap2.img read_only=true [system.intrctrl] @@ -308,6 +315,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=50 +sequential_access=false size=1024 system=system tags=system.iocache.tags @@ -324,6 +332,7 @@ block_size=64 clk_domain=system.clk_domain eventq_index=0 hit_latency=50 +sequential_access=false size=1024 [system.membus] @@ -380,7 +389,7 @@ system=system [system.simple_disk.disk] type=RawDiskImage eventq_index=0 -image_file=/scratch/nilay/GEM5/system/disks/linux-latest.img +image_file=/dist/disks/linux-latest.img read_only=true [system.terminal] diff --git a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic/simerr b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic/simerr index 0bcb6e870..20fe2d682 100755 --- a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic/simerr +++ b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic/simerr @@ -2,4 +2,3 @@ warn: Sockets disabled, not accepting terminal connections warn: Sockets disabled, not accepting gdb connections warn: Prefetch instructions in Alpha do not do anything warn: Prefetch instructions in Alpha do not do anything -hack: be nice to actually delete the event here diff --git a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic/simout b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic/simout index b63c77b44..142ae35a0 100755 --- a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic/simout +++ 
b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic/simout @@ -1,14 +1,12 @@ -Redirecting stdout to build/ALPHA/tests/opt/quick/fs/10.linux-boot/alpha/linux/tsunami-simple-atomic/simout -Redirecting stderr to build/ALPHA/tests/opt/quick/fs/10.linux-boot/alpha/linux/tsunami-simple-atomic/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 24 2013 03:08:53 -gem5 started Sep 28 2013 03:05:50 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:27:55 +gem5 started Jan 22 2014 17:24:41 +gem5 executing on u200540-lin command line: build/ALPHA/gem5.opt -d build/ALPHA/tests/opt/quick/fs/10.linux-boot/alpha/linux/tsunami-simple-atomic -re tests/run.py build/ALPHA/tests/opt/quick/fs/10.linux-boot/alpha/linux/tsunami-simple-atomic Global frequency set at 1000000000000 ticks per second -info: kernel located at: /dist/m5/system/binaries/vmlinux +info: kernel located at: /dist/binaries/vmlinux 0: system.tsunami.io.rtc: Real-time clock set to Thu Jan 1 00:00:00 2009 info: Entering event queue @ 0. Starting simulation... -Exiting @ tick 1829332269000 because m5_exit instruction encountered +Exiting @ tick 1829332258000 because m5_exit instruction encountered diff --git a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic/stats.txt b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic/stats.txt index 01a06923f..2043677f9 100644 --- a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic/stats.txt +++ b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-atomic/stats.txt @@ -4,13 +4,15 @@ sim_seconds 1.829332 # Nu sim_ticks 1829332258000 # Number of ticks simulated final_tick 1829332258000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 1538182 # Simulator instruction rate (inst/s) -host_op_rate 1538181 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 46867449524 # Simulator tick rate (ticks/s) -host_mem_usage 350908 # Number of bytes of host memory used -host_seconds 39.03 # Real time elapsed on the host +host_inst_rate 3003513 # Simulator instruction rate (inst/s) +host_op_rate 3003511 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 91515177007 # Simulator tick rate (ticks/s) +host_mem_usage 306744 # Number of bytes of host memory used +host_seconds 19.99 # Real time elapsed on the host sim_insts 60038305 # Number of instructions simulated sim_ops 60038305 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 857984 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 66839424 # Number of bytes read from this memory system.physmem.bytes_read::tsunami.ide 2652288 # Number of bytes read from this memory @@ -50,6 +52,11 @@ system.iocache.tags.warmup_cycle 1685780659017 # C system.iocache.tags.occ_blocks::tsunami.ide 1.225570 # Average occupied blocks per requestor system.iocache.tags.occ_percent::tsunami.ide 0.076598 # Average percentage of cache occupancy system.iocache.tags.occ_percent::total 0.076598 # Average percentage of cache occupancy +system.iocache.tags.occ_task_id_blocks::1023 16 # Occupied blocks per task id +system.iocache.tags.age_task_id_blocks_1023::2 16 # Occupied blocks per task id +system.iocache.tags.occ_task_id_percent::1023 1 # 
Percentage of cache occupancy per task id +system.iocache.tags.tag_accesses 375534 # Number of tag accesses +system.iocache.tags.data_accesses 375534 # Number of data accesses system.iocache.ReadReq_misses::tsunami.ide 174 # number of ReadReq misses system.iocache.ReadReq_misses::total 174 # number of ReadReq misses system.iocache.WriteReq_misses::tsunami.ide 41552 # number of WriteReq misses @@ -97,6 +104,7 @@ system.disk2.dma_read_txs 0 # Nu system.disk2.dma_write_full_pages 1 # Number of full page size DMA writes. system.disk2.dma_write_bytes 8192 # Number of bytes transfered via DMA writes. system.disk2.dma_write_txs 1 # Number of DMA write transactions. +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.dtb.fetch_hits 0 # ITB hits system.cpu.dtb.fetch_misses 0 # ITB misses system.cpu.dtb.fetch_acv 0 # ITB acv @@ -278,6 +286,13 @@ system.cpu.icache.tags.warmup_cycle 9686972500 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 511.215243 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.998467 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.998467 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 63 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 117 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::2 332 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 60970364 # Number of tag accesses +system.cpu.icache.tags.data_accesses 60970364 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 59129922 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 59129922 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 59129922 # number of demand (read+write) hits @@ -324,6 +339,15 @@ system.cpu.l2cache.tags.occ_percent::writebacks 0.859209 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.074270 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.064818 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.998297 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 65163 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 781 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 3260 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::2 4024 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::3 3055 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::4 54043 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.994308 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 31737437 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 31737437 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 906797 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::cpu.data 811229 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 1718026 # number of ReadReq hits @@ -400,6 +424,13 @@ system.cpu.dcache.tags.warmup_cycle 10840000 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 511.997802 # Average occupied blocks per requestor 
system.cpu.dcache.tags.occ_percent::cpu.data 0.999996 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.999996 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 443 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 66 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::2 3 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 66369799 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 66369799 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 7807780 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 7807780 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 5848212 # number of WriteReq hits diff --git a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing-dual/config.ini b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing-dual/config.ini index c1c2c449d..87138f314 100644 --- a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing-dual/config.ini +++ b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing-dual/config.ini @@ -15,16 +15,16 @@ boot_cpu_frequency=500 boot_osflags=root=/dev/hda1 console=ttyS0 cache_line_size=64 clk_domain=system.clk_domain -console=/scratch/nilay/GEM5/system/binaries/console +console=/dist/binaries/console eventq_index=0 init_param=0 -kernel=/scratch/nilay/GEM5/system/binaries/vmlinux +kernel=/dist/binaries/vmlinux load_addr_mask=1099511627775 mem_mode=timing mem_ranges=0:134217727 memories=system.physmem num_work_ids=16 -pal=/scratch/nilay/GEM5/system/binaries/ts_osfpal +pal=/dist/binaries/ts_osfpal readfile=tests/halt.sh symbolfile= system_rev=1024 @@ -101,6 +101,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu0.dcache.tags @@ -117,6 +118,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu0.dtb] @@ -139,6 +141,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu0.icache.tags @@ -155,6 +158,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu0.interrupts] @@ -164,6 +168,7 @@ eventq_index=0 [system.cpu0.isa] type=AlphaISA eventq_index=0 +system=system [system.cpu0.itb] type=AlphaTLB @@ -220,6 +225,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu1.dcache.tags @@ -236,6 +242,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu1.dtb] @@ -258,6 +265,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu1.icache.tags @@ -274,6 +282,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu1.interrupts] @@ -283,6 +292,7 @@ eventq_index=0 [system.cpu1.isa] type=AlphaISA eventq_index=0 +system=system [system.cpu1.itb] type=AlphaTLB @@ -319,7 +329,7 @@ table_size=65536 [system.disk0.image.child] 
type=RawDiskImage eventq_index=0 -image_file=/scratch/nilay/GEM5/system/disks/linux-latest.img +image_file=/dist/disks/linux-latest.img read_only=true [system.disk2] @@ -342,7 +352,7 @@ table_size=65536 [system.disk2.image.child] type=RawDiskImage eventq_index=0 -image_file=/scratch/nilay/GEM5/system/disks/linux-bigswap2.img +image_file=/dist/disks/linux-bigswap2.img read_only=true [system.intrctrl] @@ -376,6 +386,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=50 +sequential_access=false size=1024 system=system tags=system.iocache.tags @@ -392,6 +403,7 @@ block_size=64 clk_domain=system.clk_domain eventq_index=0 hit_latency=50 +sequential_access=false size=1024 [system.l2c] @@ -409,6 +421,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=4194304 system=system tags=system.l2c.tags @@ -425,6 +438,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=20 +sequential_access=false size=4194304 [system.membus] @@ -505,7 +519,7 @@ system=system [system.simple_disk.disk] type=RawDiskImage eventq_index=0 -image_file=/scratch/nilay/GEM5/system/disks/linux-latest.img +image_file=/dist/disks/linux-latest.img read_only=true [system.terminal] diff --git a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing-dual/simerr b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing-dual/simerr index 0bcb6e870..20fe2d682 100755 --- a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing-dual/simerr +++ b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing-dual/simerr @@ -2,4 +2,3 @@ warn: Sockets disabled, not accepting terminal connections warn: Sockets disabled, not accepting gdb connections warn: Prefetch instructions in Alpha do not do anything warn: Prefetch instructions in Alpha do not do anything -hack: be nice to actually delete the event here diff --git a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing-dual/simout b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing-dual/simout index a33dd389d..2d1ba2c03 100755 --- a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing-dual/simout +++ b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing-dual/simout @@ -1,15 +1,13 @@ -Redirecting stdout to build/ALPHA/tests/opt/quick/fs/10.linux-boot/alpha/linux/tsunami-simple-timing-dual/simout -Redirecting stderr to build/ALPHA/tests/opt/quick/fs/10.linux-boot/alpha/linux/tsunami-simple-timing-dual/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 24 2013 03:08:53 -gem5 started Sep 28 2013 03:05:51 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:27:55 +gem5 started Jan 22 2014 17:25:12 +gem5 executing on u200540-lin command line: build/ALPHA/gem5.opt -d build/ALPHA/tests/opt/quick/fs/10.linux-boot/alpha/linux/tsunami-simple-timing-dual -re tests/run.py build/ALPHA/tests/opt/quick/fs/10.linux-boot/alpha/linux/tsunami-simple-timing-dual Global frequency set at 1000000000000 ticks per second -info: kernel located at: /dist/m5/system/binaries/vmlinux +info: kernel located at: /dist/binaries/vmlinux 0: system.tsunami.io.rtc: Real-time clock set to Thu Jan 1 00:00:00 2009 info: Entering event queue @ 0. Starting simulation... 
-info: Launching CPU 1 @ 675287000 -Exiting @ tick 1961841175000 because m5_exit instruction encountered +info: Launching CPU 1 @ 688618000 +Exiting @ tick 1960909874500 because m5_exit instruction encountered diff --git a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing-dual/stats.txt b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing-dual/stats.txt index 8b5007cf5..46997f65e 100644 --- a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing-dual/stats.txt +++ b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing-dual/stats.txt @@ -4,13 +4,15 @@ sim_seconds 1.960910 # Nu sim_ticks 1960909874500 # Number of ticks simulated final_tick 1960909874500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 787846 # Simulator instruction rate (inst/s) -host_op_rate 787845 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 25353578812 # Simulator tick rate (ticks/s) -host_mem_usage 353976 # Number of bytes of host memory used -host_seconds 77.34 # Real time elapsed on the host +host_inst_rate 1305982 # Simulator instruction rate (inst/s) +host_op_rate 1305981 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 42027651646 # Simulator tick rate (ticks/s) +host_mem_usage 309852 # Number of bytes of host memory used +host_seconds 46.66 # Real time elapsed on the host sim_insts 60933947 # Number of instructions simulated sim_ops 60933947 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu0.inst 833472 # Number of bytes read from this memory system.physmem.bytes_read::cpu0.data 24887104 # Number of bytes read from this memory system.physmem.bytes_read::tsunami.ide 2650688 # Number of bytes read from this memory @@ -416,6 +418,7 @@ system.membus.respLayer1.occupancy 3830990646 # La system.membus.respLayer1.utilization 0.2 # Layer utilization (%) system.membus.respLayer2.occupancy 376315500 # Layer occupancy (ticks) system.membus.respLayer2.utilization 0.0 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.l2c.tags.replacements 342160 # number of replacements system.l2c.tags.tagsinuse 65219.945305 # Cycle average of tags in use system.l2c.tags.total_refs 2443226 # Total number of references to valid blocks. 
@@ -433,6 +436,15 @@ system.l2c.tags.occ_percent::cpu0.data 0.074731 # Av system.l2c.tags.occ_percent::cpu1.inst 0.002426 # Average percentage of cache occupancy system.l2c.tags.occ_percent::cpu1.data 0.000675 # Average percentage of cache occupancy system.l2c.tags.occ_percent::total 0.995177 # Average percentage of cache occupancy +system.l2c.tags.occ_task_id_blocks::1024 65187 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::0 117 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::1 761 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::2 5186 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::3 7242 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::4 51881 # Occupied blocks per task id +system.l2c.tags.occ_task_id_percent::1024 0.994675 # Percentage of cache occupancy per task id +system.l2c.tags.tag_accesses 25932224 # Number of tag accesses +system.l2c.tags.data_accesses 25932224 # Number of data accesses system.l2c.ReadReq_hits::cpu0.inst 684719 # number of ReadReq hits system.l2c.ReadReq_hits::cpu0.data 664525 # number of ReadReq hits system.l2c.ReadReq_hits::cpu1.inst 317383 # number of ReadReq hits @@ -724,6 +736,11 @@ system.iocache.tags.warmup_cycle 1754531382000 # C system.iocache.tags.occ_blocks::tsunami.ide 0.570482 # Average occupied blocks per requestor system.iocache.tags.occ_percent::tsunami.ide 0.035655 # Average percentage of cache occupancy system.iocache.tags.occ_percent::total 0.035655 # Average percentage of cache occupancy +system.iocache.tags.occ_task_id_blocks::1023 16 # Occupied blocks per task id +system.iocache.tags.age_task_id_blocks_1023::3 16 # Occupied blocks per task id +system.iocache.tags.occ_task_id_percent::1023 1 # Percentage of cache occupancy per task id +system.iocache.tags.tag_accesses 375534 # Number of tag accesses +system.iocache.tags.data_accesses 375534 # Number of data accesses system.iocache.ReadReq_misses::tsunami.ide 174 # number of ReadReq misses system.iocache.ReadReq_misses::total 174 # number of ReadReq misses system.iocache.WriteReq_misses::tsunami.ide 41552 # number of WriteReq misses @@ -1105,6 +1122,14 @@ system.cpu0.icache.tags.warmup_cycle 40091069250 # Cy system.cpu0.icache.tags.occ_blocks::cpu0.inst 508.398756 # Average occupied blocks per requestor system.cpu0.icache.tags.occ_percent::cpu0.inst 0.992966 # Average percentage of cache occupancy system.cpu0.icache.tags.occ_percent::total 0.992966 # Average percentage of cache occupancy +system.cpu0.icache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::0 54 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::1 1 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::2 447 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::3 10 # Occupied blocks per task id +system.cpu0.icache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu0.icache.tags.tag_accesses 48690501 # Number of tag accesses +system.cpu0.icache.tags.data_accesses 48690501 # Number of data accesses system.cpu0.icache.ReadReq_hits::cpu0.inst 47294969 # number of ReadReq hits system.cpu0.icache.ReadReq_hits::total 47294969 # number of ReadReq hits system.cpu0.icache.demand_hits::cpu0.inst 47294969 # number of demand (read+write) hits @@ -1183,6 +1208,13 @@ system.cpu0.dcache.tags.warmup_cycle 107902250 # Cy 
system.cpu0.dcache.tags.occ_blocks::cpu0.data 505.271614 # Average occupied blocks per requestor system.cpu0.dcache.tags.occ_percent::cpu0.data 0.986859 # Average percentage of cache occupancy system.cpu0.dcache.tags.occ_percent::total 0.986859 # Average percentage of cache occupancy +system.cpu0.dcache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu0.dcache.tags.age_task_id_blocks_1024::0 125 # Occupied blocks per task id +system.cpu0.dcache.tags.age_task_id_blocks_1024::1 318 # Occupied blocks per task id +system.cpu0.dcache.tags.age_task_id_blocks_1024::2 69 # Occupied blocks per task id +system.cpu0.dcache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu0.dcache.tags.tag_accesses 51851796 # Number of tag accesses +system.cpu0.dcache.tags.data_accesses 51851796 # Number of data accesses system.cpu0.dcache.ReadReq_hits::cpu0.data 6451735 # number of ReadReq hits system.cpu0.dcache.ReadReq_hits::total 6451735 # number of ReadReq hits system.cpu0.dcache.WriteReq_hits::cpu0.data 4706856 # number of WriteReq hits @@ -1461,6 +1493,12 @@ system.cpu1.icache.tags.warmup_cycle 1958987590000 # C system.cpu1.icache.tags.occ_blocks::cpu1.inst 446.450379 # Average occupied blocks per requestor system.cpu1.icache.tags.occ_percent::cpu1.inst 0.871973 # Average percentage of cache occupancy system.cpu1.icache.tags.occ_percent::total 0.871973 # Average percentage of cache occupancy +system.cpu1.icache.tags.occ_task_id_blocks::1024 511 # Occupied blocks per task id +system.cpu1.icache.tags.age_task_id_blocks_1024::2 72 # Occupied blocks per task id +system.cpu1.icache.tags.age_task_id_blocks_1024::3 439 # Occupied blocks per task id +system.cpu1.icache.tags.occ_task_id_percent::1024 0.998047 # Percentage of cache occupancy per task id +system.cpu1.icache.tags.tag_accesses 13271059 # Number of tag accesses +system.cpu1.icache.tags.data_accesses 13271059 # Number of data accesses system.cpu1.icache.ReadReq_hits::cpu1.inst 12635285 # number of ReadReq hits system.cpu1.icache.ReadReq_hits::total 12635285 # number of ReadReq hits system.cpu1.icache.demand_hits::cpu1.inst 12635285 # number of demand (read+write) hits @@ -1539,6 +1577,12 @@ system.cpu1.dcache.tags.warmup_cycle 67802253000 # Cy system.cpu1.dcache.tags.occ_blocks::cpu1.data 485.752776 # Average occupied blocks per requestor system.cpu1.dcache.tags.occ_percent::cpu1.data 0.948736 # Average percentage of cache occupancy system.cpu1.dcache.tags.occ_percent::total 0.948736 # Average percentage of cache occupancy +system.cpu1.dcache.tags.occ_task_id_blocks::1024 326 # Occupied blocks per task id +system.cpu1.dcache.tags.age_task_id_blocks_1024::2 31 # Occupied blocks per task id +system.cpu1.dcache.tags.age_task_id_blocks_1024::3 295 # Occupied blocks per task id +system.cpu1.dcache.tags.occ_task_id_percent::1024 0.636719 # Percentage of cache occupancy per task id +system.cpu1.dcache.tags.tag_accesses 16587420 # Number of tag accesses +system.cpu1.dcache.tags.data_accesses 16587420 # Number of data accesses system.cpu1.dcache.ReadReq_hits::cpu1.data 2220669 # number of ReadReq hits system.cpu1.dcache.ReadReq_hits::total 2220669 # number of ReadReq hits system.cpu1.dcache.WriteReq_hits::cpu1.data 1595283 # number of WriteReq hits diff --git a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing/config.ini b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing/config.ini index 54bf6e928..161fac4a8 100644 --- 
a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing/config.ini +++ b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=true +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -13,15 +15,16 @@ boot_cpu_frequency=500 boot_osflags=root=/dev/hda1 console=ttyS0 cache_line_size=64 clk_domain=system.clk_domain -console=/dist/m5/system/binaries/console +console=/dist/binaries/console +eventq_index=0 init_param=0 -kernel=/dist/m5/system/binaries/vmlinux +kernel=/dist/binaries/vmlinux load_addr_mask=1099511627775 mem_mode=timing mem_ranges=0:134217727 memories=system.physmem num_work_ids=16 -pal=/dist/m5/system/binaries/ts_osfpal +pal=/dist/binaries/ts_osfpal readfile=tests/halt.sh symbolfile= system_rev=1024 @@ -39,6 +42,7 @@ system_port=system.membus.slave[0] type=Bridge clk_domain=system.clk_domain delay=50000 +eventq_index=0 ranges=8796093022208:18446744073709551615 req_size=16 resp_size=16 @@ -48,6 +52,7 @@ slave=system.membus.master[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -60,6 +65,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -86,6 +92,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=4 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -94,6 +101,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu.dcache.tags @@ -108,11 +116,14 @@ type=LRU assoc=4 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu.dtb] type=AlphaTLB +eventq_index=0 size=64 [system.cpu.icache] @@ -121,6 +132,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=1 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -129,6 +141,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu.icache.tags @@ -143,17 +156,23 @@ type=LRU assoc=1 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu.interrupts] type=AlphaInterrupts +eventq_index=0 [system.cpu.isa] type=AlphaISA +eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB +eventq_index=0 size=48 [system.cpu.l2cache] @@ -162,6 +181,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -170,6 +190,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=4194304 system=system tags=system.cpu.l2cache.tags @@ -184,12 +205,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=4194304 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -199,10 +223,12 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 
voltage_domain=system.voltage_domain [system.disk0] @@ -210,19 +236,22 @@ type=IdeDisk children=image delay=1000000 driveID=master +eventq_index=0 image=system.disk0.image [system.disk0.image] type=CowDiskImage children=child child=system.disk0.image.child +eventq_index=0 image_file= read_only=false table_size=65536 [system.disk0.image.child] type=RawDiskImage -image_file=/dist/m5/system/disks/linux-latest.img +eventq_index=0 +image_file=/dist/disks/linux-latest.img read_only=true [system.disk2] @@ -230,28 +259,33 @@ type=IdeDisk children=image delay=1000000 driveID=master +eventq_index=0 image=system.disk2.image [system.disk2.image] type=CowDiskImage children=child child=system.disk2.image.child +eventq_index=0 image_file= read_only=false table_size=65536 [system.disk2.image.child] type=RawDiskImage -image_file=/dist/m5/system/disks/linux-bigswap2.img +eventq_index=0 +image_file=/dist/disks/linux-bigswap2.img read_only=true [system.intrctrl] type=IntrControl +eventq_index=0 sys=system [system.iobus] type=NoncoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 use_default_range=true width=8 @@ -265,6 +299,7 @@ children=tags addr_ranges=0:134217727 assoc=8 clk_domain=system.clk_domain +eventq_index=0 forward_snoops=false hit_latency=50 is_top_level=true @@ -273,6 +308,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=50 +sequential_access=false size=1024 system=system tags=system.iocache.tags @@ -287,13 +323,16 @@ type=LRU assoc=8 block_size=64 clk_domain=system.clk_domain +eventq_index=0 hit_latency=50 +sequential_access=false size=1024 [system.membus] type=CoherentBus children=badaddr_responder clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -305,6 +344,7 @@ slave=system.system_port system.cpu.l2cache.mem_side system.iocache.mem_side [system.membus.badaddr_responder] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=0 pio_latency=100000 @@ -331,6 +371,7 @@ conf_table_reported=true device_bus_width=8 device_rowbuffer_size=1024 devices_per_rank=8 +eventq_index=0 in_addr_map=true mem_sched_policy=frfcfs null=false @@ -342,29 +383,35 @@ static_backend_latency=10000 static_frontend_latency=10000 tBURST=5000 tCL=13750 +tRAS=35000 tRCD=13750 tREFI=7800000 tRFC=300000 tRP=13750 +tRRD=6250 tWTR=7500 tXAW=40000 write_buffer_size=32 -write_thresh_perc=70 +write_high_thresh_perc=70 +write_low_thresh_perc=0 port=system.membus.master[1] [system.simple_disk] type=SimpleDisk children=disk disk=system.simple_disk.disk +eventq_index=0 system=system [system.simple_disk.disk] type=RawDiskImage -image_file=/dist/m5/system/disks/linux-latest.img +eventq_index=0 +image_file=/dist/disks/linux-latest.img read_only=true [system.terminal] type=Terminal +eventq_index=0 intr_control=system.intrctrl number=0 output=true @@ -373,6 +420,7 @@ port=3456 [system.tsunami] type=Tsunami children=backdoor cchip ethernet fake_OROM fake_ata0 fake_ata1 fake_pnp_addr fake_pnp_read0 fake_pnp_read1 fake_pnp_read2 fake_pnp_read3 fake_pnp_read4 fake_pnp_read5 fake_pnp_read6 fake_pnp_read7 fake_pnp_write fake_ppc fake_sm_chip fake_uart1 fake_uart2 fake_uart3 fake_uart4 fb ide io pchip pciconfig uart +eventq_index=0 intrctrl=system.intrctrl system=system @@ -381,6 +429,7 @@ type=AlphaBackdoor clk_domain=system.clk_domain cpu=system.cpu disk=system.simple_disk +eventq_index=0 pio_addr=8804682956800 pio_latency=100000 platform=system.tsunami @@ -391,6 +440,7 @@ pio=system.iobus.master[24] [system.tsunami.cchip] 
type=TsunamiCChip clk_domain=system.clk_domain +eventq_index=0 pio_addr=8803072344064 pio_latency=100000 system=system @@ -419,6 +469,7 @@ BAR5LegacyIO=false BAR5Size=0 BIST=0 CacheLineSize=0 +CapabilityPtr=0 CardbusCIS=0 ClassCode=2 Command=0 @@ -428,8 +479,40 @@ HeaderType=0 InterruptLine=30 InterruptPin=1 LatencyTimer=0 +MSICAPBaseOffset=0 +MSICAPCapId=0 +MSICAPMaskBits=0 +MSICAPMsgAddr=0 +MSICAPMsgCtrl=0 +MSICAPMsgData=0 +MSICAPMsgUpperAddr=0 +MSICAPNextCapability=0 +MSICAPPendingBits=0 +MSIXCAPBaseOffset=0 +MSIXCAPCapId=0 +MSIXCAPNextCapability=0 +MSIXMsgCtrl=0 +MSIXPbaOffset=0 +MSIXTableOffset=0 MaximumLatency=52 MinimumGrant=176 +PMCAPBaseOffset=0 +PMCAPCapId=0 +PMCAPCapabilities=0 +PMCAPCtrlStatus=0 +PMCAPNextCapability=0 +PXCAPBaseOffset=0 +PXCAPCapId=0 +PXCAPCapabilities=0 +PXCAPDevCap2=0 +PXCAPDevCapabilities=0 +PXCAPDevCtrl=0 +PXCAPDevCtrl2=0 +PXCAPDevStatus=0 +PXCAPLinkCap=0 +PXCAPLinkCtrl=0 +PXCAPLinkStatus=0 +PXCAPNextCapability=0 ProgIF=0 Revision=0 Status=656 @@ -446,6 +529,7 @@ dma_read_delay=0 dma_read_factor=0 dma_write_delay=0 dma_write_factor=0 +eventq_index=0 hardware_address=00:90:00:00:00:01 intr_delay=10000000 pci_bus=0 @@ -469,6 +553,7 @@ pio=system.iobus.master[27] [system.tsunami.fake_OROM] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8796093677568 pio_latency=100000 @@ -486,6 +571,7 @@ pio=system.iobus.master[8] [system.tsunami.fake_ata0] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848432 pio_latency=100000 @@ -503,6 +589,7 @@ pio=system.iobus.master[19] [system.tsunami.fake_ata1] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848304 pio_latency=100000 @@ -520,6 +607,7 @@ pio=system.iobus.master[20] [system.tsunami.fake_pnp_addr] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848569 pio_latency=100000 @@ -537,6 +625,7 @@ pio=system.iobus.master[9] [system.tsunami.fake_pnp_read0] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848451 pio_latency=100000 @@ -554,6 +643,7 @@ pio=system.iobus.master[11] [system.tsunami.fake_pnp_read1] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848515 pio_latency=100000 @@ -571,6 +661,7 @@ pio=system.iobus.master[12] [system.tsunami.fake_pnp_read2] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848579 pio_latency=100000 @@ -588,6 +679,7 @@ pio=system.iobus.master[13] [system.tsunami.fake_pnp_read3] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848643 pio_latency=100000 @@ -605,6 +697,7 @@ pio=system.iobus.master[14] [system.tsunami.fake_pnp_read4] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848707 pio_latency=100000 @@ -622,6 +715,7 @@ pio=system.iobus.master[15] [system.tsunami.fake_pnp_read5] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848771 pio_latency=100000 @@ -639,6 +733,7 @@ pio=system.iobus.master[16] [system.tsunami.fake_pnp_read6] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848835 pio_latency=100000 @@ -656,6 +751,7 @@ pio=system.iobus.master[17] [system.tsunami.fake_pnp_read7] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848899 pio_latency=100000 @@ -673,6 +769,7 @@ pio=system.iobus.master[18] 
[system.tsunami.fake_pnp_write] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615850617 pio_latency=100000 @@ -690,6 +787,7 @@ pio=system.iobus.master[10] [system.tsunami.fake_ppc] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848891 pio_latency=100000 @@ -707,6 +805,7 @@ pio=system.iobus.master[7] [system.tsunami.fake_sm_chip] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848816 pio_latency=100000 @@ -724,6 +823,7 @@ pio=system.iobus.master[2] [system.tsunami.fake_uart1] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848696 pio_latency=100000 @@ -741,6 +841,7 @@ pio=system.iobus.master[3] [system.tsunami.fake_uart2] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848936 pio_latency=100000 @@ -758,6 +859,7 @@ pio=system.iobus.master[4] [system.tsunami.fake_uart3] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848680 pio_latency=100000 @@ -775,6 +877,7 @@ pio=system.iobus.master[5] [system.tsunami.fake_uart4] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848944 pio_latency=100000 @@ -793,6 +896,7 @@ pio=system.iobus.master[6] type=BadDevice clk_domain=system.clk_domain devicename=FrameBuffer +eventq_index=0 pio_addr=8804615848912 pio_latency=100000 system=system @@ -820,6 +924,7 @@ BAR5LegacyIO=false BAR5Size=0 BIST=0 CacheLineSize=0 +CapabilityPtr=0 CardbusCIS=0 ClassCode=1 Command=0 @@ -829,8 +934,40 @@ HeaderType=0 InterruptLine=31 InterruptPin=1 LatencyTimer=0 +MSICAPBaseOffset=0 +MSICAPCapId=0 +MSICAPMaskBits=0 +MSICAPMsgAddr=0 +MSICAPMsgCtrl=0 +MSICAPMsgData=0 +MSICAPMsgUpperAddr=0 +MSICAPNextCapability=0 +MSICAPPendingBits=0 +MSIXCAPBaseOffset=0 +MSIXCAPCapId=0 +MSIXCAPNextCapability=0 +MSIXMsgCtrl=0 +MSIXPbaOffset=0 +MSIXTableOffset=0 MaximumLatency=0 MinimumGrant=0 +PMCAPBaseOffset=0 +PMCAPCapId=0 +PMCAPCapabilities=0 +PMCAPCtrlStatus=0 +PMCAPNextCapability=0 +PXCAPBaseOffset=0 +PXCAPCapId=0 +PXCAPCapabilities=0 +PXCAPDevCap2=0 +PXCAPDevCapabilities=0 +PXCAPDevCtrl=0 +PXCAPDevCtrl2=0 +PXCAPDevStatus=0 +PXCAPLinkCap=0 +PXCAPLinkCtrl=0 +PXCAPLinkStatus=0 +PXCAPNextCapability=0 ProgIF=133 Revision=0 Status=640 @@ -842,6 +979,7 @@ clk_domain=system.clk_domain config_latency=20000 ctrl_offset=0 disks=system.disk0 system.disk2 +eventq_index=0 io_shift=0 pci_bus=0 pci_dev=0 @@ -856,6 +994,7 @@ pio=system.iobus.master[25] [system.tsunami.io] type=TsunamiIO clk_domain=system.clk_domain +eventq_index=0 frequency=976562500 pio_addr=8804615847936 pio_latency=100000 @@ -868,6 +1007,7 @@ pio=system.iobus.master[22] [system.tsunami.pchip] type=TsunamiPChip clk_domain=system.clk_domain +eventq_index=0 pio_addr=8802535473152 pio_latency=100000 system=system @@ -878,6 +1018,7 @@ pio=system.iobus.master[1] type=PciConfigAll bus=0 clk_domain=system.clk_domain +eventq_index=0 pio_addr=0 pio_latency=30000 platform=system.tsunami @@ -888,6 +1029,7 @@ pio=system.iobus.default [system.tsunami.uart] type=Uart8250 clk_domain=system.clk_domain +eventq_index=0 pio_addr=8804615848952 pio_latency=100000 platform=system.tsunami @@ -897,5 +1039,6 @@ pio=system.iobus.master[23] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing/simerr b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing/simerr index 
0bcb6e870..20fe2d682 100755 --- a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing/simerr +++ b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing/simerr @@ -2,4 +2,3 @@ warn: Sockets disabled, not accepting terminal connections warn: Sockets disabled, not accepting gdb connections warn: Prefetch instructions in Alpha do not do anything warn: Prefetch instructions in Alpha do not do anything -hack: be nice to actually delete the event here diff --git a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing/simout b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing/simout index cabc90a11..fa5fb8ad8 100755 --- a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing/simout +++ b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing/simout @@ -1,14 +1,12 @@ -Redirecting stdout to build/ALPHA/tests/opt/quick/fs/10.linux-boot/alpha/linux/tsunami-simple-timing/simout -Redirecting stderr to build/ALPHA/tests/opt/quick/fs/10.linux-boot/alpha/linux/tsunami-simple-timing/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 24 2013 03:08:53 -gem5 started Sep 28 2013 03:05:50 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:27:55 +gem5 started Jan 22 2014 17:24:48 +gem5 executing on u200540-lin command line: build/ALPHA/gem5.opt -d build/ALPHA/tests/opt/quick/fs/10.linux-boot/alpha/linux/tsunami-simple-timing -re tests/run.py build/ALPHA/tests/opt/quick/fs/10.linux-boot/alpha/linux/tsunami-simple-timing Global frequency set at 1000000000000 ticks per second -info: kernel located at: /dist/m5/system/binaries/vmlinux +info: kernel located at: /dist/binaries/vmlinux 0: system.tsunami.io.rtc: Real-time clock set to Thu Jan 1 00:00:00 2009 info: Entering event queue @ 0. Starting simulation... 
-Exiting @ tick 1918473094000 because m5_exit instruction encountered +Exiting @ tick 1920428041000 because m5_exit instruction encountered diff --git a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing/stats.txt b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing/stats.txt index 479e1f707..01b0606bd 100644 --- a/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing/stats.txt +++ b/tests/quick/fs/10.linux-boot/ref/alpha/linux/tsunami-simple-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 1.920428 # Nu sim_ticks 1920428041000 # Number of ticks simulated final_tick 1920428041000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 1218375 # Simulator instruction rate (inst/s) -host_op_rate 1218374 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 41646226437 # Simulator tick rate (ticks/s) -host_mem_usage 305884 # Number of bytes of host memory used -host_seconds 46.11 # Real time elapsed on the host +host_inst_rate 1405906 # Simulator instruction rate (inst/s) +host_op_rate 1405905 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 48056353161 # Simulator tick rate (ticks/s) +host_mem_usage 307800 # Number of bytes of host memory used +host_seconds 39.96 # Real time elapsed on the host sim_insts 56182750 # Number of instructions simulated sim_ops 56182750 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 850688 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 24846912 # Number of bytes read from this memory system.physmem.bytes_read::tsunami.ide 2652352 # Number of bytes read from this memory @@ -414,6 +416,11 @@ system.iocache.tags.warmup_cycle 1753529489000 # C system.iocache.tags.occ_blocks::tsunami.ide 1.352288 # Average occupied blocks per requestor system.iocache.tags.occ_percent::tsunami.ide 0.084518 # Average percentage of cache occupancy system.iocache.tags.occ_percent::total 0.084518 # Average percentage of cache occupancy +system.iocache.tags.occ_task_id_blocks::1023 16 # Occupied blocks per task id +system.iocache.tags.age_task_id_blocks_1023::3 16 # Occupied blocks per task id +system.iocache.tags.occ_task_id_percent::1023 1 # Percentage of cache occupancy per task id +system.iocache.tags.tag_accesses 375525 # Number of tag accesses +system.iocache.tags.data_accesses 375525 # Number of data accesses system.iocache.ReadReq_misses::tsunami.ide 173 # number of ReadReq misses system.iocache.ReadReq_misses::total 173 # number of ReadReq misses system.iocache.WriteReq_misses::tsunami.ide 41552 # number of WriteReq misses @@ -509,6 +516,7 @@ system.disk2.dma_read_txs 0 # Nu system.disk2.dma_write_full_pages 1 # Number of full page size DMA writes. system.disk2.dma_write_bytes 8192 # Number of bytes transfered via DMA writes. system.disk2.dma_write_txs 1 # Number of DMA write transactions. 
+system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.dtb.fetch_hits 0 # ITB hits system.cpu.dtb.fetch_misses 0 # ITB misses system.cpu.dtb.fetch_acv 0 # ITB acv @@ -756,6 +764,14 @@ system.cpu.icache.tags.warmup_cycle 39723654250 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 508.321671 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.992816 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.992816 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 511 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 63 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 1 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::2 438 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::3 9 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.998047 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 57123599 # Number of tag accesses +system.cpu.icache.tags.data_accesses 57123599 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 55265541 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 55265541 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 55265541 # number of demand (read+write) hits @@ -838,6 +854,15 @@ system.cpu.l2cache.tags.occ_percent::writebacks 0.848127 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.072615 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.075609 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.996351 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 65162 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 178 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 1050 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::2 4896 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::3 3257 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::4 55781 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.994293 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 25947571 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 25947571 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 915717 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::cpu.data 814814 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 1730531 # number of ReadReq hits @@ -1004,6 +1029,13 @@ system.cpu.dcache.tags.warmup_cycle 107298250 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 511.978915 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.999959 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.999959 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 187 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 257 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::2 68 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy 
per task id +system.cpu.dcache.tags.tag_accesses 63152102 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 63152102 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 7814622 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 7814622 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 5852326 # number of WriteReq hits diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic-dual/config.ini b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic-dual/config.ini index db7088ff9..894acecbc 100644 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic-dual/config.ini +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic-dual/config.ini @@ -12,7 +12,7 @@ time_sync_spin_threshold=100000000 type=LinuxArmSystem children=bridge cf0 clk_domain cpu0 cpu1 cpu_clk_domain intrctrl iobus iocache l2c membus physmem realview terminal toL2Bus vncserver voltage_domain atags_addr=256 -boot_loader=/scratch/nilay/GEM5/system/binaries/boot.arm +boot_loader=/dist/binaries/boot.arm boot_osflags=earlyprintk console=ttyAMA0 lpj=19988480 norandmaps rw loglevel=8 mem=128MB root=/dev/sda1 cache_line_size=64 clk_domain=system.clk_domain @@ -23,7 +23,7 @@ eventq_index=0 flags_addr=268435504 gic_cpu_addr=520093952 init_param=0 -kernel=/scratch/nilay/GEM5/system/binaries/vmlinux.arm.smp.fb.2.6.38.8 +kernel=/dist/binaries/vmlinux.arm.smp.fb.2.6.38.8 load_addr_mask=268435455 machine_type=RealView_PBX mem_mode=atomic @@ -75,7 +75,7 @@ table_size=65536 [system.cf0.image.child] type=RawDiskImage eventq_index=0 -image_file=/scratch/nilay/GEM5/system/disks/linux-arm-ael.img +image_file=/dist/disks/linux-arm-ael.img read_only=true [system.clk_domain] @@ -137,6 +137,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu0.dcache.tags @@ -153,6 +154,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu0.dtb] @@ -185,6 +187,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu0.icache.tags @@ -201,6 +204,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu0.interrupts] @@ -297,6 +301,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu1.dcache.tags @@ -313,6 +318,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu1.dtb] @@ -345,6 +351,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu1.icache.tags @@ -361,6 +368,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu1.interrupts] @@ -440,6 +448,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=50 +sequential_access=false size=1024 system=system tags=system.iocache.tags @@ -456,6 +465,7 @@ block_size=64 clk_domain=system.clk_domain eventq_index=0 hit_latency=50 +sequential_access=false size=1024 [system.l2c] @@ -473,6 +483,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=4194304 system=system tags=system.l2c.tags @@ -489,6 +500,7 @@ block_size=64 
clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=20 +sequential_access=false size=4194304 [system.membus] diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic-dual/simerr b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic-dual/simerr index 4ccac5e7b..5a43c8b18 100755 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic-dual/simerr +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic-dual/simerr @@ -1,7 +1,6 @@ warn: Sockets disabled, not accepting vnc client connections warn: Sockets disabled, not accepting terminal connections warn: Sockets disabled, not accepting gdb connections -warn: DTB file specified, but no device tree support in kernel warn: The clidr register always reports 0 caches. warn: clidr LoUIS field of 0b001 to match current ARM implementations. warn: The csselr register isn't implemented. @@ -15,4 +14,3 @@ warn: instruction 'mcr bpiallis' unimplemented warn: LCD dual screen mode not supported warn: instruction 'mcr icialluis' unimplemented warn: instruction 'mcr icialluis' unimplemented -hack: be nice to actually delete the event here diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic-dual/simout b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic-dual/simout index 789ceb651..1e2520995 100755 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic-dual/simout +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic-dual/simout @@ -1,14 +1,12 @@ -Redirecting stdout to build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-simple-atomic-dual/simout -Redirecting stderr to build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-simple-atomic-dual/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 07:58:15 -gem5 started Sep 22 2013 09:04:45 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:24:06 +gem5 started Jan 22 2014 17:30:53 +gem5 executing on u200540-lin command line: build/ARM/gem5.opt -d build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-simple-atomic-dual -re tests/run.py build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-simple-atomic-dual Global frequency set at 1000000000000 ticks per second -info: kernel located at: /dist/m5/system/binaries/vmlinux.arm.smp.fb.2.6.38.8 +info: kernel located at: /dist/binaries/vmlinux.arm.smp.fb.2.6.38.8 info: Using bootloader at address 0x80000000 info: Entering event queue @ 0. Starting simulation... 
Exiting @ tick 912096763500 because m5_exit instruction encountered diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic-dual/stats.txt b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic-dual/stats.txt index 622f0dad2..eb8cedaf3 100644 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic-dual/stats.txt +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic-dual/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.912097 # Nu sim_ticks 912096763500 # Number of ticks simulated final_tick 912096763500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 1031681 # Simulator instruction rate (inst/s) -host_op_rate 1328287 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 15269405009 # Simulator tick rate (ticks/s) -host_mem_usage 443324 # Number of bytes of host memory used -host_seconds 59.73 # Real time elapsed on the host +host_inst_rate 1859152 # Simulator instruction rate (inst/s) +host_op_rate 2393654 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 27516397451 # Simulator tick rate (ticks/s) +host_mem_usage 399324 # Number of bytes of host memory used +host_seconds 33.15 # Real time elapsed on the host sim_insts 61625970 # Number of instructions simulated sim_ops 79343340 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::realview.clcd 39321600 # Number of bytes read from this memory system.physmem.bytes_read::cpu0.dtb.walker 64 # Number of bytes read from this memory system.physmem.bytes_read::cpu0.itb.walker 192 # Number of bytes read from this memory @@ -87,6 +89,7 @@ system.realview.nvmem.bw_total::total 75 # To system.membus.throughput 64986577 # Throughput (bytes/s) system.membus.data_through_bus 59274047 # Total data (bytes) system.membus.snoop_data_through_bus 0 # Total snoop data (bytes) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.l2c.tags.replacements 70658 # number of replacements system.l2c.tags.tagsinuse 51560.149653 # Cycle average of tags in use system.l2c.tags.total_refs 1623339 # Total number of references to valid blocks. 
@@ -110,6 +113,19 @@ system.l2c.tags.occ_percent::cpu1.dtb.walker 0.000041 system.l2c.tags.occ_percent::cpu1.inst 0.032447 # Average percentage of cache occupancy system.l2c.tags.occ_percent::cpu1.data 0.050521 # Average percentage of cache occupancy system.l2c.tags.occ_percent::total 0.786745 # Average percentage of cache occupancy +system.l2c.tags.occ_task_id_blocks::1023 4 # Occupied blocks per task id +system.l2c.tags.occ_task_id_blocks::1024 65148 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1023::2 1 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1023::4 3 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::0 50 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::1 203 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::2 3771 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::3 12549 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::4 48575 # Occupied blocks per task id +system.l2c.tags.occ_task_id_percent::1023 0.000061 # Percentage of cache occupancy per task id +system.l2c.tags.occ_task_id_percent::1024 0.994080 # Percentage of cache occupancy per task id +system.l2c.tags.tag_accesses 16906854 # Number of tag accesses +system.l2c.tags.data_accesses 16906854 # Number of data accesses system.l2c.ReadReq_hits::cpu0.dtb.walker 3874 # number of ReadReq hits system.l2c.ReadReq_hits::cpu0.itb.walker 1919 # number of ReadReq hits system.l2c.ReadReq_hits::cpu0.inst 421038 # number of ReadReq hits @@ -349,6 +365,12 @@ system.cpu0.icache.tags.warmup_cycle 64537139000 # Cy system.cpu0.icache.tags.occ_blocks::cpu0.inst 511.015216 # Average occupied blocks per requestor system.cpu0.icache.tags.occ_percent::cpu0.inst 0.998077 # Average percentage of cache occupancy system.cpu0.icache.tags.occ_percent::total 0.998077 # Average percentage of cache occupancy +system.cpu0.icache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::2 508 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::3 4 # Occupied blocks per task id +system.cpu0.icache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu0.icache.tags.tag_accesses 30669233 # Number of tag accesses +system.cpu0.icache.tags.data_accesses 30669233 # Number of data accesses system.cpu0.icache.ReadReq_hits::cpu0.inst 29811115 # number of ReadReq hits system.cpu0.icache.ReadReq_hits::total 29811115 # number of ReadReq hits system.cpu0.icache.demand_hits::cpu0.inst 29811115 # number of demand (read+write) hits @@ -391,6 +413,11 @@ system.cpu0.dcache.tags.warmup_cycle 22115000 # Cy system.cpu0.dcache.tags.occ_blocks::cpu0.data 494.763091 # Average occupied blocks per requestor system.cpu0.dcache.tags.occ_percent::cpu0.data 0.966334 # Average percentage of cache occupancy system.cpu0.dcache.tags.occ_percent::total 0.966334 # Average percentage of cache occupancy +system.cpu0.dcache.tags.occ_task_id_blocks::1024 372 # Occupied blocks per task id +system.cpu0.dcache.tags.age_task_id_blocks_1024::2 372 # Occupied blocks per task id +system.cpu0.dcache.tags.occ_task_id_percent::1024 0.726562 # Percentage of cache occupancy per task id +system.cpu0.dcache.tags.tag_accesses 51675155 # Number of tag accesses +system.cpu0.dcache.tags.data_accesses 51675155 # Number of data accesses system.cpu0.dcache.ReadReq_hits::cpu0.data 6512305 # number of ReadReq hits system.cpu0.dcache.ReadReq_hits::total 
6512305 # number of ReadReq hits system.cpu0.dcache.WriteReq_hits::cpu0.data 5630881 # number of WriteReq hits @@ -409,8 +436,8 @@ system.cpu0.dcache.WriteReq_misses::cpu0.data 167342 system.cpu0.dcache.WriteReq_misses::total 167342 # number of WriteReq misses system.cpu0.dcache.LoadLockedReq_misses::cpu0.data 9062 # number of LoadLockedReq misses system.cpu0.dcache.LoadLockedReq_misses::total 9062 # number of LoadLockedReq misses -system.cpu0.dcache.StoreCondReq_misses::cpu0.data 7469 # number of StoreCondReq misses -system.cpu0.dcache.StoreCondReq_misses::total 7469 # number of StoreCondReq misses +system.cpu0.dcache.StoreCondReq_misses::cpu0.data 7466 # number of StoreCondReq misses +system.cpu0.dcache.StoreCondReq_misses::total 7466 # number of StoreCondReq misses system.cpu0.dcache.demand_misses::cpu0.data 364509 # number of demand (read+write) misses system.cpu0.dcache.demand_misses::total 364509 # number of demand (read+write) misses system.cpu0.dcache.overall_misses::cpu0.data 364509 # number of overall misses @@ -421,8 +448,8 @@ system.cpu0.dcache.WriteReq_accesses::cpu0.data 5798223 system.cpu0.dcache.WriteReq_accesses::total 5798223 # number of WriteReq accesses(hits+misses) system.cpu0.dcache.LoadLockedReq_accesses::cpu0.data 160681 # number of LoadLockedReq accesses(hits+misses) system.cpu0.dcache.LoadLockedReq_accesses::total 160681 # number of LoadLockedReq accesses(hits+misses) -system.cpu0.dcache.StoreCondReq_accesses::cpu0.data 160649 # number of StoreCondReq accesses(hits+misses) -system.cpu0.dcache.StoreCondReq_accesses::total 160649 # number of StoreCondReq accesses(hits+misses) +system.cpu0.dcache.StoreCondReq_accesses::cpu0.data 160646 # number of StoreCondReq accesses(hits+misses) +system.cpu0.dcache.StoreCondReq_accesses::total 160646 # number of StoreCondReq accesses(hits+misses) system.cpu0.dcache.demand_accesses::cpu0.data 12507695 # number of demand (read+write) accesses system.cpu0.dcache.demand_accesses::total 12507695 # number of demand (read+write) accesses system.cpu0.dcache.overall_accesses::cpu0.data 12507695 # number of overall (read+write) accesses @@ -433,8 +460,8 @@ system.cpu0.dcache.WriteReq_miss_rate::cpu0.data 0.028861 system.cpu0.dcache.WriteReq_miss_rate::total 0.028861 # miss rate for WriteReq accesses system.cpu0.dcache.LoadLockedReq_miss_rate::cpu0.data 0.056397 # miss rate for LoadLockedReq accesses system.cpu0.dcache.LoadLockedReq_miss_rate::total 0.056397 # miss rate for LoadLockedReq accesses -system.cpu0.dcache.StoreCondReq_miss_rate::cpu0.data 0.046493 # miss rate for StoreCondReq accesses -system.cpu0.dcache.StoreCondReq_miss_rate::total 0.046493 # miss rate for StoreCondReq accesses +system.cpu0.dcache.StoreCondReq_miss_rate::cpu0.data 0.046475 # miss rate for StoreCondReq accesses +system.cpu0.dcache.StoreCondReq_miss_rate::total 0.046475 # miss rate for StoreCondReq accesses system.cpu0.dcache.demand_miss_rate::cpu0.data 0.029143 # miss rate for demand accesses system.cpu0.dcache.demand_miss_rate::total 0.029143 # miss rate for demand accesses system.cpu0.dcache.overall_miss_rate::cpu0.data 0.029143 # miss rate for overall accesses @@ -525,6 +552,14 @@ system.cpu1.icache.tags.warmup_cycle 69967763000 # Cy system.cpu1.icache.tags.occ_blocks::cpu1.inst 475.447912 # Average occupied blocks per requestor system.cpu1.icache.tags.occ_percent::cpu1.inst 0.928609 # Average percentage of cache occupancy system.cpu1.icache.tags.occ_percent::total 0.928609 # Average percentage of cache occupancy 
+system.cpu1.icache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu1.icache.tags.age_task_id_blocks_1024::0 168 # Occupied blocks per task id +system.cpu1.icache.tags.age_task_id_blocks_1024::1 63 # Occupied blocks per task id +system.cpu1.icache.tags.age_task_id_blocks_1024::2 261 # Occupied blocks per task id +system.cpu1.icache.tags.age_task_id_blocks_1024::3 20 # Occupied blocks per task id +system.cpu1.icache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu1.icache.tags.tag_accesses 32848033 # Number of tag accesses +system.cpu1.icache.tags.data_accesses 32848033 # Number of data accesses system.cpu1.icache.ReadReq_hits::cpu1.inst 31979125 # number of ReadReq hits system.cpu1.icache.ReadReq_hits::total 31979125 # number of ReadReq hits system.cpu1.icache.demand_hits::cpu1.inst 31979125 # number of demand (read+write) hits @@ -567,6 +602,14 @@ system.cpu1.dcache.tags.warmup_cycle 67293493000 # Cy system.cpu1.dcache.tags.occ_blocks::cpu1.data 447.573682 # Average occupied blocks per requestor system.cpu1.dcache.tags.occ_percent::cpu1.data 0.874167 # Average percentage of cache occupancy system.cpu1.dcache.tags.occ_percent::total 0.874167 # Average percentage of cache occupancy +system.cpu1.dcache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu1.dcache.tags.age_task_id_blocks_1024::0 267 # Occupied blocks per task id +system.cpu1.dcache.tags.age_task_id_blocks_1024::1 226 # Occupied blocks per task id +system.cpu1.dcache.tags.age_task_id_blocks_1024::2 17 # Occupied blocks per task id +system.cpu1.dcache.tags.age_task_id_blocks_1024::3 2 # Occupied blocks per task id +system.cpu1.dcache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu1.dcache.tags.tag_accesses 48417680 # Number of tag accesses +system.cpu1.dcache.tags.data_accesses 48417680 # Number of data accesses system.cpu1.dcache.ReadReq_hits::cpu1.data 7002209 # number of ReadReq hits system.cpu1.dcache.ReadReq_hits::total 7002209 # number of ReadReq hits system.cpu1.dcache.WriteReq_hits::cpu1.data 4520313 # number of WriteReq hits @@ -585,8 +628,8 @@ system.cpu1.dcache.WriteReq_misses::cpu1.data 125920 system.cpu1.dcache.WriteReq_misses::total 125920 # number of WriteReq misses system.cpu1.dcache.LoadLockedReq_misses::cpu1.data 11251 # number of LoadLockedReq misses system.cpu1.dcache.LoadLockedReq_misses::total 11251 # number of LoadLockedReq misses -system.cpu1.dcache.StoreCondReq_misses::cpu1.data 10139 # number of StoreCondReq misses -system.cpu1.dcache.StoreCondReq_misses::total 10139 # number of StoreCondReq misses +system.cpu1.dcache.StoreCondReq_misses::cpu1.data 10133 # number of StoreCondReq misses +system.cpu1.dcache.StoreCondReq_misses::total 10133 # number of StoreCondReq misses system.cpu1.dcache.demand_misses::cpu1.data 324195 # number of demand (read+write) misses system.cpu1.dcache.demand_misses::total 324195 # number of demand (read+write) misses system.cpu1.dcache.overall_misses::cpu1.data 324195 # number of overall misses @@ -597,8 +640,8 @@ system.cpu1.dcache.WriteReq_accesses::cpu1.data 4646233 system.cpu1.dcache.WriteReq_accesses::total 4646233 # number of WriteReq accesses(hits+misses) system.cpu1.dcache.LoadLockedReq_accesses::cpu1.data 89205 # number of LoadLockedReq accesses(hits+misses) system.cpu1.dcache.LoadLockedReq_accesses::total 89205 # number of LoadLockedReq accesses(hits+misses) -system.cpu1.dcache.StoreCondReq_accesses::cpu1.data 89169 # number of StoreCondReq 
accesses(hits+misses) -system.cpu1.dcache.StoreCondReq_accesses::total 89169 # number of StoreCondReq accesses(hits+misses) +system.cpu1.dcache.StoreCondReq_accesses::cpu1.data 89163 # number of StoreCondReq accesses(hits+misses) +system.cpu1.dcache.StoreCondReq_accesses::total 89163 # number of StoreCondReq accesses(hits+misses) system.cpu1.dcache.demand_accesses::cpu1.data 11846717 # number of demand (read+write) accesses system.cpu1.dcache.demand_accesses::total 11846717 # number of demand (read+write) accesses system.cpu1.dcache.overall_accesses::cpu1.data 11846717 # number of overall (read+write) accesses @@ -609,8 +652,8 @@ system.cpu1.dcache.WriteReq_miss_rate::cpu1.data 0.027102 system.cpu1.dcache.WriteReq_miss_rate::total 0.027102 # miss rate for WriteReq accesses system.cpu1.dcache.LoadLockedReq_miss_rate::cpu1.data 0.126125 # miss rate for LoadLockedReq accesses system.cpu1.dcache.LoadLockedReq_miss_rate::total 0.126125 # miss rate for LoadLockedReq accesses -system.cpu1.dcache.StoreCondReq_miss_rate::cpu1.data 0.113705 # miss rate for StoreCondReq accesses -system.cpu1.dcache.StoreCondReq_miss_rate::total 0.113705 # miss rate for StoreCondReq accesses +system.cpu1.dcache.StoreCondReq_miss_rate::cpu1.data 0.113646 # miss rate for StoreCondReq accesses +system.cpu1.dcache.StoreCondReq_miss_rate::total 0.113646 # miss rate for StoreCondReq accesses system.cpu1.dcache.demand_miss_rate::cpu1.data 0.027366 # miss rate for demand accesses system.cpu1.dcache.demand_miss_rate::total 0.027366 # miss rate for demand accesses system.cpu1.dcache.overall_miss_rate::cpu1.data 0.027366 # miss rate for overall accesses @@ -632,6 +675,8 @@ system.iocache.tags.total_refs 0 # To system.iocache.tags.sampled_refs 0 # Sample count of references to valid blocks. system.iocache.tags.avg_refs nan # Average number of references to valid blocks. system.iocache.tags.warmup_cycle 0 # Cycle when the warmup percentage was hit. 
+system.iocache.tags.tag_accesses 0 # Number of tag accesses +system.iocache.tags.data_accesses 0 # Number of data accesses system.iocache.blocked_cycles::no_mshrs 0 # number of cycles access was blocked system.iocache.blocked_cycles::no_targets 0 # number of cycles access was blocked system.iocache.blocked::no_mshrs 0 # number of cycles access was blocked diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic/config.ini b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic/config.ini index 196c32809..ab338ac30 100644 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic/config.ini +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic/config.ini @@ -12,7 +12,7 @@ time_sync_spin_threshold=100000000 type=LinuxArmSystem children=bridge cf0 clk_domain cpu cpu_clk_domain intrctrl iobus iocache membus physmem realview terminal vncserver voltage_domain atags_addr=256 -boot_loader=/scratch/nilay/GEM5/system/binaries/boot.arm +boot_loader=/dist/binaries/boot.arm boot_osflags=earlyprintk console=ttyAMA0 lpj=19988480 norandmaps rw loglevel=8 mem=128MB root=/dev/sda1 cache_line_size=64 clk_domain=system.clk_domain @@ -23,7 +23,7 @@ eventq_index=0 flags_addr=268435504 gic_cpu_addr=520093952 init_param=0 -kernel=/scratch/nilay/GEM5/system/binaries/vmlinux.arm.smp.fb.2.6.38.8 +kernel=/dist/binaries/vmlinux.arm.smp.fb.2.6.38.8 load_addr_mask=268435455 machine_type=RealView_PBX mem_mode=atomic @@ -75,7 +75,7 @@ table_size=65536 [system.cf0.image.child] type=RawDiskImage eventq_index=0 -image_file=/scratch/nilay/GEM5/system/disks/linux-arm-ael.img +image_file=/dist/disks/linux-arm-ael.img read_only=true [system.clk_domain] @@ -137,6 +137,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu.dcache.tags @@ -153,6 +154,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu.dtb] @@ -185,6 +187,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu.icache.tags @@ -201,6 +204,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu.interrupts] @@ -255,6 +259,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=4194304 system=system tags=system.cpu.l2cache.tags @@ -271,6 +276,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=20 +sequential_access=false size=4194304 [system.cpu.toL2Bus] @@ -324,6 +330,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=50 +sequential_access=false size=1024 system=system tags=system.iocache.tags @@ -340,6 +347,7 @@ block_size=64 clk_domain=system.clk_domain eventq_index=0 hit_latency=50 +sequential_access=false size=1024 [system.membus] diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic/simerr b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic/simerr index eda827fb8..41742298b 100755 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic/simerr +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic/simerr @@ -1,7 +1,6 @@ warn: Sockets disabled, not accepting vnc client connections warn: Sockets disabled, not accepting terminal connections warn: Sockets disabled, not accepting gdb connections -warn: DTB file specified, but 
no device tree support in kernel warn: The clidr register always reports 0 caches. warn: clidr LoUIS field of 0b001 to match current ARM implementations. warn: The csselr register isn't implemented. @@ -14,4 +13,3 @@ warn: instruction 'mcr icimvau' unimplemented warn: LCD dual screen mode not supported warn: instruction 'mcr icialluis' unimplemented warn: instruction 'mcr bpiallis' unimplemented -hack: be nice to actually delete the event here diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic/simout b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic/simout index 0ff7b53a5..8105d53fc 100755 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic/simout +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic/simout @@ -1,14 +1,12 @@ -Redirecting stdout to build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-simple-atomic/simout -Redirecting stderr to build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-simple-atomic/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 07:58:15 -gem5 started Sep 22 2013 08:30:49 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:24:06 +gem5 started Jan 22 2014 17:30:43 +gem5 executing on u200540-lin command line: build/ARM/gem5.opt -d build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-simple-atomic -re tests/run.py build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-simple-atomic Global frequency set at 1000000000000 ticks per second -info: kernel located at: /dist/m5/system/binaries/vmlinux.arm.smp.fb.2.6.38.8 +info: kernel located at: /dist/binaries/vmlinux.arm.smp.fb.2.6.38.8 info: Using bootloader at address 0x80000000 info: Entering event queue @ 0. Starting simulation... 
Exiting @ tick 2332810264000 because m5_exit instruction encountered diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic/stats.txt b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic/stats.txt index cb6c51df2..0b833a71d 100644 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic/stats.txt +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-atomic/stats.txt @@ -4,13 +4,15 @@ sim_seconds 2.332810 # Nu sim_ticks 2332810264000 # Number of ticks simulated final_tick 2332810264000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 993135 # Simulator instruction rate (inst/s) -host_op_rate 1277110 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 38352024586 # Simulator tick rate (ticks/s) -host_mem_usage 443324 # Number of bytes of host memory used -host_seconds 60.83 # Real time elapsed on the host +host_inst_rate 1656319 # Simulator instruction rate (inst/s) +host_op_rate 2129924 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 63962280307 # Simulator tick rate (ticks/s) +host_mem_usage 398176 # Number of bytes of host memory used +host_seconds 36.47 # Real time elapsed on the host sim_insts 60408639 # Number of instructions simulated sim_ops 77681819 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::realview.clcd 111673344 # Number of bytes read from this memory system.physmem.bytes_read::cpu.dtb.walker 320 # Number of bytes read from this memory system.physmem.bytes_read::cpu.itb.walker 192 # Number of bytes read from this memory @@ -72,6 +74,7 @@ system.cf0.dma_write_bytes 0 # Nu system.cf0.dma_write_txs 0 # Number of DMA write transactions. 
system.iobus.throughput 48895252 # Throughput (bytes/s) system.iobus.data_through_bus 114063346 # Total data (bytes) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.dtb.inst_hits 0 # ITB inst hits system.cpu.dtb.inst_misses 0 # ITB inst misses system.cpu.dtb.read_hits 14971214 # DTB read hits @@ -147,6 +150,14 @@ system.cpu.icache.tags.warmup_cycle 5709383000 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 511.678593 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.999372 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.999372 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 177 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 78 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::2 255 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::3 2 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 62285702 # Number of tag accesses +system.cpu.icache.tags.data_accesses 62285702 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 60583498 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 60583498 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 60583498 # number of demand (read+write) hits @@ -197,6 +208,18 @@ system.cpu.l2cache.tags.occ_percent::cpu.itb.walker 0.000015 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.107036 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.092911 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.763050 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1023 5 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_blocks::1024 65380 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1023::4 5 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 40 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 176 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::2 3589 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::3 9187 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::4 52388 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1023 0.000076 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.997620 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 17035899 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 17035899 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.dtb.walker 7507 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::cpu.itb.walker 3129 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::cpu.inst 838871 # number of ReadReq hits @@ -297,6 +320,13 @@ system.cpu.dcache.tags.warmup_cycle 21763000 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 511.997031 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.999994 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.999994 # Average percentage of cache occupancy 
+system.cpu.dcache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 278 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 208 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::2 26 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 97632617 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 97632617 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 13180066 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 13180066 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 9962072 # number of WriteReq hits @@ -361,6 +391,8 @@ system.iocache.tags.total_refs 0 # To system.iocache.tags.sampled_refs 0 # Sample count of references to valid blocks. system.iocache.tags.avg_refs nan # Average number of references to valid blocks. system.iocache.tags.warmup_cycle 0 # Cycle when the warmup percentage was hit. +system.iocache.tags.tag_accesses 0 # Number of tag accesses +system.iocache.tags.data_accesses 0 # Number of data accesses system.iocache.blocked_cycles::no_mshrs 0 # number of cycles access was blocked system.iocache.blocked_cycles::no_targets 0 # number of cycles access was blocked system.iocache.blocked::no_mshrs 0 # number of cycles access was blocked diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing-dual/config.ini b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing-dual/config.ini index 051cf58a2..da0c44ae8 100644 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing-dual/config.ini +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing-dual/config.ini @@ -12,7 +12,7 @@ time_sync_spin_threshold=100000000 type=LinuxArmSystem children=bridge cf0 clk_domain cpu0 cpu1 cpu_clk_domain intrctrl iobus iocache l2c membus physmem realview terminal toL2Bus vncserver voltage_domain atags_addr=256 -boot_loader=/scratch/nilay/GEM5/system/binaries/boot.arm +boot_loader=/dist/binaries/boot.arm boot_osflags=earlyprintk console=ttyAMA0 lpj=19988480 norandmaps rw loglevel=8 mem=128MB root=/dev/sda1 cache_line_size=64 clk_domain=system.clk_domain @@ -23,7 +23,7 @@ eventq_index=0 flags_addr=268435504 gic_cpu_addr=520093952 init_param=0 -kernel=/scratch/nilay/GEM5/system/binaries/vmlinux.arm.smp.fb.2.6.38.8 +kernel=/dist/binaries/vmlinux.arm.smp.fb.2.6.38.8 load_addr_mask=268435455 machine_type=RealView_PBX mem_mode=timing @@ -75,7 +75,7 @@ table_size=65536 [system.cf0.image.child] type=RawDiskImage eventq_index=0 -image_file=/scratch/nilay/GEM5/system/disks/linux-arm-ael.img +image_file=/dist/disks/linux-arm-ael.img read_only=true [system.clk_domain] @@ -130,6 +130,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu0.dcache.tags @@ -146,6 +147,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu0.dtb] @@ -178,6 +180,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu0.icache.tags @@ -194,6 +197,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu0.interrupts] @@ -283,6 +287,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null 
response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu1.dcache.tags @@ -299,6 +304,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu1.dtb] @@ -331,6 +337,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu1.icache.tags @@ -347,6 +354,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu1.interrupts] @@ -426,6 +434,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=50 +sequential_access=false size=1024 system=system tags=system.iocache.tags @@ -442,6 +451,7 @@ block_size=64 clk_domain=system.clk_domain eventq_index=0 hit_latency=50 +sequential_access=false size=1024 [system.l2c] @@ -459,6 +469,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=4194304 system=system tags=system.l2c.tags @@ -475,6 +486,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=20 +sequential_access=false size=4194304 [system.membus] diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing-dual/simerr b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing-dual/simerr index 4ccac5e7b..5a43c8b18 100755 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing-dual/simerr +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing-dual/simerr @@ -1,7 +1,6 @@ warn: Sockets disabled, not accepting vnc client connections warn: Sockets disabled, not accepting terminal connections warn: Sockets disabled, not accepting gdb connections -warn: DTB file specified, but no device tree support in kernel warn: The clidr register always reports 0 caches. warn: clidr LoUIS field of 0b001 to match current ARM implementations. warn: The csselr register isn't implemented. @@ -15,4 +14,3 @@ warn: instruction 'mcr bpiallis' unimplemented warn: LCD dual screen mode not supported warn: instruction 'mcr icialluis' unimplemented warn: instruction 'mcr icialluis' unimplemented -hack: be nice to actually delete the event here diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing-dual/simout b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing-dual/simout index c328b3227..012824f20 100755 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing-dual/simout +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing-dual/simout @@ -1,14 +1,12 @@ -Redirecting stdout to build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-simple-timing-dual/simout -Redirecting stderr to build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-simple-timing-dual/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. 
-gem5 compiled Sep 22 2013 07:58:15 -gem5 started Sep 22 2013 08:25:29 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:24:06 +gem5 started Jan 22 2014 17:31:37 +gem5 executing on u200540-lin command line: build/ARM/gem5.opt -d build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-simple-timing-dual -re tests/run.py build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-simple-timing-dual Global frequency set at 1000000000000 ticks per second -info: kernel located at: /dist/m5/system/binaries/vmlinux.arm.smp.fb.2.6.38.8 +info: kernel located at: /dist/binaries/vmlinux.arm.smp.fb.2.6.38.8 info: Using bootloader at address 0x80000000 info: Entering event queue @ 0. Starting simulation... -Exiting @ tick 1194883580500 because m5_exit instruction encountered +Exiting @ tick 1196134388000 because m5_exit instruction encountered diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing-dual/stats.txt b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing-dual/stats.txt index 168e14479..da78d67e8 100644 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing-dual/stats.txt +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing-dual/stats.txt @@ -1,150 +1,156 @@ ---------- Begin Simulation Statistics ---------- -sim_seconds 1.195792 # Number of seconds simulated -sim_ticks 1195791950500 # Number of ticks simulated -final_tick 1195791950500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) +sim_seconds 1.196134 # Number of seconds simulated +sim_ticks 1196134388000 # Number of ticks simulated +final_tick 1196134388000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 418462 # Simulator instruction rate (inst/s) -host_op_rate 533251 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 8153682245 # Simulator tick rate (ticks/s) -host_mem_usage 447424 # Number of bytes of host memory used -host_seconds 146.66 # Real time elapsed on the host -sim_insts 61370228 # Number of instructions simulated -sim_ops 78204808 # Number of ops (including micro ops) simulated +host_inst_rate 708523 # Simulator instruction rate (inst/s) +host_op_rate 902798 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 13791811883 # Simulator tick rate (ticks/s) +host_mem_usage 403420 # Number of bytes of host memory used +host_seconds 86.73 # Real time elapsed on the host +sim_insts 61448705 # Number of instructions simulated +sim_ops 78297711 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::realview.clcd 51904512 # Number of bytes read from this memory -system.physmem.bytes_read::cpu0.dtb.walker 256 # Number of bytes read from this memory +system.physmem.bytes_read::cpu0.dtb.walker 64 # Number of bytes read from this memory system.physmem.bytes_read::cpu0.itb.walker 128 # Number of bytes read from this memory -system.physmem.bytes_read::cpu0.inst 463716 # Number of bytes read from this memory -system.physmem.bytes_read::cpu0.data 6626164 # Number of bytes read from this memory +system.physmem.bytes_read::cpu0.inst 393380 # Number of bytes read from this memory +system.physmem.bytes_read::cpu0.data 4724852 # Number of bytes read from this memory +system.physmem.bytes_read::cpu1.dtb.walker 256 # Number of bytes read from this memory 
system.physmem.bytes_read::cpu1.itb.walker 64 # Number of bytes read from this memory -system.physmem.bytes_read::cpu1.inst 256412 # Number of bytes read from this memory -system.physmem.bytes_read::cpu1.data 2903920 # Number of bytes read from this memory -system.physmem.bytes_read::total 62155172 # Number of bytes read from this memory -system.physmem.bytes_inst_read::cpu0.inst 463716 # Number of instructions bytes read from this memory -system.physmem.bytes_inst_read::cpu1.inst 256412 # Number of instructions bytes read from this memory -system.physmem.bytes_inst_read::total 720128 # Number of instructions bytes read from this memory -system.physmem.bytes_written::writebacks 4136128 # Number of bytes written to this memory -system.physmem.bytes_written::cpu0.data 3027304 # Number of bytes written to this memory -system.physmem.bytes_written::cpu1.data 40 # Number of bytes written to this memory -system.physmem.bytes_written::total 7163472 # Number of bytes written to this memory +system.physmem.bytes_read::cpu1.inst 323996 # Number of bytes read from this memory +system.physmem.bytes_read::cpu1.data 4798512 # Number of bytes read from this memory +system.physmem.bytes_read::total 62145764 # Number of bytes read from this memory +system.physmem.bytes_inst_read::cpu0.inst 393380 # Number of instructions bytes read from this memory +system.physmem.bytes_inst_read::cpu1.inst 323996 # Number of instructions bytes read from this memory +system.physmem.bytes_inst_read::total 717376 # Number of instructions bytes read from this memory +system.physmem.bytes_written::writebacks 4112768 # Number of bytes written to this memory +system.physmem.bytes_written::cpu0.data 17000 # Number of bytes written to this memory +system.physmem.bytes_written::cpu1.data 3010344 # Number of bytes written to this memory +system.physmem.bytes_written::total 7140112 # Number of bytes written to this memory system.physmem.num_reads::realview.clcd 6488064 # Number of read requests responded to by this memory -system.physmem.num_reads::cpu0.dtb.walker 4 # Number of read requests responded to by this memory +system.physmem.num_reads::cpu0.dtb.walker 1 # Number of read requests responded to by this memory system.physmem.num_reads::cpu0.itb.walker 2 # Number of read requests responded to by this memory -system.physmem.num_reads::cpu0.inst 13464 # Number of read requests responded to by this memory -system.physmem.num_reads::cpu0.data 103606 # Number of read requests responded to by this memory +system.physmem.num_reads::cpu0.inst 12365 # Number of read requests responded to by this memory +system.physmem.num_reads::cpu0.data 73898 # Number of read requests responded to by this memory +system.physmem.num_reads::cpu1.dtb.walker 4 # Number of read requests responded to by this memory system.physmem.num_reads::cpu1.itb.walker 1 # Number of read requests responded to by this memory -system.physmem.num_reads::cpu1.inst 4088 # Number of read requests responded to by this memory -system.physmem.num_reads::cpu1.data 45400 # Number of read requests responded to by this memory -system.physmem.num_reads::total 6654629 # Number of read requests responded to by this memory -system.physmem.num_writes::writebacks 64627 # Number of write requests responded to by this memory -system.physmem.num_writes::cpu0.data 756826 # Number of write requests responded to by this memory -system.physmem.num_writes::cpu1.data 10 # Number of write requests responded to by this memory -system.physmem.num_writes::total 821463 # Number of write requests 
responded to by this memory -system.physmem.bw_read::realview.clcd 43405972 # Total read bandwidth from this memory (bytes/s) -system.physmem.bw_read::cpu0.dtb.walker 214 # Total read bandwidth from this memory (bytes/s) +system.physmem.num_reads::cpu1.inst 5144 # Number of read requests responded to by this memory +system.physmem.num_reads::cpu1.data 75003 # Number of read requests responded to by this memory +system.physmem.num_reads::total 6654482 # Number of read requests responded to by this memory +system.physmem.num_writes::writebacks 64262 # Number of write requests responded to by this memory +system.physmem.num_writes::cpu0.data 4250 # Number of write requests responded to by this memory +system.physmem.num_writes::cpu1.data 752586 # Number of write requests responded to by this memory +system.physmem.num_writes::total 821098 # Number of write requests responded to by this memory +system.physmem.bw_read::realview.clcd 43393546 # Total read bandwidth from this memory (bytes/s) +system.physmem.bw_read::cpu0.dtb.walker 54 # Total read bandwidth from this memory (bytes/s) system.physmem.bw_read::cpu0.itb.walker 107 # Total read bandwidth from this memory (bytes/s) -system.physmem.bw_read::cpu0.inst 387790 # Total read bandwidth from this memory (bytes/s) -system.physmem.bw_read::cpu0.data 5541235 # Total read bandwidth from this memory (bytes/s) +system.physmem.bw_read::cpu0.inst 328876 # Total read bandwidth from this memory (bytes/s) +system.physmem.bw_read::cpu0.data 3950101 # Total read bandwidth from this memory (bytes/s) +system.physmem.bw_read::cpu1.dtb.walker 214 # Total read bandwidth from this memory (bytes/s) system.physmem.bw_read::cpu1.itb.walker 54 # Total read bandwidth from this memory (bytes/s) -system.physmem.bw_read::cpu1.inst 214429 # Total read bandwidth from this memory (bytes/s) -system.physmem.bw_read::cpu1.data 2428449 # Total read bandwidth from this memory (bytes/s) -system.physmem.bw_read::total 51978249 # Total read bandwidth from this memory (bytes/s) -system.physmem.bw_inst_read::cpu0.inst 387790 # Instruction read bandwidth from this memory (bytes/s) -system.physmem.bw_inst_read::cpu1.inst 214429 # Instruction read bandwidth from this memory (bytes/s) -system.physmem.bw_inst_read::total 602218 # Instruction read bandwidth from this memory (bytes/s) -system.physmem.bw_write::writebacks 3458903 # Write bandwidth from this memory (bytes/s) -system.physmem.bw_write::cpu0.data 2531631 # Write bandwidth from this memory (bytes/s) -system.physmem.bw_write::cpu1.data 33 # Write bandwidth from this memory (bytes/s) -system.physmem.bw_write::total 5990567 # Write bandwidth from this memory (bytes/s) -system.physmem.bw_total::writebacks 3458903 # Total bandwidth to/from this memory (bytes/s) -system.physmem.bw_total::realview.clcd 43405972 # Total bandwidth to/from this memory (bytes/s) -system.physmem.bw_total::cpu0.dtb.walker 214 # Total bandwidth to/from this memory (bytes/s) +system.physmem.bw_read::cpu1.inst 270869 # Total read bandwidth from this memory (bytes/s) +system.physmem.bw_read::cpu1.data 4011683 # Total read bandwidth from this memory (bytes/s) +system.physmem.bw_read::total 51955503 # Total read bandwidth from this memory (bytes/s) +system.physmem.bw_inst_read::cpu0.inst 328876 # Instruction read bandwidth from this memory (bytes/s) +system.physmem.bw_inst_read::cpu1.inst 270869 # Instruction read bandwidth from this memory (bytes/s) +system.physmem.bw_inst_read::total 599745 # Instruction read bandwidth from this memory (bytes/s) 
+system.physmem.bw_write::writebacks 3438383 # Write bandwidth from this memory (bytes/s) +system.physmem.bw_write::cpu0.data 14212 # Write bandwidth from this memory (bytes/s) +system.physmem.bw_write::cpu1.data 2516727 # Write bandwidth from this memory (bytes/s) +system.physmem.bw_write::total 5969323 # Write bandwidth from this memory (bytes/s) +system.physmem.bw_total::writebacks 3438383 # Total bandwidth to/from this memory (bytes/s) +system.physmem.bw_total::realview.clcd 43393546 # Total bandwidth to/from this memory (bytes/s) +system.physmem.bw_total::cpu0.dtb.walker 54 # Total bandwidth to/from this memory (bytes/s) system.physmem.bw_total::cpu0.itb.walker 107 # Total bandwidth to/from this memory (bytes/s) -system.physmem.bw_total::cpu0.inst 387790 # Total bandwidth to/from this memory (bytes/s) -system.physmem.bw_total::cpu0.data 8072866 # Total bandwidth to/from this memory (bytes/s) +system.physmem.bw_total::cpu0.inst 328876 # Total bandwidth to/from this memory (bytes/s) +system.physmem.bw_total::cpu0.data 3964314 # Total bandwidth to/from this memory (bytes/s) +system.physmem.bw_total::cpu1.dtb.walker 214 # Total bandwidth to/from this memory (bytes/s) system.physmem.bw_total::cpu1.itb.walker 54 # Total bandwidth to/from this memory (bytes/s) -system.physmem.bw_total::cpu1.inst 214429 # Total bandwidth to/from this memory (bytes/s) -system.physmem.bw_total::cpu1.data 2428483 # Total bandwidth to/from this memory (bytes/s) -system.physmem.bw_total::total 57968816 # Total bandwidth to/from this memory (bytes/s) -system.physmem.readReqs 6654629 # Number of read requests accepted -system.physmem.writeReqs 821463 # Number of write requests accepted -system.physmem.readBursts 6654629 # Number of DRAM read bursts, including those serviced by the write queue -system.physmem.writeBursts 821463 # Number of DRAM write bursts, including those merged in the write queue -system.physmem.bytesReadDRAM 425873472 # Total number of bytes read from DRAM -system.physmem.bytesReadWrQ 22784 # Total number of bytes read from write queue -system.physmem.bytesWritten 7293184 # Total number of bytes written to DRAM -system.physmem.bytesReadSys 62155172 # Total read bytes from the system interface side -system.physmem.bytesWrittenSys 7163472 # Total written bytes from the system interface side -system.physmem.servicedByWrQ 356 # Number of DRAM read bursts serviced by the write queue -system.physmem.mergedWrBursts 707504 # Number of DRAM write bursts merged with an existing one -system.physmem.neitherReadNorWriteReqs 10661 # Number of requests that are neither read nor write -system.physmem.perBankRdBursts::0 415730 # Per bank write bursts -system.physmem.perBankRdBursts::1 415559 # Per bank write bursts -system.physmem.perBankRdBursts::2 414961 # Per bank write bursts -system.physmem.perBankRdBursts::3 415335 # Per bank write bursts -system.physmem.perBankRdBursts::4 422368 # Per bank write bursts -system.physmem.perBankRdBursts::5 415375 # Per bank write bursts -system.physmem.perBankRdBursts::6 415446 # Per bank write bursts -system.physmem.perBankRdBursts::7 415289 # Per bank write bursts -system.physmem.perBankRdBursts::8 415350 # Per bank write bursts -system.physmem.perBankRdBursts::9 415631 # Per bank write bursts -system.physmem.perBankRdBursts::10 415265 # Per bank write bursts -system.physmem.perBankRdBursts::11 414898 # Per bank write bursts -system.physmem.perBankRdBursts::12 415491 # Per bank write bursts -system.physmem.perBankRdBursts::13 416088 # Per bank write bursts 
-system.physmem.perBankRdBursts::14 415759 # Per bank write bursts -system.physmem.perBankRdBursts::15 415728 # Per bank write bursts -system.physmem.perBankWrBursts::0 7313 # Per bank write bursts -system.physmem.perBankWrBursts::1 7201 # Per bank write bursts -system.physmem.perBankWrBursts::2 6692 # Per bank write bursts -system.physmem.perBankWrBursts::3 6866 # Per bank write bursts -system.physmem.perBankWrBursts::4 7393 # Per bank write bursts -system.physmem.perBankWrBursts::5 6958 # Per bank write bursts -system.physmem.perBankWrBursts::6 7169 # Per bank write bursts -system.physmem.perBankWrBursts::7 6986 # Per bank write bursts -system.physmem.perBankWrBursts::8 6988 # Per bank write bursts -system.physmem.perBankWrBursts::9 7250 # Per bank write bursts -system.physmem.perBankWrBursts::10 6972 # Per bank write bursts -system.physmem.perBankWrBursts::11 6687 # Per bank write bursts -system.physmem.perBankWrBursts::12 7223 # Per bank write bursts -system.physmem.perBankWrBursts::13 7529 # Per bank write bursts -system.physmem.perBankWrBursts::14 7375 # Per bank write bursts -system.physmem.perBankWrBursts::15 7354 # Per bank write bursts +system.physmem.bw_total::cpu1.inst 270869 # Total bandwidth to/from this memory (bytes/s) +system.physmem.bw_total::cpu1.data 6528410 # Total bandwidth to/from this memory (bytes/s) +system.physmem.bw_total::total 57924826 # Total bandwidth to/from this memory (bytes/s) +system.physmem.readReqs 6654482 # Number of read requests accepted +system.physmem.writeReqs 821098 # Number of write requests accepted +system.physmem.readBursts 6654482 # Number of DRAM read bursts, including those serviced by the write queue +system.physmem.writeBursts 821098 # Number of DRAM write bursts, including those merged in the write queue +system.physmem.bytesReadDRAM 425858048 # Total number of bytes read from DRAM +system.physmem.bytesReadWrQ 28800 # Total number of bytes read from write queue +system.physmem.bytesWritten 7268928 # Total number of bytes written to DRAM +system.physmem.bytesReadSys 62145764 # Total read bytes from the system interface side +system.physmem.bytesWrittenSys 7140112 # Total written bytes from the system interface side +system.physmem.servicedByWrQ 450 # Number of DRAM read bursts serviced by the write queue +system.physmem.mergedWrBursts 707519 # Number of DRAM write bursts merged with an existing one +system.physmem.neitherReadNorWriteReqs 11807 # Number of requests that are neither read nor write +system.physmem.perBankRdBursts::0 415388 # Per bank write bursts +system.physmem.perBankRdBursts::1 415219 # Per bank write bursts +system.physmem.perBankRdBursts::2 415339 # Per bank write bursts +system.physmem.perBankRdBursts::3 415675 # Per bank write bursts +system.physmem.perBankRdBursts::4 422391 # Per bank write bursts +system.physmem.perBankRdBursts::5 415542 # Per bank write bursts +system.physmem.perBankRdBursts::6 415783 # Per bank write bursts +system.physmem.perBankRdBursts::7 415483 # Per bank write bursts +system.physmem.perBankRdBursts::8 416074 # Per bank write bursts +system.physmem.perBankRdBursts::9 415577 # Per bank write bursts +system.physmem.perBankRdBursts::10 415272 # Per bank write bursts +system.physmem.perBankRdBursts::11 414856 # Per bank write bursts +system.physmem.perBankRdBursts::12 415143 # Per bank write bursts +system.physmem.perBankRdBursts::13 415555 # Per bank write bursts +system.physmem.perBankRdBursts::14 415537 # Per bank write bursts +system.physmem.perBankRdBursts::15 415198 # Per bank write 
bursts +system.physmem.perBankWrBursts::0 6998 # Per bank write bursts +system.physmem.perBankWrBursts::1 6842 # Per bank write bursts +system.physmem.perBankWrBursts::2 7022 # Per bank write bursts +system.physmem.perBankWrBursts::3 7170 # Per bank write bursts +system.physmem.perBankWrBursts::4 7417 # Per bank write bursts +system.physmem.perBankWrBursts::5 7181 # Per bank write bursts +system.physmem.perBankWrBursts::6 7437 # Per bank write bursts +system.physmem.perBankWrBursts::7 7180 # Per bank write bursts +system.physmem.perBankWrBursts::8 7616 # Per bank write bursts +system.physmem.perBankWrBursts::9 7218 # Per bank write bursts +system.physmem.perBankWrBursts::10 7106 # Per bank write bursts +system.physmem.perBankWrBursts::11 6658 # Per bank write bursts +system.physmem.perBankWrBursts::12 6803 # Per bank write bursts +system.physmem.perBankWrBursts::13 7016 # Per bank write bursts +system.physmem.perBankWrBursts::14 7092 # Per bank write bursts +system.physmem.perBankWrBursts::15 6821 # Per bank write bursts system.physmem.numRdRetry 0 # Number of times read queue was full causing retry system.physmem.numWrRetry 0 # Number of times write queue was full causing retry -system.physmem.totGap 1195787534500 # Total gap between requests +system.physmem.totGap 1196129800000 # Total gap between requests system.physmem.readPktSize::0 0 # Read request sizes (log2) system.physmem.readPktSize::1 0 # Read request sizes (log2) system.physmem.readPktSize::2 6825 # Read request sizes (log2) system.physmem.readPktSize::3 6488064 # Read request sizes (log2) system.physmem.readPktSize::4 0 # Read request sizes (log2) system.physmem.readPktSize::5 0 # Read request sizes (log2) -system.physmem.readPktSize::6 159740 # Read request sizes (log2) +system.physmem.readPktSize::6 159593 # Read request sizes (log2) system.physmem.writePktSize::0 0 # Write request sizes (log2) system.physmem.writePktSize::1 0 # Write request sizes (log2) system.physmem.writePktSize::2 756836 # Write request sizes (log2) system.physmem.writePktSize::3 0 # Write request sizes (log2) system.physmem.writePktSize::4 0 # Write request sizes (log2) system.physmem.writePktSize::5 0 # Write request sizes (log2) -system.physmem.writePktSize::6 64627 # Write request sizes (log2) -system.physmem.rdQLenPdf::0 636769 # What read queue length does an incoming req see -system.physmem.rdQLenPdf::1 483388 # What read queue length does an incoming req see -system.physmem.rdQLenPdf::2 484627 # What read queue length does an incoming req see -system.physmem.rdQLenPdf::3 1579502 # What read queue length does an incoming req see -system.physmem.rdQLenPdf::4 1123930 # What read queue length does an incoming req see -system.physmem.rdQLenPdf::5 1118197 # What read queue length does an incoming req see -system.physmem.rdQLenPdf::6 1114450 # What read queue length does an incoming req see -system.physmem.rdQLenPdf::7 25137 # What read queue length does an incoming req see -system.physmem.rdQLenPdf::8 24391 # What read queue length does an incoming req see -system.physmem.rdQLenPdf::9 9450 # What read queue length does an incoming req see -system.physmem.rdQLenPdf::10 9387 # What read queue length does an incoming req see -system.physmem.rdQLenPdf::11 9266 # What read queue length does an incoming req see -system.physmem.rdQLenPdf::12 8971 # What read queue length does an incoming req see -system.physmem.rdQLenPdf::13 8900 # What read queue length does an incoming req see -system.physmem.rdQLenPdf::14 8855 # What read queue length does an incoming 
req see -system.physmem.rdQLenPdf::15 8823 # What read queue length does an incoming req see -system.physmem.rdQLenPdf::16 219 # What read queue length does an incoming req see -system.physmem.rdQLenPdf::17 11 # What read queue length does an incoming req see +system.physmem.writePktSize::6 64262 # Write request sizes (log2) +system.physmem.rdQLenPdf::0 634838 # What read queue length does an incoming req see +system.physmem.rdQLenPdf::1 481612 # What read queue length does an incoming req see +system.physmem.rdQLenPdf::2 482409 # What read queue length does an incoming req see +system.physmem.rdQLenPdf::3 1579414 # What read queue length does an incoming req see +system.physmem.rdQLenPdf::4 1125551 # What read queue length does an incoming req see +system.physmem.rdQLenPdf::5 1120257 # What read queue length does an incoming req see +system.physmem.rdQLenPdf::6 1116869 # What read queue length does an incoming req see +system.physmem.rdQLenPdf::7 25458 # What read queue length does an incoming req see +system.physmem.rdQLenPdf::8 24379 # What read queue length does an incoming req see +system.physmem.rdQLenPdf::9 9272 # What read queue length does an incoming req see +system.physmem.rdQLenPdf::10 9173 # What read queue length does an incoming req see +system.physmem.rdQLenPdf::11 9118 # What read queue length does an incoming req see +system.physmem.rdQLenPdf::12 8948 # What read queue length does an incoming req see +system.physmem.rdQLenPdf::13 8870 # What read queue length does an incoming req see +system.physmem.rdQLenPdf::14 8835 # What read queue length does an incoming req see +system.physmem.rdQLenPdf::15 8809 # What read queue length does an incoming req see +system.physmem.rdQLenPdf::16 205 # What read queue length does an incoming req see +system.physmem.rdQLenPdf::17 15 # What read queue length does an incoming req see system.physmem.rdQLenPdf::18 0 # What read queue length does an incoming req see system.physmem.rdQLenPdf::19 0 # What read queue length does an incoming req see system.physmem.rdQLenPdf::20 0 # What read queue length does an incoming req see @@ -159,29 +165,29 @@ system.physmem.rdQLenPdf::28 0 # Wh system.physmem.rdQLenPdf::29 0 # What read queue length does an incoming req see system.physmem.rdQLenPdf::30 0 # What read queue length does an incoming req see system.physmem.rdQLenPdf::31 0 # What read queue length does an incoming req see -system.physmem.wrQLenPdf::0 5178 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::1 5187 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::2 5182 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::3 5179 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::4 5180 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::5 5179 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::6 5179 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::7 5180 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::8 5177 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::9 5179 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::10 5177 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::11 5178 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::12 5178 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::13 5178 # 
What write queue length does an incoming req see -system.physmem.wrQLenPdf::14 5179 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::15 5179 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::16 5178 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::17 5183 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::18 5180 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::19 5180 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::20 5184 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::21 5183 # What write queue length does an incoming req see -system.physmem.wrQLenPdf::22 2 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::0 5162 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::1 5164 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::2 5162 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::3 5162 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::4 5163 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::5 5161 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::6 5162 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::7 5165 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::8 5162 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::9 5162 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::10 5162 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::11 5161 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::12 5163 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::13 5162 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::14 5162 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::15 5165 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::16 5161 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::17 5165 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::18 5164 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::19 5161 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::20 5166 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::21 5162 # What write queue length does an incoming req see +system.physmem.wrQLenPdf::22 0 # What write queue length does an incoming req see system.physmem.wrQLenPdf::23 0 # What write queue length does an incoming req see system.physmem.wrQLenPdf::24 0 # What write queue length does an incoming req see system.physmem.wrQLenPdf::25 0 # What write queue length does an incoming req see @@ -191,408 +197,401 @@ system.physmem.wrQLenPdf::28 0 # Wh system.physmem.wrQLenPdf::29 0 # What write queue length does an incoming req see system.physmem.wrQLenPdf::30 0 # What write queue length does an incoming req see system.physmem.wrQLenPdf::31 0 # What write queue length does an incoming req see -system.physmem.bytesPerActivate::samples 74963 # Bytes accessed per row activation -system.physmem.bytesPerActivate::mean 5778.397556 # Bytes accessed per row activation -system.physmem.bytesPerActivate::gmean 
392.859970 # Bytes accessed per row activation -system.physmem.bytesPerActivate::stdev 13041.482454 # Bytes accessed per row activation -system.physmem.bytesPerActivate::64-71 26098 34.81% 34.81% # Bytes accessed per row activation -system.physmem.bytesPerActivate::128-135 15301 20.41% 55.23% # Bytes accessed per row activation -system.physmem.bytesPerActivate::192-199 3417 4.56% 59.78% # Bytes accessed per row activation -system.physmem.bytesPerActivate::256-263 2337 3.12% 62.90% # Bytes accessed per row activation -system.physmem.bytesPerActivate::320-327 1552 2.07% 64.97% # Bytes accessed per row activation -system.physmem.bytesPerActivate::384-391 1311 1.75% 66.72% # Bytes accessed per row activation -system.physmem.bytesPerActivate::448-455 1048 1.40% 68.12% # Bytes accessed per row activation -system.physmem.bytesPerActivate::512-519 1133 1.51% 69.63% # Bytes accessed per row activation -system.physmem.bytesPerActivate::576-583 708 0.94% 70.57% # Bytes accessed per row activation -system.physmem.bytesPerActivate::640-647 576 0.77% 71.34% # Bytes accessed per row activation -system.physmem.bytesPerActivate::704-711 588 0.78% 72.13% # Bytes accessed per row activation -system.physmem.bytesPerActivate::768-775 600 0.80% 72.93% # Bytes accessed per row activation -system.physmem.bytesPerActivate::832-839 313 0.42% 73.35% # Bytes accessed per row activation -system.physmem.bytesPerActivate::896-903 304 0.41% 73.75% # Bytes accessed per row activation -system.physmem.bytesPerActivate::960-967 211 0.28% 74.03% # Bytes accessed per row activation -system.physmem.bytesPerActivate::1024-1031 484 0.65% 74.68% # Bytes accessed per row activation -system.physmem.bytesPerActivate::1088-1095 181 0.24% 74.92% # Bytes accessed per row activation -system.physmem.bytesPerActivate::1152-1159 133 0.18% 75.10% # Bytes accessed per row activation -system.physmem.bytesPerActivate::1216-1223 163 0.22% 75.31% # Bytes accessed per row activation -system.physmem.bytesPerActivate::1280-1287 181 0.24% 75.56% # Bytes accessed per row activation -system.physmem.bytesPerActivate::1344-1351 117 0.16% 75.71% # Bytes accessed per row activation -system.physmem.bytesPerActivate::1408-1415 2275 3.03% 78.75% # Bytes accessed per row activation -system.physmem.bytesPerActivate::1472-1479 133 0.18% 78.92% # Bytes accessed per row activation -system.physmem.bytesPerActivate::1536-1543 94 0.13% 79.05% # Bytes accessed per row activation -system.physmem.bytesPerActivate::1600-1607 64 0.09% 79.14% # Bytes accessed per row activation -system.physmem.bytesPerActivate::1664-1671 60 0.08% 79.22% # Bytes accessed per row activation -system.physmem.bytesPerActivate::1728-1735 43 0.06% 79.27% # Bytes accessed per row activation -system.physmem.bytesPerActivate::1792-1799 123 0.16% 79.44% # Bytes accessed per row activation -system.physmem.bytesPerActivate::1856-1863 53 0.07% 79.51% # Bytes accessed per row activation -system.physmem.bytesPerActivate::1920-1927 29 0.04% 79.55% # Bytes accessed per row activation -system.physmem.bytesPerActivate::1984-1991 21 0.03% 79.57% # Bytes accessed per row activation -system.physmem.bytesPerActivate::2048-2055 191 0.25% 79.83% # Bytes accessed per row activation -system.physmem.bytesPerActivate::2112-2119 19 0.03% 79.85% # Bytes accessed per row activation -system.physmem.bytesPerActivate::2176-2183 18 0.02% 79.88% # Bytes accessed per row activation -system.physmem.bytesPerActivate::2240-2247 24 0.03% 79.91% # Bytes accessed per row activation -system.physmem.bytesPerActivate::2304-2311 41 0.05% 
79.96% # Bytes accessed per row activation -system.physmem.bytesPerActivate::2368-2375 14 0.02% 79.98% # Bytes accessed per row activation -system.physmem.bytesPerActivate::2432-2439 25 0.03% 80.02% # Bytes accessed per row activation -system.physmem.bytesPerActivate::2496-2503 28 0.04% 80.05% # Bytes accessed per row activation -system.physmem.bytesPerActivate::2560-2567 24 0.03% 80.09% # Bytes accessed per row activation -system.physmem.bytesPerActivate::2624-2631 25 0.03% 80.12% # Bytes accessed per row activation -system.physmem.bytesPerActivate::2688-2695 17 0.02% 80.14% # Bytes accessed per row activation -system.physmem.bytesPerActivate::2752-2759 20 0.03% 80.17% # Bytes accessed per row activation -system.physmem.bytesPerActivate::2816-2823 20 0.03% 80.20% # Bytes accessed per row activation -system.physmem.bytesPerActivate::2880-2887 7 0.01% 80.20% # Bytes accessed per row activation -system.physmem.bytesPerActivate::2944-2951 20 0.03% 80.23% # Bytes accessed per row activation -system.physmem.bytesPerActivate::3008-3015 6 0.01% 80.24% # Bytes accessed per row activation -system.physmem.bytesPerActivate::3072-3079 190 0.25% 80.49% # Bytes accessed per row activation -system.physmem.bytesPerActivate::3136-3143 23 0.03% 80.52% # Bytes accessed per row activation -system.physmem.bytesPerActivate::3200-3207 8 0.01% 80.53% # Bytes accessed per row activation -system.physmem.bytesPerActivate::3264-3271 10 0.01% 80.55% # Bytes accessed per row activation -system.physmem.bytesPerActivate::3328-3335 98 0.13% 80.68% # Bytes accessed per row activation -system.physmem.bytesPerActivate::3392-3399 7 0.01% 80.69% # Bytes accessed per row activation -system.physmem.bytesPerActivate::3456-3463 7 0.01% 80.70% # Bytes accessed per row activation -system.physmem.bytesPerActivate::3520-3527 16 0.02% 80.72% # Bytes accessed per row activation -system.physmem.bytesPerActivate::3584-3591 20 0.03% 80.75% # Bytes accessed per row activation -system.physmem.bytesPerActivate::3648-3655 6 0.01% 80.75% # Bytes accessed per row activation -system.physmem.bytesPerActivate::3712-3719 20 0.03% 80.78% # Bytes accessed per row activation -system.physmem.bytesPerActivate::3776-3783 37 0.05% 80.83% # Bytes accessed per row activation -system.physmem.bytesPerActivate::3840-3847 47 0.06% 80.89% # Bytes accessed per row activation -system.physmem.bytesPerActivate::3904-3911 17 0.02% 80.91% # Bytes accessed per row activation -system.physmem.bytesPerActivate::3968-3975 8 0.01% 80.93% # Bytes accessed per row activation -system.physmem.bytesPerActivate::4032-4039 6 0.01% 80.93% # Bytes accessed per row activation -system.physmem.bytesPerActivate::4096-4103 197 0.26% 81.20% # Bytes accessed per row activation -system.physmem.bytesPerActivate::4160-4167 7 0.01% 81.21% # Bytes accessed per row activation -system.physmem.bytesPerActivate::4224-4231 10 0.01% 81.22% # Bytes accessed per row activation -system.physmem.bytesPerActivate::4288-4295 14 0.02% 81.24% # Bytes accessed per row activation -system.physmem.bytesPerActivate::4352-4359 80 0.11% 81.34% # Bytes accessed per row activation -system.physmem.bytesPerActivate::4416-4423 4 0.01% 81.35% # Bytes accessed per row activation -system.physmem.bytesPerActivate::4480-4487 14 0.02% 81.37% # Bytes accessed per row activation -system.physmem.bytesPerActivate::4544-4551 3 0.00% 81.37% # Bytes accessed per row activation -system.physmem.bytesPerActivate::4608-4615 33 0.04% 81.42% # Bytes accessed per row activation -system.physmem.bytesPerActivate::4672-4679 14 0.02% 81.43% # 
Bytes accessed per row activation -system.physmem.bytesPerActivate::4736-4743 3 0.00% 81.44% # Bytes accessed per row activation -system.physmem.bytesPerActivate::4800-4807 4 0.01% 81.44% # Bytes accessed per row activation -system.physmem.bytesPerActivate::4864-4871 23 0.03% 81.47% # Bytes accessed per row activation -system.physmem.bytesPerActivate::4928-4935 5 0.01% 81.48% # Bytes accessed per row activation -system.physmem.bytesPerActivate::4992-4999 7 0.01% 81.49% # Bytes accessed per row activation -system.physmem.bytesPerActivate::5056-5063 15 0.02% 81.51% # Bytes accessed per row activation -system.physmem.bytesPerActivate::5120-5127 154 0.21% 81.72% # Bytes accessed per row activation -system.physmem.bytesPerActivate::5184-5191 3 0.00% 81.72% # Bytes accessed per row activation -system.physmem.bytesPerActivate::5248-5255 14 0.02% 81.74% # Bytes accessed per row activation -system.physmem.bytesPerActivate::5312-5319 6 0.01% 81.75% # Bytes accessed per row activation -system.physmem.bytesPerActivate::5376-5383 35 0.05% 81.79% # Bytes accessed per row activation -system.physmem.bytesPerActivate::5440-5447 170 0.23% 82.02% # Bytes accessed per row activation -system.physmem.bytesPerActivate::5504-5511 59 0.08% 82.10% # Bytes accessed per row activation -system.physmem.bytesPerActivate::5632-5639 78 0.10% 82.20% # Bytes accessed per row activation -system.physmem.bytesPerActivate::5696-5703 1 0.00% 82.20% # Bytes accessed per row activation -system.physmem.bytesPerActivate::5888-5895 9 0.01% 82.22% # Bytes accessed per row activation -system.physmem.bytesPerActivate::6144-6151 89 0.12% 82.34% # Bytes accessed per row activation -system.physmem.bytesPerActivate::6272-6279 2 0.00% 82.34% # Bytes accessed per row activation -system.physmem.bytesPerActivate::6400-6407 70 0.09% 82.43% # Bytes accessed per row activation -system.physmem.bytesPerActivate::6464-6471 1 0.00% 82.43% # Bytes accessed per row activation -system.physmem.bytesPerActivate::6528-6535 2 0.00% 82.44% # Bytes accessed per row activation -system.physmem.bytesPerActivate::6592-6599 1 0.00% 82.44% # Bytes accessed per row activation -system.physmem.bytesPerActivate::6656-6663 108 0.14% 82.58% # Bytes accessed per row activation -system.physmem.bytesPerActivate::6848-6855 1 0.00% 82.58% # Bytes accessed per row activation -system.physmem.bytesPerActivate::6912-6919 17 0.02% 82.60% # Bytes accessed per row activation -system.physmem.bytesPerActivate::7040-7047 1 0.00% 82.61% # Bytes accessed per row activation -system.physmem.bytesPerActivate::7168-7175 32 0.04% 82.65% # Bytes accessed per row activation -system.physmem.bytesPerActivate::7424-7431 132 0.18% 82.82% # Bytes accessed per row activation -system.physmem.bytesPerActivate::7680-7687 28 0.04% 82.86% # Bytes accessed per row activation -system.physmem.bytesPerActivate::7936-7943 74 0.10% 82.96% # Bytes accessed per row activation -system.physmem.bytesPerActivate::8000-8007 1 0.00% 82.96% # Bytes accessed per row activation -system.physmem.bytesPerActivate::8064-8071 1 0.00% 82.96% # Bytes accessed per row activation -system.physmem.bytesPerActivate::8192-8199 29 0.04% 83.00% # Bytes accessed per row activation -system.physmem.bytesPerActivate::8384-8391 1 0.00% 83.00% # Bytes accessed per row activation -system.physmem.bytesPerActivate::8448-8455 75 0.10% 83.10% # Bytes accessed per row activation -system.physmem.bytesPerActivate::8704-8711 29 0.04% 83.14% # Bytes accessed per row activation -system.physmem.bytesPerActivate::8832-8839 2 0.00% 83.15% # Bytes accessed 
per row activation -system.physmem.bytesPerActivate::8960-8967 130 0.17% 83.32% # Bytes accessed per row activation -system.physmem.bytesPerActivate::9152-9159 1 0.00% 83.32% # Bytes accessed per row activation -system.physmem.bytesPerActivate::9216-9223 29 0.04% 83.36% # Bytes accessed per row activation -system.physmem.bytesPerActivate::9280-9287 1 0.00% 83.36% # Bytes accessed per row activation -system.physmem.bytesPerActivate::9472-9479 17 0.02% 83.38% # Bytes accessed per row activation -system.physmem.bytesPerActivate::9728-9735 100 0.13% 83.52% # Bytes accessed per row activation -system.physmem.bytesPerActivate::9856-9863 1 0.00% 83.52% # Bytes accessed per row activation -system.physmem.bytesPerActivate::9984-9991 74 0.10% 83.62% # Bytes accessed per row activation -system.physmem.bytesPerActivate::10240-10247 86 0.11% 83.73% # Bytes accessed per row activation -system.physmem.bytesPerActivate::10496-10503 12 0.02% 83.75% # Bytes accessed per row activation -system.physmem.bytesPerActivate::10752-10759 80 0.11% 83.85% # Bytes accessed per row activation -system.physmem.bytesPerActivate::11008-11015 29 0.04% 83.89% # Bytes accessed per row activation -system.physmem.bytesPerActivate::11136-11143 2 0.00% 83.89% # Bytes accessed per row activation -system.physmem.bytesPerActivate::11200-11207 1 0.00% 83.90% # Bytes accessed per row activation -system.physmem.bytesPerActivate::11264-11271 147 0.20% 84.09% # Bytes accessed per row activation -system.physmem.bytesPerActivate::11456-11463 1 0.00% 84.09% # Bytes accessed per row activation -system.physmem.bytesPerActivate::11520-11527 8 0.01% 84.10% # Bytes accessed per row activation -system.physmem.bytesPerActivate::11776-11783 25 0.03% 84.14% # Bytes accessed per row activation -system.physmem.bytesPerActivate::11904-11911 1 0.00% 84.14% # Bytes accessed per row activation -system.physmem.bytesPerActivate::12032-12039 77 0.10% 84.24% # Bytes accessed per row activation -system.physmem.bytesPerActivate::12288-12295 181 0.24% 84.48% # Bytes accessed per row activation -system.physmem.bytesPerActivate::12480-12487 1 0.00% 84.48% # Bytes accessed per row activation -system.physmem.bytesPerActivate::12544-12551 36 0.05% 84.53% # Bytes accessed per row activation -system.physmem.bytesPerActivate::12800-12807 17 0.02% 84.56% # Bytes accessed per row activation -system.physmem.bytesPerActivate::12864-12871 1 0.00% 84.56% # Bytes accessed per row activation -system.physmem.bytesPerActivate::13056-13063 77 0.10% 84.66% # Bytes accessed per row activation -system.physmem.bytesPerActivate::13248-13255 1 0.00% 84.66% # Bytes accessed per row activation -system.physmem.bytesPerActivate::13312-13319 164 0.22% 84.88% # Bytes accessed per row activation -system.physmem.bytesPerActivate::13568-13575 12 0.02% 84.90% # Bytes accessed per row activation -system.physmem.bytesPerActivate::13824-13831 12 0.02% 84.91% # Bytes accessed per row activation -system.physmem.bytesPerActivate::13952-13959 1 0.00% 84.91% # Bytes accessed per row activation -system.physmem.bytesPerActivate::14080-14087 27 0.04% 84.95% # Bytes accessed per row activation -system.physmem.bytesPerActivate::14144-14151 1 0.00% 84.95% # Bytes accessed per row activation -system.physmem.bytesPerActivate::14208-14215 3 0.00% 84.95% # Bytes accessed per row activation -system.physmem.bytesPerActivate::14336-14343 162 0.22% 85.17% # Bytes accessed per row activation -system.physmem.bytesPerActivate::14592-14599 88 0.12% 85.29% # Bytes accessed per row activation 
-system.physmem.bytesPerActivate::14656-14663 1 0.00% 85.29% # Bytes accessed per row activation -system.physmem.bytesPerActivate::14848-14855 21 0.03% 85.32% # Bytes accessed per row activation -system.physmem.bytesPerActivate::15040-15047 2 0.00% 85.32% # Bytes accessed per row activation -system.physmem.bytesPerActivate::15104-15111 28 0.04% 85.36% # Bytes accessed per row activation -system.physmem.bytesPerActivate::15232-15239 1 0.00% 85.36% # Bytes accessed per row activation -system.physmem.bytesPerActivate::15360-15367 218 0.29% 85.65% # Bytes accessed per row activation -system.physmem.bytesPerActivate::15616-15623 20 0.03% 85.68% # Bytes accessed per row activation -system.physmem.bytesPerActivate::15872-15879 17 0.02% 85.70% # Bytes accessed per row activation -system.physmem.bytesPerActivate::16000-16007 1 0.00% 85.70% # Bytes accessed per row activation -system.physmem.bytesPerActivate::16128-16135 8 0.01% 85.71% # Bytes accessed per row activation -system.physmem.bytesPerActivate::16256-16263 2 0.00% 85.71% # Bytes accessed per row activation -system.physmem.bytesPerActivate::16384-16391 272 0.36% 86.08% # Bytes accessed per row activation -system.physmem.bytesPerActivate::16640-16647 7 0.01% 86.09% # Bytes accessed per row activation -system.physmem.bytesPerActivate::16768-16775 1 0.00% 86.09% # Bytes accessed per row activation -system.physmem.bytesPerActivate::16896-16903 16 0.02% 86.11% # Bytes accessed per row activation -system.physmem.bytesPerActivate::17152-17159 23 0.03% 86.14% # Bytes accessed per row activation -system.physmem.bytesPerActivate::17280-17287 1 0.00% 86.14% # Bytes accessed per row activation -system.physmem.bytesPerActivate::17408-17415 224 0.30% 86.44% # Bytes accessed per row activation -system.physmem.bytesPerActivate::17600-17607 2 0.00% 86.44% # Bytes accessed per row activation -system.physmem.bytesPerActivate::17664-17671 24 0.03% 86.47% # Bytes accessed per row activation -system.physmem.bytesPerActivate::17920-17927 18 0.02% 86.50% # Bytes accessed per row activation -system.physmem.bytesPerActivate::18112-18119 1 0.00% 86.50% # Bytes accessed per row activation -system.physmem.bytesPerActivate::18176-18183 85 0.11% 86.61% # Bytes accessed per row activation -system.physmem.bytesPerActivate::18304-18311 1 0.00% 86.61% # Bytes accessed per row activation -system.physmem.bytesPerActivate::18432-18439 154 0.21% 86.82% # Bytes accessed per row activation -system.physmem.bytesPerActivate::18688-18695 30 0.04% 86.86% # Bytes accessed per row activation -system.physmem.bytesPerActivate::18944-18951 16 0.02% 86.88% # Bytes accessed per row activation -system.physmem.bytesPerActivate::19200-19207 19 0.03% 86.91% # Bytes accessed per row activation -system.physmem.bytesPerActivate::19328-19335 1 0.00% 86.91% # Bytes accessed per row activation -system.physmem.bytesPerActivate::19456-19463 169 0.23% 87.13% # Bytes accessed per row activation -system.physmem.bytesPerActivate::19712-19719 71 0.09% 87.23% # Bytes accessed per row activation -system.physmem.bytesPerActivate::19968-19975 12 0.02% 87.24% # Bytes accessed per row activation -system.physmem.bytesPerActivate::20224-20231 44 0.06% 87.30% # Bytes accessed per row activation -system.physmem.bytesPerActivate::20288-20295 1 0.00% 87.30% # Bytes accessed per row activation -system.physmem.bytesPerActivate::20352-20359 1 0.00% 87.30% # Bytes accessed per row activation -system.physmem.bytesPerActivate::20480-20487 167 0.22% 87.53% # Bytes accessed per row activation 
-system.physmem.bytesPerActivate::20608-20615 3 0.00% 87.53% # Bytes accessed per row activation -system.physmem.bytesPerActivate::20736-20743 78 0.10% 87.64% # Bytes accessed per row activation -system.physmem.bytesPerActivate::20800-20807 1 0.00% 87.64% # Bytes accessed per row activation -system.physmem.bytesPerActivate::20864-20871 1 0.00% 87.64% # Bytes accessed per row activation -system.physmem.bytesPerActivate::20992-20999 25 0.03% 87.67% # Bytes accessed per row activation -system.physmem.bytesPerActivate::21184-21191 2 0.00% 87.67% # Bytes accessed per row activation -system.physmem.bytesPerActivate::21248-21255 12 0.02% 87.69% # Bytes accessed per row activation -system.physmem.bytesPerActivate::21376-21383 1 0.00% 87.69% # Bytes accessed per row activation -system.physmem.bytesPerActivate::21504-21511 139 0.19% 87.88% # Bytes accessed per row activation -system.physmem.bytesPerActivate::21760-21767 23 0.03% 87.91% # Bytes accessed per row activation -system.physmem.bytesPerActivate::22016-22023 79 0.11% 88.01% # Bytes accessed per row activation -system.physmem.bytesPerActivate::22208-22215 1 0.00% 88.01% # Bytes accessed per row activation -system.physmem.bytesPerActivate::22272-22279 9 0.01% 88.03% # Bytes accessed per row activation -system.physmem.bytesPerActivate::22528-22535 89 0.12% 88.14% # Bytes accessed per row activation -system.physmem.bytesPerActivate::22592-22599 2 0.00% 88.15% # Bytes accessed per row activation -system.physmem.bytesPerActivate::22784-22791 71 0.09% 88.24% # Bytes accessed per row activation -system.physmem.bytesPerActivate::23040-23047 103 0.14% 88.38% # Bytes accessed per row activation -system.physmem.bytesPerActivate::23296-23303 16 0.02% 88.40% # Bytes accessed per row activation -system.physmem.bytesPerActivate::23360-23367 1 0.00% 88.40% # Bytes accessed per row activation -system.physmem.bytesPerActivate::23552-23559 26 0.03% 88.44% # Bytes accessed per row activation -system.physmem.bytesPerActivate::23680-23687 1 0.00% 88.44% # Bytes accessed per row activation -system.physmem.bytesPerActivate::23808-23815 131 0.17% 88.61% # Bytes accessed per row activation -system.physmem.bytesPerActivate::24064-24071 26 0.03% 88.65% # Bytes accessed per row activation -system.physmem.bytesPerActivate::24320-24327 72 0.10% 88.74% # Bytes accessed per row activation -system.physmem.bytesPerActivate::24384-24391 1 0.00% 88.75% # Bytes accessed per row activation -system.physmem.bytesPerActivate::24576-24583 23 0.03% 88.78% # Bytes accessed per row activation -system.physmem.bytesPerActivate::24832-24839 77 0.10% 88.88% # Bytes accessed per row activation -system.physmem.bytesPerActivate::25088-25095 26 0.03% 88.91% # Bytes accessed per row activation -system.physmem.bytesPerActivate::25216-25223 1 0.00% 88.91% # Bytes accessed per row activation -system.physmem.bytesPerActivate::25344-25351 132 0.18% 89.09% # Bytes accessed per row activation -system.physmem.bytesPerActivate::25600-25607 29 0.04% 89.13% # Bytes accessed per row activation -system.physmem.bytesPerActivate::25856-25863 15 0.02% 89.15% # Bytes accessed per row activation -system.physmem.bytesPerActivate::26112-26119 98 0.13% 89.28% # Bytes accessed per row activation -system.physmem.bytesPerActivate::26240-26247 1 0.00% 89.28% # Bytes accessed per row activation -system.physmem.bytesPerActivate::26304-26311 2 0.00% 89.28% # Bytes accessed per row activation -system.physmem.bytesPerActivate::26368-26375 75 0.10% 89.38% # Bytes accessed per row activation 
-system.physmem.bytesPerActivate::26432-26439 1 0.00% 89.39% # Bytes accessed per row activation -system.physmem.bytesPerActivate::26560-26567 1 0.00% 89.39% # Bytes accessed per row activation -system.physmem.bytesPerActivate::26624-26631 84 0.11% 89.50% # Bytes accessed per row activation -system.physmem.bytesPerActivate::26880-26887 13 0.02% 89.52% # Bytes accessed per row activation -system.physmem.bytesPerActivate::27136-27143 81 0.11% 89.62% # Bytes accessed per row activation -system.physmem.bytesPerActivate::27392-27399 23 0.03% 89.65% # Bytes accessed per row activation -system.physmem.bytesPerActivate::27584-27591 1 0.00% 89.66% # Bytes accessed per row activation -system.physmem.bytesPerActivate::27648-27655 143 0.19% 89.85% # Bytes accessed per row activation -system.physmem.bytesPerActivate::27840-27847 1 0.00% 89.85% # Bytes accessed per row activation -system.physmem.bytesPerActivate::27904-27911 9 0.01% 89.86% # Bytes accessed per row activation -system.physmem.bytesPerActivate::28032-28039 1 0.00% 89.86% # Bytes accessed per row activation -system.physmem.bytesPerActivate::28160-28167 25 0.03% 89.90% # Bytes accessed per row activation -system.physmem.bytesPerActivate::28288-28295 1 0.00% 89.90% # Bytes accessed per row activation -system.physmem.bytesPerActivate::28416-28423 80 0.11% 90.00% # Bytes accessed per row activation -system.physmem.bytesPerActivate::28544-28551 3 0.00% 90.01% # Bytes accessed per row activation -system.physmem.bytesPerActivate::28608-28615 1 0.00% 90.01% # Bytes accessed per row activation -system.physmem.bytesPerActivate::28672-28679 176 0.23% 90.24% # Bytes accessed per row activation -system.physmem.bytesPerActivate::28736-28743 1 0.00% 90.24% # Bytes accessed per row activation -system.physmem.bytesPerActivate::28928-28935 40 0.05% 90.30% # Bytes accessed per row activation -system.physmem.bytesPerActivate::29184-29191 16 0.02% 90.32% # Bytes accessed per row activation -system.physmem.bytesPerActivate::29440-29447 72 0.10% 90.42% # Bytes accessed per row activation -system.physmem.bytesPerActivate::29632-29639 1 0.00% 90.42% # Bytes accessed per row activation -system.physmem.bytesPerActivate::29696-29703 167 0.22% 90.64% # Bytes accessed per row activation -system.physmem.bytesPerActivate::29760-29767 1 0.00% 90.64% # Bytes accessed per row activation -system.physmem.bytesPerActivate::29952-29959 13 0.02% 90.66% # Bytes accessed per row activation -system.physmem.bytesPerActivate::30208-30215 6 0.01% 90.67% # Bytes accessed per row activation -system.physmem.bytesPerActivate::30272-30279 2 0.00% 90.67% # Bytes accessed per row activation -system.physmem.bytesPerActivate::30464-30471 29 0.04% 90.71% # Bytes accessed per row activation -system.physmem.bytesPerActivate::30592-30599 1 0.00% 90.71% # Bytes accessed per row activation -system.physmem.bytesPerActivate::30720-30727 150 0.20% 90.91% # Bytes accessed per row activation -system.physmem.bytesPerActivate::30848-30855 1 0.00% 90.91% # Bytes accessed per row activation -system.physmem.bytesPerActivate::30976-30983 84 0.11% 91.02% # Bytes accessed per row activation -system.physmem.bytesPerActivate::31232-31239 19 0.03% 91.05% # Bytes accessed per row activation -system.physmem.bytesPerActivate::31488-31495 30 0.04% 91.09% # Bytes accessed per row activation -system.physmem.bytesPerActivate::31552-31559 1 0.00% 91.09% # Bytes accessed per row activation -system.physmem.bytesPerActivate::31616-31623 1 0.00% 91.09% # Bytes accessed per row activation 
-system.physmem.bytesPerActivate::31744-31751 220 0.29% 91.38% # Bytes accessed per row activation -system.physmem.bytesPerActivate::31872-31879 1 0.00% 91.39% # Bytes accessed per row activation -system.physmem.bytesPerActivate::32000-32007 20 0.03% 91.41% # Bytes accessed per row activation -system.physmem.bytesPerActivate::32256-32263 17 0.02% 91.43% # Bytes accessed per row activation -system.physmem.bytesPerActivate::32512-32519 10 0.01% 91.45% # Bytes accessed per row activation -system.physmem.bytesPerActivate::32640-32647 1 0.00% 91.45% # Bytes accessed per row activation -system.physmem.bytesPerActivate::32768-32775 269 0.36% 91.81% # Bytes accessed per row activation -system.physmem.bytesPerActivate::33024-33031 14 0.02% 91.83% # Bytes accessed per row activation -system.physmem.bytesPerActivate::33216-33223 1 0.00% 91.83% # Bytes accessed per row activation -system.physmem.bytesPerActivate::33280-33287 25 0.03% 91.86% # Bytes accessed per row activation -system.physmem.bytesPerActivate::33344-33351 2 0.00% 91.86% # Bytes accessed per row activation -system.physmem.bytesPerActivate::33536-33543 22 0.03% 91.89% # Bytes accessed per row activation -system.physmem.bytesPerActivate::33600-33607 1 0.00% 91.89% # Bytes accessed per row activation -system.physmem.bytesPerActivate::33664-33671 1 0.00% 91.90% # Bytes accessed per row activation -system.physmem.bytesPerActivate::33728-33735 1 0.00% 91.90% # Bytes accessed per row activation -system.physmem.bytesPerActivate::33792-33799 216 0.29% 92.19% # Bytes accessed per row activation -system.physmem.bytesPerActivate::33920-33927 1 0.00% 92.19% # Bytes accessed per row activation -system.physmem.bytesPerActivate::34048-34055 26 0.03% 92.22% # Bytes accessed per row activation -system.physmem.bytesPerActivate::34176-34183 1 0.00% 92.22% # Bytes accessed per row activation -system.physmem.bytesPerActivate::34304-34311 19 0.03% 92.25% # Bytes accessed per row activation -system.physmem.bytesPerActivate::34560-34567 84 0.11% 92.36% # Bytes accessed per row activation -system.physmem.bytesPerActivate::34688-34695 2 0.00% 92.36% # Bytes accessed per row activation -system.physmem.bytesPerActivate::34816-34823 148 0.20% 92.56% # Bytes accessed per row activation -system.physmem.bytesPerActivate::34880-34887 1 0.00% 92.56% # Bytes accessed per row activation -system.physmem.bytesPerActivate::35072-35079 27 0.04% 92.60% # Bytes accessed per row activation -system.physmem.bytesPerActivate::35328-35335 6 0.01% 92.61% # Bytes accessed per row activation -system.physmem.bytesPerActivate::35584-35591 13 0.02% 92.62% # Bytes accessed per row activation -system.physmem.bytesPerActivate::35776-35783 1 0.00% 92.62% # Bytes accessed per row activation -system.physmem.bytesPerActivate::35840-35847 165 0.22% 92.84% # Bytes accessed per row activation -system.physmem.bytesPerActivate::35904-35911 1 0.00% 92.85% # Bytes accessed per row activation -system.physmem.bytesPerActivate::36096-36103 71 0.09% 92.94% # Bytes accessed per row activation -system.physmem.bytesPerActivate::36352-36359 14 0.02% 92.96% # Bytes accessed per row activation -system.physmem.bytesPerActivate::36480-36487 1 0.00% 92.96% # Bytes accessed per row activation -system.physmem.bytesPerActivate::36608-36615 37 0.05% 93.01% # Bytes accessed per row activation -system.physmem.bytesPerActivate::36800-36807 1 0.00% 93.01% # Bytes accessed per row activation -system.physmem.bytesPerActivate::36864-36871 169 0.23% 93.24% # Bytes accessed per row activation 
-system.physmem.bytesPerActivate::36928-36935 1 0.00% 93.24% # Bytes accessed per row activation -system.physmem.bytesPerActivate::36992-36999 2 0.00% 93.24% # Bytes accessed per row activation -system.physmem.bytesPerActivate::37120-37127 79 0.11% 93.35% # Bytes accessed per row activation -system.physmem.bytesPerActivate::37248-37255 1 0.00% 93.35% # Bytes accessed per row activation -system.physmem.bytesPerActivate::37376-37383 25 0.03% 93.38% # Bytes accessed per row activation -system.physmem.bytesPerActivate::37504-37511 1 0.00% 93.38% # Bytes accessed per row activation -system.physmem.bytesPerActivate::37632-37639 9 0.01% 93.39% # Bytes accessed per row activation -system.physmem.bytesPerActivate::37888-37895 142 0.19% 93.58% # Bytes accessed per row activation -system.physmem.bytesPerActivate::37952-37959 1 0.00% 93.58% # Bytes accessed per row activation -system.physmem.bytesPerActivate::38144-38151 22 0.03% 93.61% # Bytes accessed per row activation -system.physmem.bytesPerActivate::38400-38407 79 0.11% 93.72% # Bytes accessed per row activation -system.physmem.bytesPerActivate::38656-38663 11 0.01% 93.73% # Bytes accessed per row activation -system.physmem.bytesPerActivate::38912-38919 85 0.11% 93.85% # Bytes accessed per row activation -system.physmem.bytesPerActivate::38976-38983 1 0.00% 93.85% # Bytes accessed per row activation -system.physmem.bytesPerActivate::39104-39111 1 0.00% 93.85% # Bytes accessed per row activation -system.physmem.bytesPerActivate::39168-39175 73 0.10% 93.95% # Bytes accessed per row activation -system.physmem.bytesPerActivate::39232-39239 2 0.00% 93.95% # Bytes accessed per row activation -system.physmem.bytesPerActivate::39424-39431 96 0.13% 94.08% # Bytes accessed per row activation -system.physmem.bytesPerActivate::39680-39687 14 0.02% 94.10% # Bytes accessed per row activation -system.physmem.bytesPerActivate::39936-39943 27 0.04% 94.13% # Bytes accessed per row activation -system.physmem.bytesPerActivate::40192-40199 131 0.17% 94.31% # Bytes accessed per row activation -system.physmem.bytesPerActivate::40320-40327 1 0.00% 94.31% # Bytes accessed per row activation -system.physmem.bytesPerActivate::40448-40455 24 0.03% 94.34% # Bytes accessed per row activation -system.physmem.bytesPerActivate::40512-40519 1 0.00% 94.34% # Bytes accessed per row activation -system.physmem.bytesPerActivate::40704-40711 75 0.10% 94.44% # Bytes accessed per row activation -system.physmem.bytesPerActivate::40768-40775 1 0.00% 94.44% # Bytes accessed per row activation -system.physmem.bytesPerActivate::40960-40967 23 0.03% 94.47% # Bytes accessed per row activation -system.physmem.bytesPerActivate::41152-41159 1 0.00% 94.48% # Bytes accessed per row activation -system.physmem.bytesPerActivate::41216-41223 72 0.10% 94.57% # Bytes accessed per row activation -system.physmem.bytesPerActivate::41344-41351 1 0.00% 94.57% # Bytes accessed per row activation -system.physmem.bytesPerActivate::41472-41479 24 0.03% 94.61% # Bytes accessed per row activation -system.physmem.bytesPerActivate::41728-41735 130 0.17% 94.78% # Bytes accessed per row activation -system.physmem.bytesPerActivate::41984-41991 25 0.03% 94.81% # Bytes accessed per row activation -system.physmem.bytesPerActivate::42240-42247 15 0.02% 94.83% # Bytes accessed per row activation -system.physmem.bytesPerActivate::42496-42503 101 0.13% 94.97% # Bytes accessed per row activation -system.physmem.bytesPerActivate::42752-42759 70 0.09% 95.06% # Bytes accessed per row activation 
-system.physmem.bytesPerActivate::42944-42951 1 0.00% 95.06% # Bytes accessed per row activation -system.physmem.bytesPerActivate::43008-43015 85 0.11% 95.17% # Bytes accessed per row activation -system.physmem.bytesPerActivate::43264-43271 8 0.01% 95.19% # Bytes accessed per row activation -system.physmem.bytesPerActivate::43520-43527 78 0.10% 95.29% # Bytes accessed per row activation +system.physmem.bytesPerActivate::samples 74428 # Bytes accessed per row activation +system.physmem.bytesPerActivate::mean 5819.401301 # Bytes accessed per row activation +system.physmem.bytesPerActivate::gmean 396.636644 # Bytes accessed per row activation +system.physmem.bytesPerActivate::stdev 13081.491079 # Bytes accessed per row activation +system.physmem.bytesPerActivate::64-71 25728 34.57% 34.57% # Bytes accessed per row activation +system.physmem.bytesPerActivate::128-135 15292 20.55% 55.11% # Bytes accessed per row activation +system.physmem.bytesPerActivate::192-199 3262 4.38% 59.50% # Bytes accessed per row activation +system.physmem.bytesPerActivate::256-263 2304 3.10% 62.59% # Bytes accessed per row activation +system.physmem.bytesPerActivate::320-327 1614 2.17% 64.76% # Bytes accessed per row activation +system.physmem.bytesPerActivate::384-391 1322 1.78% 66.54% # Bytes accessed per row activation +system.physmem.bytesPerActivate::448-455 1040 1.40% 67.93% # Bytes accessed per row activation +system.physmem.bytesPerActivate::512-519 1190 1.60% 69.53% # Bytes accessed per row activation +system.physmem.bytesPerActivate::576-583 729 0.98% 70.51% # Bytes accessed per row activation +system.physmem.bytesPerActivate::640-647 570 0.77% 71.28% # Bytes accessed per row activation +system.physmem.bytesPerActivate::704-711 569 0.76% 72.04% # Bytes accessed per row activation +system.physmem.bytesPerActivate::768-775 665 0.89% 72.94% # Bytes accessed per row activation +system.physmem.bytesPerActivate::832-839 312 0.42% 73.36% # Bytes accessed per row activation +system.physmem.bytesPerActivate::896-903 285 0.38% 73.74% # Bytes accessed per row activation +system.physmem.bytesPerActivate::960-967 210 0.28% 74.02% # Bytes accessed per row activation +system.physmem.bytesPerActivate::1024-1031 384 0.52% 74.54% # Bytes accessed per row activation +system.physmem.bytesPerActivate::1088-1095 194 0.26% 74.80% # Bytes accessed per row activation +system.physmem.bytesPerActivate::1152-1159 136 0.18% 74.98% # Bytes accessed per row activation +system.physmem.bytesPerActivate::1216-1223 150 0.20% 75.18% # Bytes accessed per row activation +system.physmem.bytesPerActivate::1280-1287 155 0.21% 75.39% # Bytes accessed per row activation +system.physmem.bytesPerActivate::1344-1351 121 0.16% 75.55% # Bytes accessed per row activation +system.physmem.bytesPerActivate::1408-1415 2260 3.04% 78.59% # Bytes accessed per row activation +system.physmem.bytesPerActivate::1472-1479 133 0.18% 78.77% # Bytes accessed per row activation +system.physmem.bytesPerActivate::1536-1543 107 0.14% 78.91% # Bytes accessed per row activation +system.physmem.bytesPerActivate::1600-1607 57 0.08% 78.99% # Bytes accessed per row activation +system.physmem.bytesPerActivate::1664-1671 59 0.08% 79.07% # Bytes accessed per row activation +system.physmem.bytesPerActivate::1728-1735 48 0.06% 79.13% # Bytes accessed per row activation +system.physmem.bytesPerActivate::1792-1799 56 0.08% 79.21% # Bytes accessed per row activation +system.physmem.bytesPerActivate::1856-1863 51 0.07% 79.28% # Bytes accessed per row activation 
+system.physmem.bytesPerActivate::1920-1927 23 0.03% 79.31% # Bytes accessed per row activation +system.physmem.bytesPerActivate::1984-1991 16 0.02% 79.33% # Bytes accessed per row activation +system.physmem.bytesPerActivate::2048-2055 212 0.28% 79.61% # Bytes accessed per row activation +system.physmem.bytesPerActivate::2112-2119 17 0.02% 79.64% # Bytes accessed per row activation +system.physmem.bytesPerActivate::2176-2183 23 0.03% 79.67% # Bytes accessed per row activation +system.physmem.bytesPerActivate::2240-2247 26 0.03% 79.70% # Bytes accessed per row activation +system.physmem.bytesPerActivate::2304-2311 105 0.14% 79.84% # Bytes accessed per row activation +system.physmem.bytesPerActivate::2368-2375 15 0.02% 79.86% # Bytes accessed per row activation +system.physmem.bytesPerActivate::2432-2439 24 0.03% 79.89% # Bytes accessed per row activation +system.physmem.bytesPerActivate::2496-2503 24 0.03% 79.93% # Bytes accessed per row activation +system.physmem.bytesPerActivate::2560-2567 92 0.12% 80.05% # Bytes accessed per row activation +system.physmem.bytesPerActivate::2624-2631 21 0.03% 80.08% # Bytes accessed per row activation +system.physmem.bytesPerActivate::2688-2695 14 0.02% 80.10% # Bytes accessed per row activation +system.physmem.bytesPerActivate::2752-2759 18 0.02% 80.12% # Bytes accessed per row activation +system.physmem.bytesPerActivate::2816-2823 21 0.03% 80.15% # Bytes accessed per row activation +system.physmem.bytesPerActivate::2880-2887 12 0.02% 80.17% # Bytes accessed per row activation +system.physmem.bytesPerActivate::2944-2951 19 0.03% 80.19% # Bytes accessed per row activation +system.physmem.bytesPerActivate::3008-3015 7 0.01% 80.20% # Bytes accessed per row activation +system.physmem.bytesPerActivate::3072-3079 116 0.16% 80.36% # Bytes accessed per row activation +system.physmem.bytesPerActivate::3136-3143 25 0.03% 80.39% # Bytes accessed per row activation +system.physmem.bytesPerActivate::3200-3207 8 0.01% 80.40% # Bytes accessed per row activation +system.physmem.bytesPerActivate::3264-3271 13 0.02% 80.42% # Bytes accessed per row activation +system.physmem.bytesPerActivate::3328-3335 99 0.13% 80.55% # Bytes accessed per row activation +system.physmem.bytesPerActivate::3392-3399 6 0.01% 80.56% # Bytes accessed per row activation +system.physmem.bytesPerActivate::3456-3463 8 0.01% 80.57% # Bytes accessed per row activation +system.physmem.bytesPerActivate::3520-3527 17 0.02% 80.59% # Bytes accessed per row activation +system.physmem.bytesPerActivate::3584-3591 23 0.03% 80.62% # Bytes accessed per row activation +system.physmem.bytesPerActivate::3648-3655 8 0.01% 80.63% # Bytes accessed per row activation +system.physmem.bytesPerActivate::3712-3719 15 0.02% 80.66% # Bytes accessed per row activation +system.physmem.bytesPerActivate::3776-3783 32 0.04% 80.70% # Bytes accessed per row activation +system.physmem.bytesPerActivate::3840-3847 26 0.03% 80.73% # Bytes accessed per row activation +system.physmem.bytesPerActivate::3904-3911 18 0.02% 80.76% # Bytes accessed per row activation +system.physmem.bytesPerActivate::3968-3975 6 0.01% 80.77% # Bytes accessed per row activation +system.physmem.bytesPerActivate::4032-4039 9 0.01% 80.78% # Bytes accessed per row activation +system.physmem.bytesPerActivate::4096-4103 126 0.17% 80.95% # Bytes accessed per row activation +system.physmem.bytesPerActivate::4160-4167 5 0.01% 80.95% # Bytes accessed per row activation +system.physmem.bytesPerActivate::4224-4231 7 0.01% 80.96% # Bytes accessed per row activation 
+system.physmem.bytesPerActivate::4288-4295 15 0.02% 80.98% # Bytes accessed per row activation +system.physmem.bytesPerActivate::4352-4359 80 0.11% 81.09% # Bytes accessed per row activation +system.physmem.bytesPerActivate::4416-4423 4 0.01% 81.10% # Bytes accessed per row activation +system.physmem.bytesPerActivate::4480-4487 15 0.02% 81.12% # Bytes accessed per row activation +system.physmem.bytesPerActivate::4544-4551 3 0.00% 81.12% # Bytes accessed per row activation +system.physmem.bytesPerActivate::4608-4615 38 0.05% 81.17% # Bytes accessed per row activation +system.physmem.bytesPerActivate::4672-4679 14 0.02% 81.19% # Bytes accessed per row activation +system.physmem.bytesPerActivate::4736-4743 3 0.00% 81.19% # Bytes accessed per row activation +system.physmem.bytesPerActivate::4800-4807 3 0.00% 81.20% # Bytes accessed per row activation +system.physmem.bytesPerActivate::4864-4871 85 0.11% 81.31% # Bytes accessed per row activation +system.physmem.bytesPerActivate::4928-4935 8 0.01% 81.32% # Bytes accessed per row activation +system.physmem.bytesPerActivate::4992-4999 9 0.01% 81.33% # Bytes accessed per row activation +system.physmem.bytesPerActivate::5056-5063 16 0.02% 81.36% # Bytes accessed per row activation +system.physmem.bytesPerActivate::5120-5127 156 0.21% 81.57% # Bytes accessed per row activation +system.physmem.bytesPerActivate::5184-5191 3 0.00% 81.57% # Bytes accessed per row activation +system.physmem.bytesPerActivate::5248-5255 13 0.02% 81.59% # Bytes accessed per row activation +system.physmem.bytesPerActivate::5312-5319 7 0.01% 81.60% # Bytes accessed per row activation +system.physmem.bytesPerActivate::5376-5383 15 0.02% 81.62% # Bytes accessed per row activation +system.physmem.bytesPerActivate::5440-5447 169 0.23% 81.84% # Bytes accessed per row activation +system.physmem.bytesPerActivate::5504-5511 59 0.08% 81.92% # Bytes accessed per row activation +system.physmem.bytesPerActivate::5568-5575 1 0.00% 81.92% # Bytes accessed per row activation +system.physmem.bytesPerActivate::5632-5639 88 0.12% 82.04% # Bytes accessed per row activation +system.physmem.bytesPerActivate::5696-5703 1 0.00% 82.04% # Bytes accessed per row activation +system.physmem.bytesPerActivate::5888-5895 23 0.03% 82.08% # Bytes accessed per row activation +system.physmem.bytesPerActivate::5952-5959 1 0.00% 82.08% # Bytes accessed per row activation +system.physmem.bytesPerActivate::6144-6151 97 0.13% 82.21% # Bytes accessed per row activation +system.physmem.bytesPerActivate::6400-6407 26 0.03% 82.24% # Bytes accessed per row activation +system.physmem.bytesPerActivate::6528-6535 2 0.00% 82.24% # Bytes accessed per row activation +system.physmem.bytesPerActivate::6592-6599 1 0.00% 82.25% # Bytes accessed per row activation +system.physmem.bytesPerActivate::6656-6663 83 0.11% 82.36% # Bytes accessed per row activation +system.physmem.bytesPerActivate::6784-6791 1 0.00% 82.36% # Bytes accessed per row activation +system.physmem.bytesPerActivate::6912-6919 16 0.02% 82.38% # Bytes accessed per row activation +system.physmem.bytesPerActivate::7104-7111 1 0.00% 82.38% # Bytes accessed per row activation +system.physmem.bytesPerActivate::7168-7175 158 0.21% 82.59% # Bytes accessed per row activation +system.physmem.bytesPerActivate::7296-7303 1 0.00% 82.60% # Bytes accessed per row activation +system.physmem.bytesPerActivate::7424-7431 74 0.10% 82.69% # Bytes accessed per row activation +system.physmem.bytesPerActivate::7680-7687 17 0.02% 82.72% # Bytes accessed per row activation 
+system.physmem.bytesPerActivate::7936-7943 18 0.02% 82.74% # Bytes accessed per row activation +system.physmem.bytesPerActivate::8192-8199 158 0.21% 82.95% # Bytes accessed per row activation +system.physmem.bytesPerActivate::8256-8263 1 0.00% 82.96% # Bytes accessed per row activation +system.physmem.bytesPerActivate::8448-8455 22 0.03% 82.98% # Bytes accessed per row activation +system.physmem.bytesPerActivate::8576-8583 1 0.00% 82.99% # Bytes accessed per row activation +system.physmem.bytesPerActivate::8704-8711 18 0.02% 83.01% # Bytes accessed per row activation +system.physmem.bytesPerActivate::8960-8967 80 0.11% 83.12% # Bytes accessed per row activation +system.physmem.bytesPerActivate::9216-9223 161 0.22% 83.33% # Bytes accessed per row activation +system.physmem.bytesPerActivate::9472-9479 14 0.02% 83.35% # Bytes accessed per row activation +system.physmem.bytesPerActivate::9600-9607 1 0.00% 83.35% # Bytes accessed per row activation +system.physmem.bytesPerActivate::9728-9735 83 0.11% 83.47% # Bytes accessed per row activation +system.physmem.bytesPerActivate::9984-9991 26 0.03% 83.50% # Bytes accessed per row activation +system.physmem.bytesPerActivate::10240-10247 95 0.13% 83.63% # Bytes accessed per row activation +system.physmem.bytesPerActivate::10496-10503 23 0.03% 83.66% # Bytes accessed per row activation +system.physmem.bytesPerActivate::10752-10759 85 0.11% 83.77% # Bytes accessed per row activation +system.physmem.bytesPerActivate::10880-10887 1 0.00% 83.77% # Bytes accessed per row activation +system.physmem.bytesPerActivate::11008-11015 14 0.02% 83.79% # Bytes accessed per row activation +system.physmem.bytesPerActivate::11072-11079 2 0.00% 83.80% # Bytes accessed per row activation +system.physmem.bytesPerActivate::11136-11143 1 0.00% 83.80% # Bytes accessed per row activation +system.physmem.bytesPerActivate::11264-11271 160 0.21% 84.01% # Bytes accessed per row activation +system.physmem.bytesPerActivate::11328-11335 1 0.00% 84.01% # Bytes accessed per row activation +system.physmem.bytesPerActivate::11520-11527 71 0.10% 84.11% # Bytes accessed per row activation +system.physmem.bytesPerActivate::11584-11591 1 0.00% 84.11% # Bytes accessed per row activation +system.physmem.bytesPerActivate::11776-11783 35 0.05% 84.16% # Bytes accessed per row activation +system.physmem.bytesPerActivate::11904-11911 1 0.00% 84.16% # Bytes accessed per row activation +system.physmem.bytesPerActivate::12032-12039 73 0.10% 84.26% # Bytes accessed per row activation +system.physmem.bytesPerActivate::12224-12231 1 0.00% 84.26% # Bytes accessed per row activation +system.physmem.bytesPerActivate::12288-12295 102 0.14% 84.40% # Bytes accessed per row activation +system.physmem.bytesPerActivate::12544-12551 14 0.02% 84.41% # Bytes accessed per row activation +system.physmem.bytesPerActivate::12800-12807 14 0.02% 84.43% # Bytes accessed per row activation +system.physmem.bytesPerActivate::12864-12871 1 0.00% 84.43% # Bytes accessed per row activation +system.physmem.bytesPerActivate::13056-13063 79 0.11% 84.54% # Bytes accessed per row activation +system.physmem.bytesPerActivate::13312-13319 90 0.12% 84.66% # Bytes accessed per row activation +system.physmem.bytesPerActivate::13568-13575 6 0.01% 84.67% # Bytes accessed per row activation +system.physmem.bytesPerActivate::13824-13831 79 0.11% 84.78% # Bytes accessed per row activation +system.physmem.bytesPerActivate::13952-13959 1 0.00% 84.78% # Bytes accessed per row activation +system.physmem.bytesPerActivate::14080-14087 82 0.11% 
84.89% # Bytes accessed per row activation +system.physmem.bytesPerActivate::14208-14215 1 0.00% 84.89% # Bytes accessed per row activation +system.physmem.bytesPerActivate::14336-14343 173 0.23% 85.12% # Bytes accessed per row activation +system.physmem.bytesPerActivate::14400-14407 1 0.00% 85.12% # Bytes accessed per row activation +system.physmem.bytesPerActivate::14464-14471 1 0.00% 85.12% # Bytes accessed per row activation +system.physmem.bytesPerActivate::14592-14599 28 0.04% 85.16% # Bytes accessed per row activation +system.physmem.bytesPerActivate::14848-14855 20 0.03% 85.19% # Bytes accessed per row activation +system.physmem.bytesPerActivate::14976-14983 1 0.00% 85.19% # Bytes accessed per row activation +system.physmem.bytesPerActivate::15104-15111 17 0.02% 85.21% # Bytes accessed per row activation +system.physmem.bytesPerActivate::15296-15303 1 0.00% 85.21% # Bytes accessed per row activation +system.physmem.bytesPerActivate::15360-15367 165 0.22% 85.44% # Bytes accessed per row activation +system.physmem.bytesPerActivate::15616-15623 18 0.02% 85.46% # Bytes accessed per row activation +system.physmem.bytesPerActivate::15680-15687 1 0.00% 85.46% # Bytes accessed per row activation +system.physmem.bytesPerActivate::15872-15879 85 0.11% 85.58% # Bytes accessed per row activation +system.physmem.bytesPerActivate::16000-16007 2 0.00% 85.58% # Bytes accessed per row activation +system.physmem.bytesPerActivate::16128-16135 16 0.02% 85.60% # Bytes accessed per row activation +system.physmem.bytesPerActivate::16256-16263 3 0.00% 85.60% # Bytes accessed per row activation +system.physmem.bytesPerActivate::16384-16391 274 0.37% 85.97% # Bytes accessed per row activation +system.physmem.bytesPerActivate::16640-16647 28 0.04% 86.01% # Bytes accessed per row activation +system.physmem.bytesPerActivate::16896-16903 83 0.11% 86.12% # Bytes accessed per row activation +system.physmem.bytesPerActivate::17088-17095 1 0.00% 86.12% # Bytes accessed per row activation +system.physmem.bytesPerActivate::17152-17159 19 0.03% 86.15% # Bytes accessed per row activation +system.physmem.bytesPerActivate::17216-17223 1 0.00% 86.15% # Bytes accessed per row activation +system.physmem.bytesPerActivate::17408-17415 163 0.22% 86.37% # Bytes accessed per row activation +system.physmem.bytesPerActivate::17472-17479 1 0.00% 86.37% # Bytes accessed per row activation +system.physmem.bytesPerActivate::17600-17607 1 0.00% 86.37% # Bytes accessed per row activation +system.physmem.bytesPerActivate::17664-17671 16 0.02% 86.39% # Bytes accessed per row activation +system.physmem.bytesPerActivate::17792-17799 1 0.00% 86.39% # Bytes accessed per row activation +system.physmem.bytesPerActivate::17856-17863 1 0.00% 86.39% # Bytes accessed per row activation +system.physmem.bytesPerActivate::17920-17927 18 0.02% 86.42% # Bytes accessed per row activation +system.physmem.bytesPerActivate::18048-18055 1 0.00% 86.42% # Bytes accessed per row activation +system.physmem.bytesPerActivate::18176-18183 25 0.03% 86.45% # Bytes accessed per row activation +system.physmem.bytesPerActivate::18304-18311 2 0.00% 86.46% # Bytes accessed per row activation +system.physmem.bytesPerActivate::18432-18439 168 0.23% 86.68% # Bytes accessed per row activation +system.physmem.bytesPerActivate::18688-18695 83 0.11% 86.79% # Bytes accessed per row activation +system.physmem.bytesPerActivate::18944-18951 80 0.11% 86.90% # Bytes accessed per row activation +system.physmem.bytesPerActivate::19200-19207 10 0.01% 86.91% # Bytes accessed per row 
activation +system.physmem.bytesPerActivate::19392-19399 1 0.00% 86.92% # Bytes accessed per row activation +system.physmem.bytesPerActivate::19456-19463 84 0.11% 87.03% # Bytes accessed per row activation +system.physmem.bytesPerActivate::19648-19655 1 0.00% 87.03% # Bytes accessed per row activation +system.physmem.bytesPerActivate::19712-19719 79 0.11% 87.14% # Bytes accessed per row activation +system.physmem.bytesPerActivate::19968-19975 12 0.02% 87.15% # Bytes accessed per row activation +system.physmem.bytesPerActivate::20224-20231 18 0.02% 87.18% # Bytes accessed per row activation +system.physmem.bytesPerActivate::20352-20359 1 0.00% 87.18% # Bytes accessed per row activation +system.physmem.bytesPerActivate::20416-20423 1 0.00% 87.18% # Bytes accessed per row activation +system.physmem.bytesPerActivate::20480-20487 106 0.14% 87.32% # Bytes accessed per row activation +system.physmem.bytesPerActivate::20608-20615 1 0.00% 87.32% # Bytes accessed per row activation +system.physmem.bytesPerActivate::20736-20743 76 0.10% 87.43% # Bytes accessed per row activation +system.physmem.bytesPerActivate::20928-20935 1 0.00% 87.43% # Bytes accessed per row activation +system.physmem.bytesPerActivate::20992-20999 32 0.04% 87.47% # Bytes accessed per row activation +system.physmem.bytesPerActivate::21248-21255 72 0.10% 87.57% # Bytes accessed per row activation +system.physmem.bytesPerActivate::21376-21383 1 0.00% 87.57% # Bytes accessed per row activation +system.physmem.bytesPerActivate::21440-21447 1 0.00% 87.57% # Bytes accessed per row activation +system.physmem.bytesPerActivate::21504-21511 147 0.20% 87.77% # Bytes accessed per row activation +system.physmem.bytesPerActivate::21760-21767 16 0.02% 87.79% # Bytes accessed per row activation +system.physmem.bytesPerActivate::22016-22023 88 0.12% 87.91% # Bytes accessed per row activation +system.physmem.bytesPerActivate::22144-22151 1 0.00% 87.91% # Bytes accessed per row activation +system.physmem.bytesPerActivate::22272-22279 18 0.02% 87.93% # Bytes accessed per row activation +system.physmem.bytesPerActivate::22400-22407 1 0.00% 87.93% # Bytes accessed per row activation +system.physmem.bytesPerActivate::22528-22535 92 0.12% 88.06% # Bytes accessed per row activation +system.physmem.bytesPerActivate::22592-22599 1 0.00% 88.06% # Bytes accessed per row activation +system.physmem.bytesPerActivate::22784-22791 29 0.04% 88.10% # Bytes accessed per row activation +system.physmem.bytesPerActivate::22912-22919 1 0.00% 88.10% # Bytes accessed per row activation +system.physmem.bytesPerActivate::23040-23047 83 0.11% 88.21% # Bytes accessed per row activation +system.physmem.bytesPerActivate::23296-23303 10 0.01% 88.22% # Bytes accessed per row activation +system.physmem.bytesPerActivate::23552-23559 156 0.21% 88.43% # Bytes accessed per row activation +system.physmem.bytesPerActivate::23680-23687 1 0.00% 88.43% # Bytes accessed per row activation +system.physmem.bytesPerActivate::23808-23815 72 0.10% 88.53% # Bytes accessed per row activation +system.physmem.bytesPerActivate::24064-24071 14 0.02% 88.55% # Bytes accessed per row activation +system.physmem.bytesPerActivate::24320-24327 23 0.03% 88.58% # Bytes accessed per row activation +system.physmem.bytesPerActivate::24576-24583 162 0.22% 88.80% # Bytes accessed per row activation +system.physmem.bytesPerActivate::24768-24775 2 0.00% 88.80% # Bytes accessed per row activation +system.physmem.bytesPerActivate::24832-24839 25 0.03% 88.83% # Bytes accessed per row activation 
+system.physmem.bytesPerActivate::24896-24903 1 0.00% 88.84% # Bytes accessed per row activation +system.physmem.bytesPerActivate::25088-25095 16 0.02% 88.86% # Bytes accessed per row activation +system.physmem.bytesPerActivate::25216-25223 1 0.00% 88.86% # Bytes accessed per row activation +system.physmem.bytesPerActivate::25280-25287 2 0.00% 88.86% # Bytes accessed per row activation +system.physmem.bytesPerActivate::25344-25351 74 0.10% 88.96% # Bytes accessed per row activation +system.physmem.bytesPerActivate::25600-25607 153 0.21% 89.17% # Bytes accessed per row activation +system.physmem.bytesPerActivate::25728-25735 1 0.00% 89.17% # Bytes accessed per row activation +system.physmem.bytesPerActivate::25856-25863 12 0.02% 89.18% # Bytes accessed per row activation +system.physmem.bytesPerActivate::26112-26119 84 0.11% 89.30% # Bytes accessed per row activation +system.physmem.bytesPerActivate::26368-26375 23 0.03% 89.33% # Bytes accessed per row activation +system.physmem.bytesPerActivate::26432-26439 2 0.00% 89.33% # Bytes accessed per row activation +system.physmem.bytesPerActivate::26496-26503 1 0.00% 89.33% # Bytes accessed per row activation +system.physmem.bytesPerActivate::26624-26631 91 0.12% 89.45% # Bytes accessed per row activation +system.physmem.bytesPerActivate::26880-26887 22 0.03% 89.48% # Bytes accessed per row activation +system.physmem.bytesPerActivate::27136-27143 88 0.12% 89.60% # Bytes accessed per row activation +system.physmem.bytesPerActivate::27392-27399 18 0.02% 89.63% # Bytes accessed per row activation +system.physmem.bytesPerActivate::27456-27463 1 0.00% 89.63% # Bytes accessed per row activation +system.physmem.bytesPerActivate::27520-27527 1 0.00% 89.63% # Bytes accessed per row activation +system.physmem.bytesPerActivate::27648-27655 143 0.19% 89.82% # Bytes accessed per row activation +system.physmem.bytesPerActivate::27904-27911 72 0.10% 89.92% # Bytes accessed per row activation +system.physmem.bytesPerActivate::28160-28167 33 0.04% 89.96% # Bytes accessed per row activation +system.physmem.bytesPerActivate::28224-28231 1 0.00% 89.96% # Bytes accessed per row activation +system.physmem.bytesPerActivate::28416-28423 78 0.10% 90.07% # Bytes accessed per row activation +system.physmem.bytesPerActivate::28672-28679 106 0.14% 90.21% # Bytes accessed per row activation +system.physmem.bytesPerActivate::28928-28935 16 0.02% 90.23% # Bytes accessed per row activation +system.physmem.bytesPerActivate::29184-29191 11 0.01% 90.25% # Bytes accessed per row activation +system.physmem.bytesPerActivate::29440-29447 79 0.11% 90.35% # Bytes accessed per row activation +system.physmem.bytesPerActivate::29568-29575 1 0.00% 90.35% # Bytes accessed per row activation +system.physmem.bytesPerActivate::29696-29703 86 0.12% 90.47% # Bytes accessed per row activation +system.physmem.bytesPerActivate::29952-29959 2 0.00% 90.47% # Bytes accessed per row activation +system.physmem.bytesPerActivate::30208-30215 79 0.11% 90.58% # Bytes accessed per row activation +system.physmem.bytesPerActivate::30464-30471 88 0.12% 90.70% # Bytes accessed per row activation +system.physmem.bytesPerActivate::30528-30535 1 0.00% 90.70% # Bytes accessed per row activation +system.physmem.bytesPerActivate::30592-30599 1 0.00% 90.70% # Bytes accessed per row activation +system.physmem.bytesPerActivate::30720-30727 164 0.22% 90.92% # Bytes accessed per row activation +system.physmem.bytesPerActivate::30848-30855 2 0.00% 90.92% # Bytes accessed per row activation 
+system.physmem.bytesPerActivate::30976-30983 26 0.03% 90.96% # Bytes accessed per row activation +system.physmem.bytesPerActivate::31232-31239 20 0.03% 90.98% # Bytes accessed per row activation +system.physmem.bytesPerActivate::31488-31495 12 0.02% 91.00% # Bytes accessed per row activation +system.physmem.bytesPerActivate::31744-31751 163 0.22% 91.22% # Bytes accessed per row activation +system.physmem.bytesPerActivate::32000-32007 12 0.02% 91.24% # Bytes accessed per row activation +system.physmem.bytesPerActivate::32064-32071 1 0.00% 91.24% # Bytes accessed per row activation +system.physmem.bytesPerActivate::32128-32135 1 0.00% 91.24% # Bytes accessed per row activation +system.physmem.bytesPerActivate::32256-32263 82 0.11% 91.35% # Bytes accessed per row activation +system.physmem.bytesPerActivate::32512-32519 24 0.03% 91.38% # Bytes accessed per row activation +system.physmem.bytesPerActivate::32640-32647 1 0.00% 91.38% # Bytes accessed per row activation +system.physmem.bytesPerActivate::32768-32775 278 0.37% 91.76% # Bytes accessed per row activation +system.physmem.bytesPerActivate::32896-32903 2 0.00% 91.76% # Bytes accessed per row activation +system.physmem.bytesPerActivate::33024-33031 21 0.03% 91.79% # Bytes accessed per row activation +system.physmem.bytesPerActivate::33280-33287 92 0.12% 91.91% # Bytes accessed per row activation +system.physmem.bytesPerActivate::33344-33351 3 0.00% 91.91% # Bytes accessed per row activation +system.physmem.bytesPerActivate::33472-33479 1 0.00% 91.92% # Bytes accessed per row activation +system.physmem.bytesPerActivate::33536-33543 12 0.02% 91.93% # Bytes accessed per row activation +system.physmem.bytesPerActivate::33792-33799 159 0.21% 92.15% # Bytes accessed per row activation +system.physmem.bytesPerActivate::34048-34055 13 0.02% 92.16% # Bytes accessed per row activation +system.physmem.bytesPerActivate::34304-34311 20 0.03% 92.19% # Bytes accessed per row activation +system.physmem.bytesPerActivate::34560-34567 27 0.04% 92.23% # Bytes accessed per row activation +system.physmem.bytesPerActivate::34688-34695 1 0.00% 92.23% # Bytes accessed per row activation +system.physmem.bytesPerActivate::34816-34823 160 0.21% 92.44% # Bytes accessed per row activation +system.physmem.bytesPerActivate::34944-34951 1 0.00% 92.44% # Bytes accessed per row activation +system.physmem.bytesPerActivate::35072-35079 84 0.11% 92.56% # Bytes accessed per row activation +system.physmem.bytesPerActivate::35328-35335 78 0.10% 92.66% # Bytes accessed per row activation +system.physmem.bytesPerActivate::35456-35463 1 0.00% 92.66% # Bytes accessed per row activation +system.physmem.bytesPerActivate::35584-35591 3 0.00% 92.67% # Bytes accessed per row activation +system.physmem.bytesPerActivate::35840-35847 85 0.11% 92.78% # Bytes accessed per row activation +system.physmem.bytesPerActivate::36032-36039 1 0.00% 92.78% # Bytes accessed per row activation +system.physmem.bytesPerActivate::36096-36103 78 0.10% 92.89% # Bytes accessed per row activation +system.physmem.bytesPerActivate::36352-36359 11 0.01% 92.90% # Bytes accessed per row activation +system.physmem.bytesPerActivate::36608-36615 15 0.02% 92.92% # Bytes accessed per row activation +system.physmem.bytesPerActivate::36864-36871 99 0.13% 93.06% # Bytes accessed per row activation +system.physmem.bytesPerActivate::37120-37127 73 0.10% 93.15% # Bytes accessed per row activation +system.physmem.bytesPerActivate::37312-37319 1 0.00% 93.15% # Bytes accessed per row activation 
+system.physmem.bytesPerActivate::37376-37383 31 0.04% 93.20% # Bytes accessed per row activation +system.physmem.bytesPerActivate::37504-37511 1 0.00% 93.20% # Bytes accessed per row activation +system.physmem.bytesPerActivate::37632-37639 71 0.10% 93.29% # Bytes accessed per row activation +system.physmem.bytesPerActivate::37888-37895 147 0.20% 93.49% # Bytes accessed per row activation +system.physmem.bytesPerActivate::38144-38151 15 0.02% 93.51% # Bytes accessed per row activation +system.physmem.bytesPerActivate::38400-38407 87 0.12% 93.63% # Bytes accessed per row activation +system.physmem.bytesPerActivate::38528-38535 1 0.00% 93.63% # Bytes accessed per row activation +system.physmem.bytesPerActivate::38656-38663 22 0.03% 93.66% # Bytes accessed per row activation +system.physmem.bytesPerActivate::38912-38919 90 0.12% 93.78% # Bytes accessed per row activation +system.physmem.bytesPerActivate::39104-39111 2 0.00% 93.78% # Bytes accessed per row activation +system.physmem.bytesPerActivate::39168-39175 24 0.03% 93.81% # Bytes accessed per row activation +system.physmem.bytesPerActivate::39424-39431 84 0.11% 93.93% # Bytes accessed per row activation +system.physmem.bytesPerActivate::39488-39495 1 0.00% 93.93% # Bytes accessed per row activation +system.physmem.bytesPerActivate::39680-39687 9 0.01% 93.94% # Bytes accessed per row activation +system.physmem.bytesPerActivate::39808-39815 2 0.00% 93.94% # Bytes accessed per row activation +system.physmem.bytesPerActivate::39936-39943 152 0.20% 94.15% # Bytes accessed per row activation +system.physmem.bytesPerActivate::40192-40199 74 0.10% 94.25% # Bytes accessed per row activation +system.physmem.bytesPerActivate::40256-40263 2 0.00% 94.25% # Bytes accessed per row activation +system.physmem.bytesPerActivate::40448-40455 14 0.02% 94.27% # Bytes accessed per row activation +system.physmem.bytesPerActivate::40640-40647 1 0.00% 94.27% # Bytes accessed per row activation +system.physmem.bytesPerActivate::40704-40711 22 0.03% 94.30% # Bytes accessed per row activation +system.physmem.bytesPerActivate::40768-40775 2 0.00% 94.30% # Bytes accessed per row activation +system.physmem.bytesPerActivate::40960-40967 160 0.21% 94.52% # Bytes accessed per row activation +system.physmem.bytesPerActivate::41216-41223 23 0.03% 94.55% # Bytes accessed per row activation +system.physmem.bytesPerActivate::41472-41479 14 0.02% 94.57% # Bytes accessed per row activation +system.physmem.bytesPerActivate::41728-41735 72 0.10% 94.66% # Bytes accessed per row activation +system.physmem.bytesPerActivate::41984-41991 153 0.21% 94.87% # Bytes accessed per row activation +system.physmem.bytesPerActivate::42112-42119 1 0.00% 94.87% # Bytes accessed per row activation +system.physmem.bytesPerActivate::42240-42247 11 0.01% 94.88% # Bytes accessed per row activation +system.physmem.bytesPerActivate::42496-42503 81 0.11% 94.99% # Bytes accessed per row activation +system.physmem.bytesPerActivate::42752-42759 25 0.03% 95.03% # Bytes accessed per row activation +system.physmem.bytesPerActivate::42944-42951 1 0.00% 95.03% # Bytes accessed per row activation +system.physmem.bytesPerActivate::43008-43015 90 0.12% 95.15% # Bytes accessed per row activation +system.physmem.bytesPerActivate::43136-43143 1 0.00% 95.15% # Bytes accessed per row activation +system.physmem.bytesPerActivate::43264-43271 18 0.02% 95.18% # Bytes accessed per row activation +system.physmem.bytesPerActivate::43392-43399 1 0.00% 95.18% # Bytes accessed per row activation 
+system.physmem.bytesPerActivate::43520-43527 87 0.12% 95.29% # Bytes accessed per row activation system.physmem.bytesPerActivate::43648-43655 1 0.00% 95.29% # Bytes accessed per row activation -system.physmem.bytesPerActivate::43776-43783 24 0.03% 95.32% # Bytes accessed per row activation -system.physmem.bytesPerActivate::44032-44039 140 0.19% 95.51% # Bytes accessed per row activation -system.physmem.bytesPerActivate::44288-44295 12 0.02% 95.53% # Bytes accessed per row activation -system.physmem.bytesPerActivate::44352-44359 1 0.00% 95.53% # Bytes accessed per row activation -system.physmem.bytesPerActivate::44416-44423 1 0.00% 95.53% # Bytes accessed per row activation -system.physmem.bytesPerActivate::44544-44551 23 0.03% 95.56% # Bytes accessed per row activation -system.physmem.bytesPerActivate::44800-44807 80 0.11% 95.67% # Bytes accessed per row activation -system.physmem.bytesPerActivate::44928-44935 2 0.00% 95.67% # Bytes accessed per row activation -system.physmem.bytesPerActivate::45056-45063 165 0.22% 95.89% # Bytes accessed per row activation -system.physmem.bytesPerActivate::45184-45191 1 0.00% 95.89% # Bytes accessed per row activation -system.physmem.bytesPerActivate::45248-45255 1 0.00% 95.89% # Bytes accessed per row activation -system.physmem.bytesPerActivate::45312-45319 38 0.05% 95.94% # Bytes accessed per row activation -system.physmem.bytesPerActivate::45568-45575 12 0.02% 95.96% # Bytes accessed per row activation -system.physmem.bytesPerActivate::45824-45831 71 0.09% 96.05% # Bytes accessed per row activation -system.physmem.bytesPerActivate::45952-45959 1 0.00% 96.05% # Bytes accessed per row activation -system.physmem.bytesPerActivate::46080-46087 165 0.22% 96.27% # Bytes accessed per row activation -system.physmem.bytesPerActivate::46144-46151 1 0.00% 96.28% # Bytes accessed per row activation -system.physmem.bytesPerActivate::46336-46343 15 0.02% 96.30% # Bytes accessed per row activation -system.physmem.bytesPerActivate::46592-46599 12 0.02% 96.31% # Bytes accessed per row activation -system.physmem.bytesPerActivate::46720-46727 1 0.00% 96.31% # Bytes accessed per row activation -system.physmem.bytesPerActivate::46848-46855 31 0.04% 96.35% # Bytes accessed per row activation -system.physmem.bytesPerActivate::47104-47111 151 0.20% 96.56% # Bytes accessed per row activation -system.physmem.bytesPerActivate::47168-47175 1 0.00% 96.56% # Bytes accessed per row activation -system.physmem.bytesPerActivate::47360-47367 87 0.12% 96.67% # Bytes accessed per row activation -system.physmem.bytesPerActivate::47488-47495 1 0.00% 96.67% # Bytes accessed per row activation -system.physmem.bytesPerActivate::47616-47623 21 0.03% 96.70% # Bytes accessed per row activation -system.physmem.bytesPerActivate::47744-47751 1 0.00% 96.70% # Bytes accessed per row activation -system.physmem.bytesPerActivate::47872-47879 24 0.03% 96.74% # Bytes accessed per row activation -system.physmem.bytesPerActivate::47936-47943 2 0.00% 96.74% # Bytes accessed per row activation -system.physmem.bytesPerActivate::48000-48007 1 0.00% 96.74% # Bytes accessed per row activation -system.physmem.bytesPerActivate::48128-48135 239 0.32% 97.06% # Bytes accessed per row activation -system.physmem.bytesPerActivate::48192-48199 1 0.00% 97.06% # Bytes accessed per row activation -system.physmem.bytesPerActivate::48256-48263 1 0.00% 97.06% # Bytes accessed per row activation -system.physmem.bytesPerActivate::48384-48391 36 0.05% 97.11% # Bytes accessed per row activation 
-system.physmem.bytesPerActivate::48640-48647 14 0.02% 97.13% # Bytes accessed per row activation -system.physmem.bytesPerActivate::48768-48775 14 0.02% 97.15% # Bytes accessed per row activation -system.physmem.bytesPerActivate::48896-48903 4 0.01% 97.15% # Bytes accessed per row activation -system.physmem.bytesPerActivate::48960-48967 5 0.01% 97.16% # Bytes accessed per row activation -system.physmem.bytesPerActivate::49024-49031 3 0.00% 97.16% # Bytes accessed per row activation -system.physmem.bytesPerActivate::49088-49095 2 0.00% 97.17% # Bytes accessed per row activation -system.physmem.bytesPerActivate::49152-49159 2125 2.83% 100.00% # Bytes accessed per row activation -system.physmem.bytesPerActivate::total 74963 # Bytes accessed per row activation -system.physmem.totQLat 159518930750 # Total ticks spent queuing -system.physmem.totMemAccLat 202571234500 # Total ticks spent from burst creation until serviced by the DRAM -system.physmem.totBusLat 33271365000 # Total ticks spent in databus transfers -system.physmem.totBankLat 9780938750 # Total ticks spent accessing banks -system.physmem.avgQLat 23972.41 # Average queueing delay per DRAM burst -system.physmem.avgBankLat 1469.87 # Average bank access latency per DRAM burst +system.physmem.bytesPerActivate::43776-43783 17 0.02% 95.32% # Bytes accessed per row activation +system.physmem.bytesPerActivate::44032-44039 146 0.20% 95.51% # Bytes accessed per row activation +system.physmem.bytesPerActivate::44096-44103 1 0.00% 95.52% # Bytes accessed per row activation +system.physmem.bytesPerActivate::44288-44295 71 0.10% 95.61% # Bytes accessed per row activation +system.physmem.bytesPerActivate::44416-44423 1 0.00% 95.61% # Bytes accessed per row activation +system.physmem.bytesPerActivate::44544-44551 32 0.04% 95.65% # Bytes accessed per row activation +system.physmem.bytesPerActivate::44608-44615 1 0.00% 95.66% # Bytes accessed per row activation +system.physmem.bytesPerActivate::44800-44807 75 0.10% 95.76% # Bytes accessed per row activation +system.physmem.bytesPerActivate::45056-45063 96 0.13% 95.89% # Bytes accessed per row activation +system.physmem.bytesPerActivate::45120-45127 1 0.00% 95.89% # Bytes accessed per row activation +system.physmem.bytesPerActivate::45312-45319 16 0.02% 95.91% # Bytes accessed per row activation +system.physmem.bytesPerActivate::45568-45575 9 0.01% 95.92% # Bytes accessed per row activation +system.physmem.bytesPerActivate::45824-45831 79 0.11% 96.03% # Bytes accessed per row activation +system.physmem.bytesPerActivate::45888-45895 1 0.00% 96.03% # Bytes accessed per row activation +system.physmem.bytesPerActivate::46080-46087 86 0.12% 96.14% # Bytes accessed per row activation +system.physmem.bytesPerActivate::46336-46343 3 0.00% 96.15% # Bytes accessed per row activation +system.physmem.bytesPerActivate::46592-46599 79 0.11% 96.25% # Bytes accessed per row activation +system.physmem.bytesPerActivate::46720-46727 1 0.00% 96.26% # Bytes accessed per row activation +system.physmem.bytesPerActivate::46784-46791 1 0.00% 96.26% # Bytes accessed per row activation +system.physmem.bytesPerActivate::46848-46855 84 0.11% 96.37% # Bytes accessed per row activation +system.physmem.bytesPerActivate::47040-47047 1 0.00% 96.37% # Bytes accessed per row activation +system.physmem.bytesPerActivate::47104-47111 165 0.22% 96.59% # Bytes accessed per row activation +system.physmem.bytesPerActivate::47360-47367 25 0.03% 96.63% # Bytes accessed per row activation +system.physmem.bytesPerActivate::47424-47431 1 0.00% 96.63% 
# Bytes accessed per row activation
+system.physmem.bytesPerActivate::47488-47495 1 0.00% 96.63% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::47616-47623 19 0.03% 96.65% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::47680-47687 1 0.00% 96.66% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::47744-47751 1 0.00% 96.66% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::47872-47879 18 0.02% 96.68% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::47936-47943 2 0.00% 96.68% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::48128-48135 184 0.25% 96.93% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::48192-48199 1 0.00% 96.93% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::48256-48263 2 0.00% 96.94% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::48320-48327 3 0.00% 96.94% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::48384-48391 42 0.06% 97.00% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::48448-48455 1 0.00% 97.00% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::48640-48647 81 0.11% 97.11% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::48768-48775 11 0.01% 97.12% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::48896-48903 14 0.02% 97.14% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::48960-48967 7 0.01% 97.15% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::49024-49031 10 0.01% 97.16% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::49088-49095 7 0.01% 97.17% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::49152-49159 2105 2.83% 100.00% # Bytes accessed per row activation
+system.physmem.bytesPerActivate::total 74428 # Bytes accessed per row activation
+system.physmem.totQLat 159442536500 # Total ticks spent queuing
+system.physmem.totMemAccLat 202459287750 # Total ticks spent from burst creation until serviced by the DRAM
+system.physmem.totBusLat 33270160000 # Total ticks spent in databus transfers
+system.physmem.totBankLat 9746591250 # Total ticks spent accessing banks
+system.physmem.avgQLat 23961.79 # Average queueing delay per DRAM burst
+system.physmem.avgBankLat 1464.76 # Average bank access latency per DRAM burst
system.physmem.avgBusLat 5000.00 # Average bus latency per DRAM burst
-system.physmem.avgMemAccLat 30442.28 # Average memory access latency per DRAM burst
-system.physmem.avgRdBW 356.14 # Average DRAM read bandwidth in MiByte/s
-system.physmem.avgWrBW 6.10 # Average achieved write bandwidth in MiByte/s
-system.physmem.avgRdBWSys 51.98 # Average system read bandwidth in MiByte/s
-system.physmem.avgWrBWSys 5.99 # Average system write bandwidth in MiByte/s
+system.physmem.avgMemAccLat 30426.56 # Average memory access latency per DRAM burst
+system.physmem.avgRdBW 356.03 # Average DRAM read bandwidth in MiByte/s
+system.physmem.avgWrBW 6.08 # Average achieved write bandwidth in MiByte/s
+system.physmem.avgRdBWSys 51.96 # Average system read bandwidth in MiByte/s
+system.physmem.avgWrBWSys 5.97 # Average system write bandwidth in MiByte/s
system.physmem.peakBW 12800.00 # Theoretical peak bandwidth in MiByte/s
system.physmem.busUtil 2.83 # Data bus utilization in percentage
system.physmem.busUtilRead 2.78 # Data bus utilization in percentage for reads
system.physmem.busUtilWrite 0.05 # Data bus utilization in percentage for writes
system.physmem.avgRdQLen 0.17 # Average read queue length when enqueuing
-system.physmem.avgWrQLen 12.12 # Average write queue length when enqueuing
-system.physmem.readRowHits 6598430 # Number of row buffer hits during reads
-system.physmem.writeRowHits 94836 # Number of row buffer hits during writes
+system.physmem.avgWrQLen 12.52 # Average write queue length when enqueuing
+system.physmem.readRowHits 6598367 # Number of row buffer hits during reads
+system.physmem.writeRowHits 94814 # Number of row buffer hits during writes
system.physmem.readRowHitRate 99.16 # Row buffer hit rate for reads
-system.physmem.writeRowHitRate 83.22 # Row buffer hit rate for writes
-system.physmem.avgGap 159948.21 # Average gap between requests
-system.physmem.pageHitRate 98.89 # Row buffer hit rate, read and write combined
-system.physmem.prechargeAllPercent 4.87 # Percentage of time for which DRAM has all the banks in precharge state
+system.physmem.writeRowHitRate 83.48 # Row buffer hit rate for writes
+system.physmem.avgGap 160004.95 # Average gap between requests
+system.physmem.pageHitRate 98.90 # Row buffer hit rate, read and write combined
+system.physmem.prechargeAllPercent 4.95 # Percentage of time for which DRAM has all the banks in precharge state
system.realview.nvmem.bytes_read::cpu0.inst 20 # Number of bytes read from this memory
system.realview.nvmem.bytes_read::cpu1.inst 48 # Number of bytes read from this memory
system.realview.nvmem.bytes_read::total 68 # Number of bytes read from this memory
@@ -611,286 +610,314 @@ system.realview.nvmem.bw_inst_read::total 57 # I
system.realview.nvmem.bw_total::cpu0.inst 17 # Total bandwidth to/from this memory (bytes/s)
system.realview.nvmem.bw_total::cpu1.inst 40 # Total bandwidth to/from this memory (bytes/s)
system.realview.nvmem.bw_total::total 57 # Total bandwidth to/from this memory (bytes/s)
-system.membus.throughput 59983824 # Throughput (bytes/s)
-system.membus.trans_dist::ReadReq 7703157 # Transaction distribution
-system.membus.trans_dist::ReadResp 7703157 # Transaction distribution
-system.membus.trans_dist::WriteReq 767205 # Transaction distribution
-system.membus.trans_dist::WriteResp 767205 # Transaction distribution
-system.membus.trans_dist::Writeback 64627 # Transaction distribution
-system.membus.trans_dist::UpgradeReq 27746 # Transaction distribution
-system.membus.trans_dist::SCUpgradeReq 16446 # Transaction distribution
-system.membus.trans_dist::UpgradeResp 10661 # Transaction distribution
-system.membus.trans_dist::ReadExReq 137744 # Transaction distribution
-system.membus.trans_dist::ReadExResp 137297 # Transaction distribution
-system.membus.pkt_count_system.l2c.mem_side::system.bridge.slave 2382570 # Packet count per connected master and slave (bytes)
+system.membus.throughput 59941628 # Throughput (bytes/s)
+system.membus.trans_dist::ReadReq 7703327 # Transaction distribution
+system.membus.trans_dist::ReadResp 7703327 # Transaction distribution
+system.membus.trans_dist::WriteReq 767563 # Transaction distribution
+system.membus.trans_dist::WriteResp 767563 # Transaction distribution
+system.membus.trans_dist::Writeback 64262 # Transaction distribution
+system.membus.trans_dist::UpgradeReq 31362 # Transaction distribution
+system.membus.trans_dist::SCUpgradeReq 17250 # Transaction distribution
+system.membus.trans_dist::UpgradeResp 11807 # Transaction distribution
+system.membus.trans_dist::ReadExReq 137774 # Transaction distribution
+system.membus.trans_dist::ReadExResp 137331 # Transaction distribution
+system.membus.pkt_count_system.l2c.mem_side::system.bridge.slave 2382556 # Packet count per connected master and slave (bytes)
system.membus.pkt_count_system.l2c.mem_side::system.realview.nvmem.port 34 # Packet count per connected master and slave (bytes)
-system.membus.pkt_count_system.l2c.mem_side::system.realview.gic.pio 8870 # Packet count per connected master and slave (bytes)
+system.membus.pkt_count_system.l2c.mem_side::system.realview.gic.pio 10302 # Packet count per connected master and slave (bytes)
system.membus.pkt_count_system.l2c.mem_side::system.realview.a9scu.pio 4 # Packet count per connected master and slave (bytes)
system.membus.pkt_count_system.l2c.mem_side::system.realview.local_cpu_timer.pio 910 # Packet count per connected master and slave (bytes)
-system.membus.pkt_count_system.l2c.mem_side::system.physmem.port 1966729 # Packet count per connected master and slave (bytes)
-system.membus.pkt_count_system.l2c.mem_side::total 4359117 # Packet count per connected master and slave (bytes)
+system.membus.pkt_count_system.l2c.mem_side::system.physmem.port 1971632 # Packet count per connected master and slave (bytes)
+system.membus.pkt_count_system.l2c.mem_side::total 4365438 # Packet count per connected master and slave (bytes)
system.membus.pkt_count_system.iocache.mem_side::system.physmem.port 12976128 # Packet count per connected master and slave (bytes)
system.membus.pkt_count_system.iocache.mem_side::total 12976128 # Packet count per connected master and slave (bytes)
-system.membus.pkt_count::total 17335245 # Packet count per connected master and slave (bytes)
-system.membus.tot_pkt_size_system.l2c.mem_side::system.bridge.slave 2389894 # Cumulative packet size per connected master and slave (bytes)
+system.membus.pkt_count::total 17341566 # Packet count per connected master and slave (bytes)
+system.membus.tot_pkt_size_system.l2c.mem_side::system.bridge.slave 2389866 # Cumulative packet size per connected master and slave (bytes)
system.membus.tot_pkt_size_system.l2c.mem_side::system.realview.nvmem.port 68 # Cumulative packet size per connected master and slave (bytes)
-system.membus.tot_pkt_size_system.l2c.mem_side::system.realview.gic.pio 17740 # Cumulative packet size per connected master and slave (bytes)
+system.membus.tot_pkt_size_system.l2c.mem_side::system.realview.gic.pio 20604 # Cumulative packet size per connected master and slave (bytes)
system.membus.tot_pkt_size_system.l2c.mem_side::system.realview.a9scu.pio 8 # Cumulative packet size per connected master and slave (bytes)
system.membus.tot_pkt_size_system.l2c.mem_side::system.realview.local_cpu_timer.pio 1820 # Cumulative packet size per connected master and slave (bytes)
-system.membus.tot_pkt_size_system.l2c.mem_side::system.physmem.port 17414132 # Cumulative packet size per connected master and slave (bytes)
-system.membus.tot_pkt_size_system.l2c.mem_side::total 19823662 # Cumulative packet size per connected master and slave (bytes)
+system.membus.tot_pkt_size_system.l2c.mem_side::system.physmem.port 17381364 # Cumulative packet size per connected master and slave (bytes)
+system.membus.tot_pkt_size_system.l2c.mem_side::total 19793730 # Cumulative packet size per connected master and slave (bytes)
system.membus.tot_pkt_size_system.iocache.mem_side::system.physmem.port 51904512 # Cumulative packet size per connected master and slave (bytes)
system.membus.tot_pkt_size_system.iocache.mem_side::total 51904512 # Cumulative packet size per connected master and slave (bytes)
-system.membus.tot_pkt_size::total 71728174 # Cumulative packet size per connected master and slave (bytes) -system.membus.data_through_bus 71728174 # Total data (bytes) +system.membus.tot_pkt_size::total 71698242 # Cumulative packet size per connected master and slave (bytes) +system.membus.data_through_bus 71698242 # Total data (bytes) system.membus.snoop_data_through_bus 0 # Total snoop data (bytes) -system.membus.reqLayer0.occupancy 1224786000 # Layer occupancy (ticks) +system.membus.reqLayer0.occupancy 1224728000 # Layer occupancy (ticks) system.membus.reqLayer0.utilization 0.1 # Layer utilization (%) system.membus.reqLayer1.occupancy 18000 # Layer occupancy (ticks) system.membus.reqLayer1.utilization 0.0 # Layer utilization (%) -system.membus.reqLayer2.occupancy 7986500 # Layer occupancy (ticks) +system.membus.reqLayer2.occupancy 9233500 # Layer occupancy (ticks) system.membus.reqLayer2.utilization 0.0 # Layer utilization (%) system.membus.reqLayer4.occupancy 2500 # Layer occupancy (ticks) system.membus.reqLayer4.utilization 0.0 # Layer utilization (%) system.membus.reqLayer5.occupancy 782000 # Layer occupancy (ticks) system.membus.reqLayer5.utilization 0.0 # Layer utilization (%) -system.membus.reqLayer6.occupancy 9213145499 # Layer occupancy (ticks) +system.membus.reqLayer6.occupancy 9211169999 # Layer occupancy (ticks) system.membus.reqLayer6.utilization 0.8 # Layer utilization (%) -system.membus.respLayer1.occupancy 5079077969 # Layer occupancy (ticks) +system.membus.respLayer1.occupancy 5081009046 # Layer occupancy (ticks) system.membus.respLayer1.utilization 0.4 # Layer utilization (%) -system.membus.respLayer2.occupancy 14657796999 # Layer occupancy (ticks) +system.membus.respLayer2.occupancy 14657682249 # Layer occupancy (ticks) system.membus.respLayer2.utilization 1.2 # Layer utilization (%) -system.l2c.tags.replacements 69622 # number of replacements -system.l2c.tags.tagsinuse 53154.714662 # Cycle average of tags in use -system.l2c.tags.total_refs 1651251 # Total number of references to valid blocks. -system.l2c.tags.sampled_refs 134786 # Sample count of references to valid blocks. -system.l2c.tags.avg_refs 12.250909 # Average number of references to valid blocks. +system.cpu_clk_domain.clock 500 # Clock period in ticks +system.l2c.tags.replacements 69474 # number of replacements +system.l2c.tags.tagsinuse 52958.436277 # Cycle average of tags in use +system.l2c.tags.total_refs 1673866 # Total number of references to valid blocks. +system.l2c.tags.sampled_refs 134633 # Sample count of references to valid blocks. +system.l2c.tags.avg_refs 12.432806 # Average number of references to valid blocks. system.l2c.tags.warmup_cycle 0 # Cycle when the warmup percentage was hit. 
-system.l2c.tags.occ_blocks::writebacks 40043.388352 # Average occupied blocks per requestor -system.l2c.tags.occ_blocks::cpu0.dtb.walker 2.667642 # Average occupied blocks per requestor +system.l2c.tags.occ_blocks::writebacks 40141.137275 # Average occupied blocks per requestor +system.l2c.tags.occ_blocks::cpu0.dtb.walker 0.000411 # Average occupied blocks per requestor system.l2c.tags.occ_blocks::cpu0.itb.walker 0.001544 # Average occupied blocks per requestor -system.l2c.tags.occ_blocks::cpu0.inst 4637.694613 # Average occupied blocks per requestor -system.l2c.tags.occ_blocks::cpu0.data 5787.547519 # Average occupied blocks per requestor -system.l2c.tags.occ_blocks::cpu1.itb.walker 0.001664 # Average occupied blocks per requestor -system.l2c.tags.occ_blocks::cpu1.inst 1927.667021 # Average occupied blocks per requestor -system.l2c.tags.occ_blocks::cpu1.data 755.746308 # Average occupied blocks per requestor -system.l2c.tags.occ_percent::writebacks 0.611014 # Average percentage of cache occupancy -system.l2c.tags.occ_percent::cpu0.dtb.walker 0.000041 # Average percentage of cache occupancy +system.l2c.tags.occ_blocks::cpu0.inst 3711.443492 # Average occupied blocks per requestor +system.l2c.tags.occ_blocks::cpu0.data 4231.516476 # Average occupied blocks per requestor +system.l2c.tags.occ_blocks::cpu1.dtb.walker 2.742470 # Average occupied blocks per requestor +system.l2c.tags.occ_blocks::cpu1.itb.walker 0.001688 # Average occupied blocks per requestor +system.l2c.tags.occ_blocks::cpu1.inst 2812.646868 # Average occupied blocks per requestor +system.l2c.tags.occ_blocks::cpu1.data 2058.946054 # Average occupied blocks per requestor +system.l2c.tags.occ_percent::writebacks 0.612505 # Average percentage of cache occupancy +system.l2c.tags.occ_percent::cpu0.dtb.walker 0.000000 # Average percentage of cache occupancy system.l2c.tags.occ_percent::cpu0.itb.walker 0.000000 # Average percentage of cache occupancy -system.l2c.tags.occ_percent::cpu0.inst 0.070766 # Average percentage of cache occupancy -system.l2c.tags.occ_percent::cpu0.data 0.088311 # Average percentage of cache occupancy +system.l2c.tags.occ_percent::cpu0.inst 0.056632 # Average percentage of cache occupancy +system.l2c.tags.occ_percent::cpu0.data 0.064568 # Average percentage of cache occupancy +system.l2c.tags.occ_percent::cpu1.dtb.walker 0.000042 # Average percentage of cache occupancy system.l2c.tags.occ_percent::cpu1.itb.walker 0.000000 # Average percentage of cache occupancy -system.l2c.tags.occ_percent::cpu1.inst 0.029414 # Average percentage of cache occupancy -system.l2c.tags.occ_percent::cpu1.data 0.011532 # Average percentage of cache occupancy -system.l2c.tags.occ_percent::total 0.811077 # Average percentage of cache occupancy -system.l2c.ReadReq_hits::cpu0.dtb.walker 4526 # number of ReadReq hits -system.l2c.ReadReq_hits::cpu0.itb.walker 1443 # number of ReadReq hits -system.l2c.ReadReq_hits::cpu0.inst 483144 # number of ReadReq hits -system.l2c.ReadReq_hits::cpu0.data 241974 # number of ReadReq hits -system.l2c.ReadReq_hits::cpu1.dtb.walker 3792 # number of ReadReq hits -system.l2c.ReadReq_hits::cpu1.itb.walker 1866 # number of ReadReq hits -system.l2c.ReadReq_hits::cpu1.inst 372505 # number of ReadReq hits -system.l2c.ReadReq_hits::cpu1.data 110561 # number of ReadReq hits -system.l2c.ReadReq_hits::total 1219811 # number of ReadReq hits -system.l2c.Writeback_hits::writebacks 576138 # number of Writeback hits -system.l2c.Writeback_hits::total 576138 # number of Writeback hits -system.l2c.UpgradeReq_hits::cpu0.data 
1247 # number of UpgradeReq hits -system.l2c.UpgradeReq_hits::cpu1.data 445 # number of UpgradeReq hits -system.l2c.UpgradeReq_hits::total 1692 # number of UpgradeReq hits -system.l2c.SCUpgradeReq_hits::cpu0.data 260 # number of SCUpgradeReq hits -system.l2c.SCUpgradeReq_hits::cpu1.data 101 # number of SCUpgradeReq hits -system.l2c.SCUpgradeReq_hits::total 361 # number of SCUpgradeReq hits -system.l2c.ReadExReq_hits::cpu0.data 65526 # number of ReadExReq hits -system.l2c.ReadExReq_hits::cpu1.data 45407 # number of ReadExReq hits -system.l2c.ReadExReq_hits::total 110933 # number of ReadExReq hits -system.l2c.demand_hits::cpu0.dtb.walker 4526 # number of demand (read+write) hits -system.l2c.demand_hits::cpu0.itb.walker 1443 # number of demand (read+write) hits -system.l2c.demand_hits::cpu0.inst 483144 # number of demand (read+write) hits -system.l2c.demand_hits::cpu0.data 307500 # number of demand (read+write) hits -system.l2c.demand_hits::cpu1.dtb.walker 3792 # number of demand (read+write) hits -system.l2c.demand_hits::cpu1.itb.walker 1866 # number of demand (read+write) hits -system.l2c.demand_hits::cpu1.inst 372505 # number of demand (read+write) hits -system.l2c.demand_hits::cpu1.data 155968 # number of demand (read+write) hits -system.l2c.demand_hits::total 1330744 # number of demand (read+write) hits -system.l2c.overall_hits::cpu0.dtb.walker 4526 # number of overall hits -system.l2c.overall_hits::cpu0.itb.walker 1443 # number of overall hits -system.l2c.overall_hits::cpu0.inst 483144 # number of overall hits -system.l2c.overall_hits::cpu0.data 307500 # number of overall hits -system.l2c.overall_hits::cpu1.dtb.walker 3792 # number of overall hits -system.l2c.overall_hits::cpu1.itb.walker 1866 # number of overall hits -system.l2c.overall_hits::cpu1.inst 372505 # number of overall hits -system.l2c.overall_hits::cpu1.data 155968 # number of overall hits -system.l2c.overall_hits::total 1330744 # number of overall hits -system.l2c.ReadReq_misses::cpu0.dtb.walker 4 # number of ReadReq misses +system.l2c.tags.occ_percent::cpu1.inst 0.042918 # Average percentage of cache occupancy +system.l2c.tags.occ_percent::cpu1.data 0.031417 # Average percentage of cache occupancy +system.l2c.tags.occ_percent::total 0.808082 # Average percentage of cache occupancy +system.l2c.tags.occ_task_id_blocks::1023 5 # Occupied blocks per task id +system.l2c.tags.occ_task_id_blocks::1024 65154 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1023::3 1 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1023::4 4 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::0 20 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::1 27 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::2 1929 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::3 8108 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::4 55070 # Occupied blocks per task id +system.l2c.tags.occ_task_id_percent::1023 0.000076 # Percentage of cache occupancy per task id +system.l2c.tags.occ_task_id_percent::1024 0.994171 # Percentage of cache occupancy per task id +system.l2c.tags.tag_accesses 17208079 # Number of tag accesses +system.l2c.tags.data_accesses 17208079 # Number of data accesses +system.l2c.ReadReq_hits::cpu0.dtb.walker 3830 # number of ReadReq hits +system.l2c.ReadReq_hits::cpu0.itb.walker 1752 # number of ReadReq hits +system.l2c.ReadReq_hits::cpu0.inst 419673 # number of ReadReq hits +system.l2c.ReadReq_hits::cpu0.data 
205846 # number of ReadReq hits +system.l2c.ReadReq_hits::cpu1.dtb.walker 5350 # number of ReadReq hits +system.l2c.ReadReq_hits::cpu1.itb.walker 1845 # number of ReadReq hits +system.l2c.ReadReq_hits::cpu1.inst 464495 # number of ReadReq hits +system.l2c.ReadReq_hits::cpu1.data 143269 # number of ReadReq hits +system.l2c.ReadReq_hits::total 1246060 # number of ReadReq hits +system.l2c.Writeback_hits::writebacks 570845 # number of Writeback hits +system.l2c.Writeback_hits::total 570845 # number of Writeback hits +system.l2c.UpgradeReq_hits::cpu0.data 1159 # number of UpgradeReq hits +system.l2c.UpgradeReq_hits::cpu1.data 560 # number of UpgradeReq hits +system.l2c.UpgradeReq_hits::total 1719 # number of UpgradeReq hits +system.l2c.SCUpgradeReq_hits::cpu0.data 214 # number of SCUpgradeReq hits +system.l2c.SCUpgradeReq_hits::cpu1.data 100 # number of SCUpgradeReq hits +system.l2c.SCUpgradeReq_hits::total 314 # number of SCUpgradeReq hits +system.l2c.ReadExReq_hits::cpu0.data 56634 # number of ReadExReq hits +system.l2c.ReadExReq_hits::cpu1.data 52596 # number of ReadExReq hits +system.l2c.ReadExReq_hits::total 109230 # number of ReadExReq hits +system.l2c.demand_hits::cpu0.dtb.walker 3830 # number of demand (read+write) hits +system.l2c.demand_hits::cpu0.itb.walker 1752 # number of demand (read+write) hits +system.l2c.demand_hits::cpu0.inst 419673 # number of demand (read+write) hits +system.l2c.demand_hits::cpu0.data 262480 # number of demand (read+write) hits +system.l2c.demand_hits::cpu1.dtb.walker 5350 # number of demand (read+write) hits +system.l2c.demand_hits::cpu1.itb.walker 1845 # number of demand (read+write) hits +system.l2c.demand_hits::cpu1.inst 464495 # number of demand (read+write) hits +system.l2c.demand_hits::cpu1.data 195865 # number of demand (read+write) hits +system.l2c.demand_hits::total 1355290 # number of demand (read+write) hits +system.l2c.overall_hits::cpu0.dtb.walker 3830 # number of overall hits +system.l2c.overall_hits::cpu0.itb.walker 1752 # number of overall hits +system.l2c.overall_hits::cpu0.inst 419673 # number of overall hits +system.l2c.overall_hits::cpu0.data 262480 # number of overall hits +system.l2c.overall_hits::cpu1.dtb.walker 5350 # number of overall hits +system.l2c.overall_hits::cpu1.itb.walker 1845 # number of overall hits +system.l2c.overall_hits::cpu1.inst 464495 # number of overall hits +system.l2c.overall_hits::cpu1.data 195865 # number of overall hits +system.l2c.overall_hits::total 1355290 # number of overall hits +system.l2c.ReadReq_misses::cpu0.dtb.walker 1 # number of ReadReq misses system.l2c.ReadReq_misses::cpu0.itb.walker 2 # number of ReadReq misses -system.l2c.ReadReq_misses::cpu0.inst 6832 # number of ReadReq misses -system.l2c.ReadReq_misses::cpu0.data 9714 # number of ReadReq misses +system.l2c.ReadReq_misses::cpu0.inst 5733 # number of ReadReq misses +system.l2c.ReadReq_misses::cpu0.data 7847 # number of ReadReq misses +system.l2c.ReadReq_misses::cpu1.dtb.walker 4 # number of ReadReq misses system.l2c.ReadReq_misses::cpu1.itb.walker 1 # number of ReadReq misses -system.l2c.ReadReq_misses::cpu1.inst 4001 # number of ReadReq misses -system.l2c.ReadReq_misses::cpu1.data 1890 # number of ReadReq misses -system.l2c.ReadReq_misses::total 22444 # number of ReadReq misses -system.l2c.UpgradeReq_misses::cpu0.data 3977 # number of UpgradeReq misses -system.l2c.UpgradeReq_misses::cpu1.data 3384 # number of UpgradeReq misses -system.l2c.UpgradeReq_misses::total 7361 # number of UpgradeReq misses -system.l2c.SCUpgradeReq_misses::cpu0.data 
388 # number of SCUpgradeReq misses -system.l2c.SCUpgradeReq_misses::cpu1.data 479 # number of SCUpgradeReq misses -system.l2c.SCUpgradeReq_misses::total 867 # number of SCUpgradeReq misses -system.l2c.ReadExReq_misses::cpu0.data 95136 # number of ReadExReq misses -system.l2c.ReadExReq_misses::cpu1.data 44594 # number of ReadExReq misses -system.l2c.ReadExReq_misses::total 139730 # number of ReadExReq misses -system.l2c.demand_misses::cpu0.dtb.walker 4 # number of demand (read+write) misses +system.l2c.ReadReq_misses::cpu1.inst 5057 # number of ReadReq misses +system.l2c.ReadReq_misses::cpu1.data 3618 # number of ReadReq misses +system.l2c.ReadReq_misses::total 22263 # number of ReadReq misses +system.l2c.UpgradeReq_misses::cpu0.data 4707 # number of UpgradeReq misses +system.l2c.UpgradeReq_misses::cpu1.data 3611 # number of UpgradeReq misses +system.l2c.UpgradeReq_misses::total 8318 # number of UpgradeReq misses +system.l2c.SCUpgradeReq_misses::cpu0.data 563 # number of SCUpgradeReq misses +system.l2c.SCUpgradeReq_misses::cpu1.data 483 # number of SCUpgradeReq misses +system.l2c.SCUpgradeReq_misses::total 1046 # number of SCUpgradeReq misses +system.l2c.ReadExReq_misses::cpu0.data 67314 # number of ReadExReq misses +system.l2c.ReadExReq_misses::cpu1.data 72460 # number of ReadExReq misses +system.l2c.ReadExReq_misses::total 139774 # number of ReadExReq misses +system.l2c.demand_misses::cpu0.dtb.walker 1 # number of demand (read+write) misses system.l2c.demand_misses::cpu0.itb.walker 2 # number of demand (read+write) misses -system.l2c.demand_misses::cpu0.inst 6832 # number of demand (read+write) misses -system.l2c.demand_misses::cpu0.data 104850 # number of demand (read+write) misses +system.l2c.demand_misses::cpu0.inst 5733 # number of demand (read+write) misses +system.l2c.demand_misses::cpu0.data 75161 # number of demand (read+write) misses +system.l2c.demand_misses::cpu1.dtb.walker 4 # number of demand (read+write) misses system.l2c.demand_misses::cpu1.itb.walker 1 # number of demand (read+write) misses -system.l2c.demand_misses::cpu1.inst 4001 # number of demand (read+write) misses -system.l2c.demand_misses::cpu1.data 46484 # number of demand (read+write) misses -system.l2c.demand_misses::total 162174 # number of demand (read+write) misses -system.l2c.overall_misses::cpu0.dtb.walker 4 # number of overall misses +system.l2c.demand_misses::cpu1.inst 5057 # number of demand (read+write) misses +system.l2c.demand_misses::cpu1.data 76078 # number of demand (read+write) misses +system.l2c.demand_misses::total 162037 # number of demand (read+write) misses +system.l2c.overall_misses::cpu0.dtb.walker 1 # number of overall misses system.l2c.overall_misses::cpu0.itb.walker 2 # number of overall misses -system.l2c.overall_misses::cpu0.inst 6832 # number of overall misses -system.l2c.overall_misses::cpu0.data 104850 # number of overall misses +system.l2c.overall_misses::cpu0.inst 5733 # number of overall misses +system.l2c.overall_misses::cpu0.data 75161 # number of overall misses +system.l2c.overall_misses::cpu1.dtb.walker 4 # number of overall misses system.l2c.overall_misses::cpu1.itb.walker 1 # number of overall misses -system.l2c.overall_misses::cpu1.inst 4001 # number of overall misses -system.l2c.overall_misses::cpu1.data 46484 # number of overall misses -system.l2c.overall_misses::total 162174 # number of overall misses -system.l2c.ReadReq_miss_latency::cpu0.dtb.walker 303000 # number of ReadReq miss cycles +system.l2c.overall_misses::cpu1.inst 5057 # number of overall misses 
+system.l2c.overall_misses::cpu1.data 76078 # number of overall misses +system.l2c.overall_misses::total 162037 # number of overall misses +system.l2c.ReadReq_miss_latency::cpu0.dtb.walker 32000 # number of ReadReq miss cycles system.l2c.ReadReq_miss_latency::cpu0.itb.walker 149500 # number of ReadReq miss cycles -system.l2c.ReadReq_miss_latency::cpu0.inst 496292750 # number of ReadReq miss cycles -system.l2c.ReadReq_miss_latency::cpu0.data 730648247 # number of ReadReq miss cycles +system.l2c.ReadReq_miss_latency::cpu0.inst 409552750 # number of ReadReq miss cycles +system.l2c.ReadReq_miss_latency::cpu0.data 583496999 # number of ReadReq miss cycles +system.l2c.ReadReq_miss_latency::cpu1.dtb.walker 333250 # number of ReadReq miss cycles system.l2c.ReadReq_miss_latency::cpu1.itb.walker 74500 # number of ReadReq miss cycles -system.l2c.ReadReq_miss_latency::cpu1.inst 288737750 # number of ReadReq miss cycles -system.l2c.ReadReq_miss_latency::cpu1.data 148289750 # number of ReadReq miss cycles -system.l2c.ReadReq_miss_latency::total 1664495497 # number of ReadReq miss cycles -system.l2c.UpgradeReq_miss_latency::cpu0.data 11403008 # number of UpgradeReq miss cycles -system.l2c.UpgradeReq_miss_latency::cpu1.data 12327473 # number of UpgradeReq miss cycles -system.l2c.UpgradeReq_miss_latency::total 23730481 # number of UpgradeReq miss cycles -system.l2c.SCUpgradeReq_miss_latency::cpu0.data 1953416 # number of SCUpgradeReq miss cycles -system.l2c.SCUpgradeReq_miss_latency::cpu1.data 1118452 # number of SCUpgradeReq miss cycles -system.l2c.SCUpgradeReq_miss_latency::total 3071868 # number of SCUpgradeReq miss cycles -system.l2c.ReadExReq_miss_latency::cpu0.data 6560641426 # number of ReadExReq miss cycles -system.l2c.ReadExReq_miss_latency::cpu1.data 3440385640 # number of ReadExReq miss cycles -system.l2c.ReadExReq_miss_latency::total 10001027066 # number of ReadExReq miss cycles -system.l2c.demand_miss_latency::cpu0.dtb.walker 303000 # number of demand (read+write) miss cycles +system.l2c.ReadReq_miss_latency::cpu1.inst 367800500 # number of ReadReq miss cycles +system.l2c.ReadReq_miss_latency::cpu1.data 284508500 # number of ReadReq miss cycles +system.l2c.ReadReq_miss_latency::total 1645947999 # number of ReadReq miss cycles +system.l2c.UpgradeReq_miss_latency::cpu0.data 13156432 # number of UpgradeReq miss cycles +system.l2c.UpgradeReq_miss_latency::cpu1.data 12072481 # number of UpgradeReq miss cycles +system.l2c.UpgradeReq_miss_latency::total 25228913 # number of UpgradeReq miss cycles +system.l2c.SCUpgradeReq_miss_latency::cpu0.data 1791423 # number of SCUpgradeReq miss cycles +system.l2c.SCUpgradeReq_miss_latency::cpu1.data 2509392 # number of SCUpgradeReq miss cycles +system.l2c.SCUpgradeReq_miss_latency::total 4300815 # number of SCUpgradeReq miss cycles +system.l2c.ReadExReq_miss_latency::cpu0.data 4530126409 # number of ReadExReq miss cycles +system.l2c.ReadExReq_miss_latency::cpu1.data 5459163398 # number of ReadExReq miss cycles +system.l2c.ReadExReq_miss_latency::total 9989289807 # number of ReadExReq miss cycles +system.l2c.demand_miss_latency::cpu0.dtb.walker 32000 # number of demand (read+write) miss cycles system.l2c.demand_miss_latency::cpu0.itb.walker 149500 # number of demand (read+write) miss cycles -system.l2c.demand_miss_latency::cpu0.inst 496292750 # number of demand (read+write) miss cycles -system.l2c.demand_miss_latency::cpu0.data 7291289673 # number of demand (read+write) miss cycles +system.l2c.demand_miss_latency::cpu0.inst 409552750 # number of demand 
(read+write) miss cycles +system.l2c.demand_miss_latency::cpu0.data 5113623408 # number of demand (read+write) miss cycles +system.l2c.demand_miss_latency::cpu1.dtb.walker 333250 # number of demand (read+write) miss cycles system.l2c.demand_miss_latency::cpu1.itb.walker 74500 # number of demand (read+write) miss cycles -system.l2c.demand_miss_latency::cpu1.inst 288737750 # number of demand (read+write) miss cycles -system.l2c.demand_miss_latency::cpu1.data 3588675390 # number of demand (read+write) miss cycles -system.l2c.demand_miss_latency::total 11665522563 # number of demand (read+write) miss cycles -system.l2c.overall_miss_latency::cpu0.dtb.walker 303000 # number of overall miss cycles +system.l2c.demand_miss_latency::cpu1.inst 367800500 # number of demand (read+write) miss cycles +system.l2c.demand_miss_latency::cpu1.data 5743671898 # number of demand (read+write) miss cycles +system.l2c.demand_miss_latency::total 11635237806 # number of demand (read+write) miss cycles +system.l2c.overall_miss_latency::cpu0.dtb.walker 32000 # number of overall miss cycles system.l2c.overall_miss_latency::cpu0.itb.walker 149500 # number of overall miss cycles -system.l2c.overall_miss_latency::cpu0.inst 496292750 # number of overall miss cycles -system.l2c.overall_miss_latency::cpu0.data 7291289673 # number of overall miss cycles +system.l2c.overall_miss_latency::cpu0.inst 409552750 # number of overall miss cycles +system.l2c.overall_miss_latency::cpu0.data 5113623408 # number of overall miss cycles +system.l2c.overall_miss_latency::cpu1.dtb.walker 333250 # number of overall miss cycles system.l2c.overall_miss_latency::cpu1.itb.walker 74500 # number of overall miss cycles -system.l2c.overall_miss_latency::cpu1.inst 288737750 # number of overall miss cycles -system.l2c.overall_miss_latency::cpu1.data 3588675390 # number of overall miss cycles -system.l2c.overall_miss_latency::total 11665522563 # number of overall miss cycles -system.l2c.ReadReq_accesses::cpu0.dtb.walker 4530 # number of ReadReq accesses(hits+misses) -system.l2c.ReadReq_accesses::cpu0.itb.walker 1445 # number of ReadReq accesses(hits+misses) -system.l2c.ReadReq_accesses::cpu0.inst 489976 # number of ReadReq accesses(hits+misses) -system.l2c.ReadReq_accesses::cpu0.data 251688 # number of ReadReq accesses(hits+misses) -system.l2c.ReadReq_accesses::cpu1.dtb.walker 3792 # number of ReadReq accesses(hits+misses) -system.l2c.ReadReq_accesses::cpu1.itb.walker 1867 # number of ReadReq accesses(hits+misses) -system.l2c.ReadReq_accesses::cpu1.inst 376506 # number of ReadReq accesses(hits+misses) -system.l2c.ReadReq_accesses::cpu1.data 112451 # number of ReadReq accesses(hits+misses) -system.l2c.ReadReq_accesses::total 1242255 # number of ReadReq accesses(hits+misses) -system.l2c.Writeback_accesses::writebacks 576138 # number of Writeback accesses(hits+misses) -system.l2c.Writeback_accesses::total 576138 # number of Writeback accesses(hits+misses) -system.l2c.UpgradeReq_accesses::cpu0.data 5224 # number of UpgradeReq accesses(hits+misses) -system.l2c.UpgradeReq_accesses::cpu1.data 3829 # number of UpgradeReq accesses(hits+misses) -system.l2c.UpgradeReq_accesses::total 9053 # number of UpgradeReq accesses(hits+misses) -system.l2c.SCUpgradeReq_accesses::cpu0.data 648 # number of SCUpgradeReq accesses(hits+misses) -system.l2c.SCUpgradeReq_accesses::cpu1.data 580 # number of SCUpgradeReq accesses(hits+misses) -system.l2c.SCUpgradeReq_accesses::total 1228 # number of SCUpgradeReq accesses(hits+misses) -system.l2c.ReadExReq_accesses::cpu0.data 160662 # 
number of ReadExReq accesses(hits+misses) -system.l2c.ReadExReq_accesses::cpu1.data 90001 # number of ReadExReq accesses(hits+misses) -system.l2c.ReadExReq_accesses::total 250663 # number of ReadExReq accesses(hits+misses) -system.l2c.demand_accesses::cpu0.dtb.walker 4530 # number of demand (read+write) accesses -system.l2c.demand_accesses::cpu0.itb.walker 1445 # number of demand (read+write) accesses -system.l2c.demand_accesses::cpu0.inst 489976 # number of demand (read+write) accesses -system.l2c.demand_accesses::cpu0.data 412350 # number of demand (read+write) accesses -system.l2c.demand_accesses::cpu1.dtb.walker 3792 # number of demand (read+write) accesses -system.l2c.demand_accesses::cpu1.itb.walker 1867 # number of demand (read+write) accesses -system.l2c.demand_accesses::cpu1.inst 376506 # number of demand (read+write) accesses -system.l2c.demand_accesses::cpu1.data 202452 # number of demand (read+write) accesses -system.l2c.demand_accesses::total 1492918 # number of demand (read+write) accesses -system.l2c.overall_accesses::cpu0.dtb.walker 4530 # number of overall (read+write) accesses -system.l2c.overall_accesses::cpu0.itb.walker 1445 # number of overall (read+write) accesses -system.l2c.overall_accesses::cpu0.inst 489976 # number of overall (read+write) accesses -system.l2c.overall_accesses::cpu0.data 412350 # number of overall (read+write) accesses -system.l2c.overall_accesses::cpu1.dtb.walker 3792 # number of overall (read+write) accesses -system.l2c.overall_accesses::cpu1.itb.walker 1867 # number of overall (read+write) accesses -system.l2c.overall_accesses::cpu1.inst 376506 # number of overall (read+write) accesses -system.l2c.overall_accesses::cpu1.data 202452 # number of overall (read+write) accesses -system.l2c.overall_accesses::total 1492918 # number of overall (read+write) accesses -system.l2c.ReadReq_miss_rate::cpu0.dtb.walker 0.000883 # miss rate for ReadReq accesses -system.l2c.ReadReq_miss_rate::cpu0.itb.walker 0.001384 # miss rate for ReadReq accesses -system.l2c.ReadReq_miss_rate::cpu0.inst 0.013944 # miss rate for ReadReq accesses -system.l2c.ReadReq_miss_rate::cpu0.data 0.038595 # miss rate for ReadReq accesses -system.l2c.ReadReq_miss_rate::cpu1.itb.walker 0.000536 # miss rate for ReadReq accesses -system.l2c.ReadReq_miss_rate::cpu1.inst 0.010627 # miss rate for ReadReq accesses -system.l2c.ReadReq_miss_rate::cpu1.data 0.016807 # miss rate for ReadReq accesses -system.l2c.ReadReq_miss_rate::total 0.018067 # miss rate for ReadReq accesses -system.l2c.UpgradeReq_miss_rate::cpu0.data 0.761294 # miss rate for UpgradeReq accesses -system.l2c.UpgradeReq_miss_rate::cpu1.data 0.883782 # miss rate for UpgradeReq accesses -system.l2c.UpgradeReq_miss_rate::total 0.813101 # miss rate for UpgradeReq accesses -system.l2c.SCUpgradeReq_miss_rate::cpu0.data 0.598765 # miss rate for SCUpgradeReq accesses -system.l2c.SCUpgradeReq_miss_rate::cpu1.data 0.825862 # miss rate for SCUpgradeReq accesses -system.l2c.SCUpgradeReq_miss_rate::total 0.706026 # miss rate for SCUpgradeReq accesses -system.l2c.ReadExReq_miss_rate::cpu0.data 0.592150 # miss rate for ReadExReq accesses -system.l2c.ReadExReq_miss_rate::cpu1.data 0.495483 # miss rate for ReadExReq accesses -system.l2c.ReadExReq_miss_rate::total 0.557442 # miss rate for ReadExReq accesses -system.l2c.demand_miss_rate::cpu0.dtb.walker 0.000883 # miss rate for demand accesses -system.l2c.demand_miss_rate::cpu0.itb.walker 0.001384 # miss rate for demand accesses -system.l2c.demand_miss_rate::cpu0.inst 0.013944 # miss rate for demand 
accesses -system.l2c.demand_miss_rate::cpu0.data 0.254274 # miss rate for demand accesses -system.l2c.demand_miss_rate::cpu1.itb.walker 0.000536 # miss rate for demand accesses -system.l2c.demand_miss_rate::cpu1.inst 0.010627 # miss rate for demand accesses -system.l2c.demand_miss_rate::cpu1.data 0.229605 # miss rate for demand accesses -system.l2c.demand_miss_rate::total 0.108629 # miss rate for demand accesses -system.l2c.overall_miss_rate::cpu0.dtb.walker 0.000883 # miss rate for overall accesses -system.l2c.overall_miss_rate::cpu0.itb.walker 0.001384 # miss rate for overall accesses -system.l2c.overall_miss_rate::cpu0.inst 0.013944 # miss rate for overall accesses -system.l2c.overall_miss_rate::cpu0.data 0.254274 # miss rate for overall accesses -system.l2c.overall_miss_rate::cpu1.itb.walker 0.000536 # miss rate for overall accesses -system.l2c.overall_miss_rate::cpu1.inst 0.010627 # miss rate for overall accesses -system.l2c.overall_miss_rate::cpu1.data 0.229605 # miss rate for overall accesses -system.l2c.overall_miss_rate::total 0.108629 # miss rate for overall accesses -system.l2c.ReadReq_avg_miss_latency::cpu0.dtb.walker 75750 # average ReadReq miss latency +system.l2c.overall_miss_latency::cpu1.inst 367800500 # number of overall miss cycles +system.l2c.overall_miss_latency::cpu1.data 5743671898 # number of overall miss cycles +system.l2c.overall_miss_latency::total 11635237806 # number of overall miss cycles +system.l2c.ReadReq_accesses::cpu0.dtb.walker 3831 # number of ReadReq accesses(hits+misses) +system.l2c.ReadReq_accesses::cpu0.itb.walker 1754 # number of ReadReq accesses(hits+misses) +system.l2c.ReadReq_accesses::cpu0.inst 425406 # number of ReadReq accesses(hits+misses) +system.l2c.ReadReq_accesses::cpu0.data 213693 # number of ReadReq accesses(hits+misses) +system.l2c.ReadReq_accesses::cpu1.dtb.walker 5354 # number of ReadReq accesses(hits+misses) +system.l2c.ReadReq_accesses::cpu1.itb.walker 1846 # number of ReadReq accesses(hits+misses) +system.l2c.ReadReq_accesses::cpu1.inst 469552 # number of ReadReq accesses(hits+misses) +system.l2c.ReadReq_accesses::cpu1.data 146887 # number of ReadReq accesses(hits+misses) +system.l2c.ReadReq_accesses::total 1268323 # number of ReadReq accesses(hits+misses) +system.l2c.Writeback_accesses::writebacks 570845 # number of Writeback accesses(hits+misses) +system.l2c.Writeback_accesses::total 570845 # number of Writeback accesses(hits+misses) +system.l2c.UpgradeReq_accesses::cpu0.data 5866 # number of UpgradeReq accesses(hits+misses) +system.l2c.UpgradeReq_accesses::cpu1.data 4171 # number of UpgradeReq accesses(hits+misses) +system.l2c.UpgradeReq_accesses::total 10037 # number of UpgradeReq accesses(hits+misses) +system.l2c.SCUpgradeReq_accesses::cpu0.data 777 # number of SCUpgradeReq accesses(hits+misses) +system.l2c.SCUpgradeReq_accesses::cpu1.data 583 # number of SCUpgradeReq accesses(hits+misses) +system.l2c.SCUpgradeReq_accesses::total 1360 # number of SCUpgradeReq accesses(hits+misses) +system.l2c.ReadExReq_accesses::cpu0.data 123948 # number of ReadExReq accesses(hits+misses) +system.l2c.ReadExReq_accesses::cpu1.data 125056 # number of ReadExReq accesses(hits+misses) +system.l2c.ReadExReq_accesses::total 249004 # number of ReadExReq accesses(hits+misses) +system.l2c.demand_accesses::cpu0.dtb.walker 3831 # number of demand (read+write) accesses +system.l2c.demand_accesses::cpu0.itb.walker 1754 # number of demand (read+write) accesses +system.l2c.demand_accesses::cpu0.inst 425406 # number of demand (read+write) accesses 
+system.l2c.demand_accesses::cpu0.data 337641 # number of demand (read+write) accesses +system.l2c.demand_accesses::cpu1.dtb.walker 5354 # number of demand (read+write) accesses +system.l2c.demand_accesses::cpu1.itb.walker 1846 # number of demand (read+write) accesses +system.l2c.demand_accesses::cpu1.inst 469552 # number of demand (read+write) accesses +system.l2c.demand_accesses::cpu1.data 271943 # number of demand (read+write) accesses +system.l2c.demand_accesses::total 1517327 # number of demand (read+write) accesses +system.l2c.overall_accesses::cpu0.dtb.walker 3831 # number of overall (read+write) accesses +system.l2c.overall_accesses::cpu0.itb.walker 1754 # number of overall (read+write) accesses +system.l2c.overall_accesses::cpu0.inst 425406 # number of overall (read+write) accesses +system.l2c.overall_accesses::cpu0.data 337641 # number of overall (read+write) accesses +system.l2c.overall_accesses::cpu1.dtb.walker 5354 # number of overall (read+write) accesses +system.l2c.overall_accesses::cpu1.itb.walker 1846 # number of overall (read+write) accesses +system.l2c.overall_accesses::cpu1.inst 469552 # number of overall (read+write) accesses +system.l2c.overall_accesses::cpu1.data 271943 # number of overall (read+write) accesses +system.l2c.overall_accesses::total 1517327 # number of overall (read+write) accesses +system.l2c.ReadReq_miss_rate::cpu0.dtb.walker 0.000261 # miss rate for ReadReq accesses +system.l2c.ReadReq_miss_rate::cpu0.itb.walker 0.001140 # miss rate for ReadReq accesses +system.l2c.ReadReq_miss_rate::cpu0.inst 0.013477 # miss rate for ReadReq accesses +system.l2c.ReadReq_miss_rate::cpu0.data 0.036721 # miss rate for ReadReq accesses +system.l2c.ReadReq_miss_rate::cpu1.dtb.walker 0.000747 # miss rate for ReadReq accesses +system.l2c.ReadReq_miss_rate::cpu1.itb.walker 0.000542 # miss rate for ReadReq accesses +system.l2c.ReadReq_miss_rate::cpu1.inst 0.010770 # miss rate for ReadReq accesses +system.l2c.ReadReq_miss_rate::cpu1.data 0.024631 # miss rate for ReadReq accesses +system.l2c.ReadReq_miss_rate::total 0.017553 # miss rate for ReadReq accesses +system.l2c.UpgradeReq_miss_rate::cpu0.data 0.802421 # miss rate for UpgradeReq accesses +system.l2c.UpgradeReq_miss_rate::cpu1.data 0.865740 # miss rate for UpgradeReq accesses +system.l2c.UpgradeReq_miss_rate::total 0.828734 # miss rate for UpgradeReq accesses +system.l2c.SCUpgradeReq_miss_rate::cpu0.data 0.724582 # miss rate for SCUpgradeReq accesses +system.l2c.SCUpgradeReq_miss_rate::cpu1.data 0.828473 # miss rate for SCUpgradeReq accesses +system.l2c.SCUpgradeReq_miss_rate::total 0.769118 # miss rate for SCUpgradeReq accesses +system.l2c.ReadExReq_miss_rate::cpu0.data 0.543083 # miss rate for ReadExReq accesses +system.l2c.ReadExReq_miss_rate::cpu1.data 0.579420 # miss rate for ReadExReq accesses +system.l2c.ReadExReq_miss_rate::total 0.561332 # miss rate for ReadExReq accesses +system.l2c.demand_miss_rate::cpu0.dtb.walker 0.000261 # miss rate for demand accesses +system.l2c.demand_miss_rate::cpu0.itb.walker 0.001140 # miss rate for demand accesses +system.l2c.demand_miss_rate::cpu0.inst 0.013477 # miss rate for demand accesses +system.l2c.demand_miss_rate::cpu0.data 0.222606 # miss rate for demand accesses +system.l2c.demand_miss_rate::cpu1.dtb.walker 0.000747 # miss rate for demand accesses +system.l2c.demand_miss_rate::cpu1.itb.walker 0.000542 # miss rate for demand accesses +system.l2c.demand_miss_rate::cpu1.inst 0.010770 # miss rate for demand accesses +system.l2c.demand_miss_rate::cpu1.data 0.279757 # miss rate 
for demand accesses +system.l2c.demand_miss_rate::total 0.106791 # miss rate for demand accesses +system.l2c.overall_miss_rate::cpu0.dtb.walker 0.000261 # miss rate for overall accesses +system.l2c.overall_miss_rate::cpu0.itb.walker 0.001140 # miss rate for overall accesses +system.l2c.overall_miss_rate::cpu0.inst 0.013477 # miss rate for overall accesses +system.l2c.overall_miss_rate::cpu0.data 0.222606 # miss rate for overall accesses +system.l2c.overall_miss_rate::cpu1.dtb.walker 0.000747 # miss rate for overall accesses +system.l2c.overall_miss_rate::cpu1.itb.walker 0.000542 # miss rate for overall accesses +system.l2c.overall_miss_rate::cpu1.inst 0.010770 # miss rate for overall accesses +system.l2c.overall_miss_rate::cpu1.data 0.279757 # miss rate for overall accesses +system.l2c.overall_miss_rate::total 0.106791 # miss rate for overall accesses +system.l2c.ReadReq_avg_miss_latency::cpu0.dtb.walker 32000 # average ReadReq miss latency system.l2c.ReadReq_avg_miss_latency::cpu0.itb.walker 74750 # average ReadReq miss latency -system.l2c.ReadReq_avg_miss_latency::cpu0.inst 72642.381440 # average ReadReq miss latency -system.l2c.ReadReq_avg_miss_latency::cpu0.data 75216.002368 # average ReadReq miss latency +system.l2c.ReadReq_avg_miss_latency::cpu0.inst 71437.772545 # average ReadReq miss latency +system.l2c.ReadReq_avg_miss_latency::cpu0.data 74359.245444 # average ReadReq miss latency +system.l2c.ReadReq_avg_miss_latency::cpu1.dtb.walker 83312.500000 # average ReadReq miss latency system.l2c.ReadReq_avg_miss_latency::cpu1.itb.walker 74500 # average ReadReq miss latency -system.l2c.ReadReq_avg_miss_latency::cpu1.inst 72166.395901 # average ReadReq miss latency -system.l2c.ReadReq_avg_miss_latency::cpu1.data 78460.185185 # average ReadReq miss latency -system.l2c.ReadReq_avg_miss_latency::total 74162.159018 # average ReadReq miss latency -system.l2c.UpgradeReq_avg_miss_latency::cpu0.data 2867.238622 # average UpgradeReq miss latency -system.l2c.UpgradeReq_avg_miss_latency::cpu1.data 3642.870272 # average UpgradeReq miss latency -system.l2c.UpgradeReq_avg_miss_latency::total 3223.812118 # average UpgradeReq miss latency -system.l2c.SCUpgradeReq_avg_miss_latency::cpu0.data 5034.577320 # average SCUpgradeReq miss latency -system.l2c.SCUpgradeReq_avg_miss_latency::cpu1.data 2334.972860 # average SCUpgradeReq miss latency -system.l2c.SCUpgradeReq_avg_miss_latency::total 3543.100346 # average SCUpgradeReq miss latency -system.l2c.ReadExReq_avg_miss_latency::cpu0.data 68960.660801 # average ReadExReq miss latency -system.l2c.ReadExReq_avg_miss_latency::cpu1.data 77149.070279 # average ReadExReq miss latency -system.l2c.ReadExReq_avg_miss_latency::total 71573.943076 # average ReadExReq miss latency -system.l2c.demand_avg_miss_latency::cpu0.dtb.walker 75750 # average overall miss latency +system.l2c.ReadReq_avg_miss_latency::cpu1.inst 72730.966976 # average ReadReq miss latency +system.l2c.ReadReq_avg_miss_latency::cpu1.data 78636.954118 # average ReadReq miss latency +system.l2c.ReadReq_avg_miss_latency::total 73931.994745 # average ReadReq miss latency +system.l2c.UpgradeReq_avg_miss_latency::cpu0.data 2795.077969 # average UpgradeReq miss latency +system.l2c.UpgradeReq_avg_miss_latency::cpu1.data 3343.251454 # average UpgradeReq miss latency +system.l2c.UpgradeReq_avg_miss_latency::total 3033.050373 # average UpgradeReq miss latency +system.l2c.SCUpgradeReq_avg_miss_latency::cpu0.data 3181.923623 # average SCUpgradeReq miss latency +system.l2c.SCUpgradeReq_avg_miss_latency::cpu1.data 
5195.428571 # average SCUpgradeReq miss latency +system.l2c.SCUpgradeReq_avg_miss_latency::total 4111.677820 # average SCUpgradeReq miss latency +system.l2c.ReadExReq_avg_miss_latency::cpu0.data 67298.428395 # average ReadExReq miss latency +system.l2c.ReadExReq_avg_miss_latency::cpu1.data 75340.372592 # average ReadExReq miss latency +system.l2c.ReadExReq_avg_miss_latency::total 71467.438916 # average ReadExReq miss latency +system.l2c.demand_avg_miss_latency::cpu0.dtb.walker 32000 # average overall miss latency system.l2c.demand_avg_miss_latency::cpu0.itb.walker 74750 # average overall miss latency -system.l2c.demand_avg_miss_latency::cpu0.inst 72642.381440 # average overall miss latency -system.l2c.demand_avg_miss_latency::cpu0.data 69540.197167 # average overall miss latency +system.l2c.demand_avg_miss_latency::cpu0.inst 71437.772545 # average overall miss latency +system.l2c.demand_avg_miss_latency::cpu0.data 68035.595695 # average overall miss latency +system.l2c.demand_avg_miss_latency::cpu1.dtb.walker 83312.500000 # average overall miss latency system.l2c.demand_avg_miss_latency::cpu1.itb.walker 74500 # average overall miss latency -system.l2c.demand_avg_miss_latency::cpu1.inst 72166.395901 # average overall miss latency -system.l2c.demand_avg_miss_latency::cpu1.data 77202.379098 # average overall miss latency -system.l2c.demand_avg_miss_latency::total 71932.138092 # average overall miss latency -system.l2c.overall_avg_miss_latency::cpu0.dtb.walker 75750 # average overall miss latency +system.l2c.demand_avg_miss_latency::cpu1.inst 72730.966976 # average overall miss latency +system.l2c.demand_avg_miss_latency::cpu1.data 75497.146324 # average overall miss latency +system.l2c.demand_avg_miss_latency::total 71806.055444 # average overall miss latency +system.l2c.overall_avg_miss_latency::cpu0.dtb.walker 32000 # average overall miss latency system.l2c.overall_avg_miss_latency::cpu0.itb.walker 74750 # average overall miss latency -system.l2c.overall_avg_miss_latency::cpu0.inst 72642.381440 # average overall miss latency -system.l2c.overall_avg_miss_latency::cpu0.data 69540.197167 # average overall miss latency +system.l2c.overall_avg_miss_latency::cpu0.inst 71437.772545 # average overall miss latency +system.l2c.overall_avg_miss_latency::cpu0.data 68035.595695 # average overall miss latency +system.l2c.overall_avg_miss_latency::cpu1.dtb.walker 83312.500000 # average overall miss latency system.l2c.overall_avg_miss_latency::cpu1.itb.walker 74500 # average overall miss latency -system.l2c.overall_avg_miss_latency::cpu1.inst 72166.395901 # average overall miss latency -system.l2c.overall_avg_miss_latency::cpu1.data 77202.379098 # average overall miss latency -system.l2c.overall_avg_miss_latency::total 71932.138092 # average overall miss latency +system.l2c.overall_avg_miss_latency::cpu1.inst 72730.966976 # average overall miss latency +system.l2c.overall_avg_miss_latency::cpu1.data 75497.146324 # average overall miss latency +system.l2c.overall_avg_miss_latency::total 71806.055444 # average overall miss latency system.l2c.blocked_cycles::no_mshrs 0 # number of cycles access was blocked system.l2c.blocked_cycles::no_targets 0 # number of cycles access was blocked system.l2c.blocked::no_mshrs 0 # number of cycles access was blocked @@ -899,159 +926,171 @@ system.l2c.avg_blocked_cycles::no_mshrs nan # av system.l2c.avg_blocked_cycles::no_targets nan # average number of cycles each access was blocked system.l2c.fast_writes 0 # number of fast writes performed system.l2c.cache_copies 0 # number 
of cache copies performed -system.l2c.writebacks::writebacks 64627 # number of writebacks -system.l2c.writebacks::total 64627 # number of writebacks +system.l2c.writebacks::writebacks 64262 # number of writebacks +system.l2c.writebacks::total 64262 # number of writebacks system.l2c.ReadReq_mshr_hits::cpu0.inst 1 # number of ReadReq MSHR hits system.l2c.ReadReq_mshr_hits::total 1 # number of ReadReq MSHR hits system.l2c.demand_mshr_hits::cpu0.inst 1 # number of demand (read+write) MSHR hits system.l2c.demand_mshr_hits::total 1 # number of demand (read+write) MSHR hits system.l2c.overall_mshr_hits::cpu0.inst 1 # number of overall MSHR hits system.l2c.overall_mshr_hits::total 1 # number of overall MSHR hits -system.l2c.ReadReq_mshr_misses::cpu0.dtb.walker 4 # number of ReadReq MSHR misses +system.l2c.ReadReq_mshr_misses::cpu0.dtb.walker 1 # number of ReadReq MSHR misses system.l2c.ReadReq_mshr_misses::cpu0.itb.walker 2 # number of ReadReq MSHR misses -system.l2c.ReadReq_mshr_misses::cpu0.inst 6831 # number of ReadReq MSHR misses -system.l2c.ReadReq_mshr_misses::cpu0.data 9714 # number of ReadReq MSHR misses +system.l2c.ReadReq_mshr_misses::cpu0.inst 5732 # number of ReadReq MSHR misses +system.l2c.ReadReq_mshr_misses::cpu0.data 7847 # number of ReadReq MSHR misses +system.l2c.ReadReq_mshr_misses::cpu1.dtb.walker 4 # number of ReadReq MSHR misses system.l2c.ReadReq_mshr_misses::cpu1.itb.walker 1 # number of ReadReq MSHR misses -system.l2c.ReadReq_mshr_misses::cpu1.inst 4001 # number of ReadReq MSHR misses -system.l2c.ReadReq_mshr_misses::cpu1.data 1890 # number of ReadReq MSHR misses -system.l2c.ReadReq_mshr_misses::total 22443 # number of ReadReq MSHR misses -system.l2c.UpgradeReq_mshr_misses::cpu0.data 3977 # number of UpgradeReq MSHR misses -system.l2c.UpgradeReq_mshr_misses::cpu1.data 3384 # number of UpgradeReq MSHR misses -system.l2c.UpgradeReq_mshr_misses::total 7361 # number of UpgradeReq MSHR misses -system.l2c.SCUpgradeReq_mshr_misses::cpu0.data 388 # number of SCUpgradeReq MSHR misses -system.l2c.SCUpgradeReq_mshr_misses::cpu1.data 479 # number of SCUpgradeReq MSHR misses -system.l2c.SCUpgradeReq_mshr_misses::total 867 # number of SCUpgradeReq MSHR misses -system.l2c.ReadExReq_mshr_misses::cpu0.data 95136 # number of ReadExReq MSHR misses -system.l2c.ReadExReq_mshr_misses::cpu1.data 44594 # number of ReadExReq MSHR misses -system.l2c.ReadExReq_mshr_misses::total 139730 # number of ReadExReq MSHR misses -system.l2c.demand_mshr_misses::cpu0.dtb.walker 4 # number of demand (read+write) MSHR misses +system.l2c.ReadReq_mshr_misses::cpu1.inst 5057 # number of ReadReq MSHR misses +system.l2c.ReadReq_mshr_misses::cpu1.data 3618 # number of ReadReq MSHR misses +system.l2c.ReadReq_mshr_misses::total 22262 # number of ReadReq MSHR misses +system.l2c.UpgradeReq_mshr_misses::cpu0.data 4707 # number of UpgradeReq MSHR misses +system.l2c.UpgradeReq_mshr_misses::cpu1.data 3611 # number of UpgradeReq MSHR misses +system.l2c.UpgradeReq_mshr_misses::total 8318 # number of UpgradeReq MSHR misses +system.l2c.SCUpgradeReq_mshr_misses::cpu0.data 563 # number of SCUpgradeReq MSHR misses +system.l2c.SCUpgradeReq_mshr_misses::cpu1.data 483 # number of SCUpgradeReq MSHR misses +system.l2c.SCUpgradeReq_mshr_misses::total 1046 # number of SCUpgradeReq MSHR misses +system.l2c.ReadExReq_mshr_misses::cpu0.data 67314 # number of ReadExReq MSHR misses +system.l2c.ReadExReq_mshr_misses::cpu1.data 72460 # number of ReadExReq MSHR misses +system.l2c.ReadExReq_mshr_misses::total 139774 # number of ReadExReq MSHR misses 
+system.l2c.demand_mshr_misses::cpu0.dtb.walker 1 # number of demand (read+write) MSHR misses system.l2c.demand_mshr_misses::cpu0.itb.walker 2 # number of demand (read+write) MSHR misses -system.l2c.demand_mshr_misses::cpu0.inst 6831 # number of demand (read+write) MSHR misses -system.l2c.demand_mshr_misses::cpu0.data 104850 # number of demand (read+write) MSHR misses +system.l2c.demand_mshr_misses::cpu0.inst 5732 # number of demand (read+write) MSHR misses +system.l2c.demand_mshr_misses::cpu0.data 75161 # number of demand (read+write) MSHR misses +system.l2c.demand_mshr_misses::cpu1.dtb.walker 4 # number of demand (read+write) MSHR misses system.l2c.demand_mshr_misses::cpu1.itb.walker 1 # number of demand (read+write) MSHR misses -system.l2c.demand_mshr_misses::cpu1.inst 4001 # number of demand (read+write) MSHR misses -system.l2c.demand_mshr_misses::cpu1.data 46484 # number of demand (read+write) MSHR misses -system.l2c.demand_mshr_misses::total 162173 # number of demand (read+write) MSHR misses -system.l2c.overall_mshr_misses::cpu0.dtb.walker 4 # number of overall MSHR misses +system.l2c.demand_mshr_misses::cpu1.inst 5057 # number of demand (read+write) MSHR misses +system.l2c.demand_mshr_misses::cpu1.data 76078 # number of demand (read+write) MSHR misses +system.l2c.demand_mshr_misses::total 162036 # number of demand (read+write) MSHR misses +system.l2c.overall_mshr_misses::cpu0.dtb.walker 1 # number of overall MSHR misses system.l2c.overall_mshr_misses::cpu0.itb.walker 2 # number of overall MSHR misses -system.l2c.overall_mshr_misses::cpu0.inst 6831 # number of overall MSHR misses -system.l2c.overall_mshr_misses::cpu0.data 104850 # number of overall MSHR misses +system.l2c.overall_mshr_misses::cpu0.inst 5732 # number of overall MSHR misses +system.l2c.overall_mshr_misses::cpu0.data 75161 # number of overall MSHR misses +system.l2c.overall_mshr_misses::cpu1.dtb.walker 4 # number of overall MSHR misses system.l2c.overall_mshr_misses::cpu1.itb.walker 1 # number of overall MSHR misses -system.l2c.overall_mshr_misses::cpu1.inst 4001 # number of overall MSHR misses -system.l2c.overall_mshr_misses::cpu1.data 46484 # number of overall MSHR misses -system.l2c.overall_mshr_misses::total 162173 # number of overall MSHR misses -system.l2c.ReadReq_mshr_miss_latency::cpu0.dtb.walker 254500 # number of ReadReq MSHR miss cycles +system.l2c.overall_mshr_misses::cpu1.inst 5057 # number of overall MSHR misses +system.l2c.overall_mshr_misses::cpu1.data 76078 # number of overall MSHR misses +system.l2c.overall_mshr_misses::total 162036 # number of overall MSHR misses +system.l2c.ReadReq_mshr_miss_latency::cpu0.dtb.walker 20000 # number of ReadReq MSHR miss cycles system.l2c.ReadReq_mshr_miss_latency::cpu0.itb.walker 125000 # number of ReadReq MSHR miss cycles -system.l2c.ReadReq_mshr_miss_latency::cpu0.inst 409725000 # number of ReadReq MSHR miss cycles -system.l2c.ReadReq_mshr_miss_latency::cpu0.data 609561247 # number of ReadReq MSHR miss cycles +system.l2c.ReadReq_mshr_miss_latency::cpu0.inst 336863000 # number of ReadReq MSHR miss cycles +system.l2c.ReadReq_mshr_miss_latency::cpu0.data 485632499 # number of ReadReq MSHR miss cycles +system.l2c.ReadReq_mshr_miss_latency::cpu1.dtb.walker 283250 # number of ReadReq MSHR miss cycles system.l2c.ReadReq_mshr_miss_latency::cpu1.itb.walker 62500 # number of ReadReq MSHR miss cycles -system.l2c.ReadReq_mshr_miss_latency::cpu1.inst 238005250 # number of ReadReq MSHR miss cycles -system.l2c.ReadReq_mshr_miss_latency::cpu1.data 124849250 # number of ReadReq MSHR 
miss cycles -system.l2c.ReadReq_mshr_miss_latency::total 1382582747 # number of ReadReq MSHR miss cycles -system.l2c.UpgradeReq_mshr_miss_latency::cpu0.data 39807972 # number of UpgradeReq MSHR miss cycles -system.l2c.UpgradeReq_mshr_miss_latency::cpu1.data 33948871 # number of UpgradeReq MSHR miss cycles -system.l2c.UpgradeReq_mshr_miss_latency::total 73756843 # number of UpgradeReq MSHR miss cycles -system.l2c.SCUpgradeReq_mshr_miss_latency::cpu0.data 3881387 # number of SCUpgradeReq MSHR miss cycles -system.l2c.SCUpgradeReq_mshr_miss_latency::cpu1.data 4800478 # number of SCUpgradeReq MSHR miss cycles -system.l2c.SCUpgradeReq_mshr_miss_latency::total 8681865 # number of SCUpgradeReq MSHR miss cycles -system.l2c.ReadExReq_mshr_miss_latency::cpu0.data 5331965068 # number of ReadExReq MSHR miss cycles -system.l2c.ReadExReq_mshr_miss_latency::cpu1.data 2878373354 # number of ReadExReq MSHR miss cycles -system.l2c.ReadExReq_mshr_miss_latency::total 8210338422 # number of ReadExReq MSHR miss cycles -system.l2c.demand_mshr_miss_latency::cpu0.dtb.walker 254500 # number of demand (read+write) MSHR miss cycles +system.l2c.ReadReq_mshr_miss_latency::cpu1.inst 303733000 # number of ReadReq MSHR miss cycles +system.l2c.ReadReq_mshr_miss_latency::cpu1.data 239532500 # number of ReadReq MSHR miss cycles +system.l2c.ReadReq_mshr_miss_latency::total 1366251749 # number of ReadReq MSHR miss cycles +system.l2c.UpgradeReq_mshr_miss_latency::cpu0.data 47084205 # number of UpgradeReq MSHR miss cycles +system.l2c.UpgradeReq_mshr_miss_latency::cpu1.data 36188103 # number of UpgradeReq MSHR miss cycles +system.l2c.UpgradeReq_mshr_miss_latency::total 83272308 # number of UpgradeReq MSHR miss cycles +system.l2c.SCUpgradeReq_mshr_miss_latency::cpu0.data 5633061 # number of SCUpgradeReq MSHR miss cycles +system.l2c.SCUpgradeReq_mshr_miss_latency::cpu1.data 4838482 # number of SCUpgradeReq MSHR miss cycles +system.l2c.SCUpgradeReq_mshr_miss_latency::total 10471543 # number of SCUpgradeReq MSHR miss cycles +system.l2c.ReadExReq_mshr_miss_latency::cpu0.data 3661300587 # number of ReadExReq MSHR miss cycles +system.l2c.ReadExReq_mshr_miss_latency::cpu1.data 4536666102 # number of ReadExReq MSHR miss cycles +system.l2c.ReadExReq_mshr_miss_latency::total 8197966689 # number of ReadExReq MSHR miss cycles +system.l2c.demand_mshr_miss_latency::cpu0.dtb.walker 20000 # number of demand (read+write) MSHR miss cycles system.l2c.demand_mshr_miss_latency::cpu0.itb.walker 125000 # number of demand (read+write) MSHR miss cycles -system.l2c.demand_mshr_miss_latency::cpu0.inst 409725000 # number of demand (read+write) MSHR miss cycles -system.l2c.demand_mshr_miss_latency::cpu0.data 5941526315 # number of demand (read+write) MSHR miss cycles +system.l2c.demand_mshr_miss_latency::cpu0.inst 336863000 # number of demand (read+write) MSHR miss cycles +system.l2c.demand_mshr_miss_latency::cpu0.data 4146933086 # number of demand (read+write) MSHR miss cycles +system.l2c.demand_mshr_miss_latency::cpu1.dtb.walker 283250 # number of demand (read+write) MSHR miss cycles system.l2c.demand_mshr_miss_latency::cpu1.itb.walker 62500 # number of demand (read+write) MSHR miss cycles -system.l2c.demand_mshr_miss_latency::cpu1.inst 238005250 # number of demand (read+write) MSHR miss cycles -system.l2c.demand_mshr_miss_latency::cpu1.data 3003222604 # number of demand (read+write) MSHR miss cycles -system.l2c.demand_mshr_miss_latency::total 9592921169 # number of demand (read+write) MSHR miss cycles -system.l2c.overall_mshr_miss_latency::cpu0.dtb.walker 
254500 # number of overall MSHR miss cycles +system.l2c.demand_mshr_miss_latency::cpu1.inst 303733000 # number of demand (read+write) MSHR miss cycles +system.l2c.demand_mshr_miss_latency::cpu1.data 4776198602 # number of demand (read+write) MSHR miss cycles +system.l2c.demand_mshr_miss_latency::total 9564218438 # number of demand (read+write) MSHR miss cycles +system.l2c.overall_mshr_miss_latency::cpu0.dtb.walker 20000 # number of overall MSHR miss cycles system.l2c.overall_mshr_miss_latency::cpu0.itb.walker 125000 # number of overall MSHR miss cycles -system.l2c.overall_mshr_miss_latency::cpu0.inst 409725000 # number of overall MSHR miss cycles -system.l2c.overall_mshr_miss_latency::cpu0.data 5941526315 # number of overall MSHR miss cycles +system.l2c.overall_mshr_miss_latency::cpu0.inst 336863000 # number of overall MSHR miss cycles +system.l2c.overall_mshr_miss_latency::cpu0.data 4146933086 # number of overall MSHR miss cycles +system.l2c.overall_mshr_miss_latency::cpu1.dtb.walker 283250 # number of overall MSHR miss cycles system.l2c.overall_mshr_miss_latency::cpu1.itb.walker 62500 # number of overall MSHR miss cycles -system.l2c.overall_mshr_miss_latency::cpu1.inst 238005250 # number of overall MSHR miss cycles -system.l2c.overall_mshr_miss_latency::cpu1.data 3003222604 # number of overall MSHR miss cycles -system.l2c.overall_mshr_miss_latency::total 9592921169 # number of overall MSHR miss cycles +system.l2c.overall_mshr_miss_latency::cpu1.inst 303733000 # number of overall MSHR miss cycles +system.l2c.overall_mshr_miss_latency::cpu1.data 4776198602 # number of overall MSHR miss cycles +system.l2c.overall_mshr_miss_latency::total 9564218438 # number of overall MSHR miss cycles system.l2c.ReadReq_mshr_uncacheable_latency::cpu0.inst 344713750 # number of ReadReq MSHR uncacheable cycles -system.l2c.ReadReq_mshr_uncacheable_latency::cpu0.data 12649169494 # number of ReadReq MSHR uncacheable cycles +system.l2c.ReadReq_mshr_uncacheable_latency::cpu0.data 12451303994 # number of ReadReq MSHR uncacheable cycles system.l2c.ReadReq_mshr_uncacheable_latency::cpu1.inst 5149250 # number of ReadReq MSHR uncacheable cycles -system.l2c.ReadReq_mshr_uncacheable_latency::cpu1.data 154086197998 # number of ReadReq MSHR uncacheable cycles -system.l2c.ReadReq_mshr_uncacheable_latency::total 167085230492 # number of ReadReq MSHR uncacheable cycles -system.l2c.WriteReq_mshr_uncacheable_latency::cpu0.data 16272194135 # number of WriteReq MSHR uncacheable cycles -system.l2c.WriteReq_mshr_uncacheable_latency::cpu1.data 486232500 # number of WriteReq MSHR uncacheable cycles -system.l2c.WriteReq_mshr_uncacheable_latency::total 16758426635 # number of WriteReq MSHR uncacheable cycles +system.l2c.ReadReq_mshr_uncacheable_latency::cpu1.data 154289743997 # number of ReadReq MSHR uncacheable cycles +system.l2c.ReadReq_mshr_uncacheable_latency::total 167090910991 # number of ReadReq MSHR uncacheable cycles +system.l2c.WriteReq_mshr_uncacheable_latency::cpu0.data 1043284995 # number of WriteReq MSHR uncacheable cycles +system.l2c.WriteReq_mshr_uncacheable_latency::cpu1.data 15722213658 # number of WriteReq MSHR uncacheable cycles +system.l2c.WriteReq_mshr_uncacheable_latency::total 16765498653 # number of WriteReq MSHR uncacheable cycles system.l2c.overall_mshr_uncacheable_latency::cpu0.inst 344713750 # number of overall MSHR uncacheable cycles -system.l2c.overall_mshr_uncacheable_latency::cpu0.data 28921363629 # number of overall MSHR uncacheable cycles +system.l2c.overall_mshr_uncacheable_latency::cpu0.data 
13494588989 # number of overall MSHR uncacheable cycles system.l2c.overall_mshr_uncacheable_latency::cpu1.inst 5149250 # number of overall MSHR uncacheable cycles -system.l2c.overall_mshr_uncacheable_latency::cpu1.data 154572430498 # number of overall MSHR uncacheable cycles -system.l2c.overall_mshr_uncacheable_latency::total 183843657127 # number of overall MSHR uncacheable cycles -system.l2c.ReadReq_mshr_miss_rate::cpu0.dtb.walker 0.000883 # mshr miss rate for ReadReq accesses -system.l2c.ReadReq_mshr_miss_rate::cpu0.itb.walker 0.001384 # mshr miss rate for ReadReq accesses -system.l2c.ReadReq_mshr_miss_rate::cpu0.inst 0.013941 # mshr miss rate for ReadReq accesses -system.l2c.ReadReq_mshr_miss_rate::cpu0.data 0.038595 # mshr miss rate for ReadReq accesses -system.l2c.ReadReq_mshr_miss_rate::cpu1.itb.walker 0.000536 # mshr miss rate for ReadReq accesses -system.l2c.ReadReq_mshr_miss_rate::cpu1.inst 0.010627 # mshr miss rate for ReadReq accesses -system.l2c.ReadReq_mshr_miss_rate::cpu1.data 0.016807 # mshr miss rate for ReadReq accesses -system.l2c.ReadReq_mshr_miss_rate::total 0.018066 # mshr miss rate for ReadReq accesses -system.l2c.UpgradeReq_mshr_miss_rate::cpu0.data 0.761294 # mshr miss rate for UpgradeReq accesses -system.l2c.UpgradeReq_mshr_miss_rate::cpu1.data 0.883782 # mshr miss rate for UpgradeReq accesses -system.l2c.UpgradeReq_mshr_miss_rate::total 0.813101 # mshr miss rate for UpgradeReq accesses -system.l2c.SCUpgradeReq_mshr_miss_rate::cpu0.data 0.598765 # mshr miss rate for SCUpgradeReq accesses -system.l2c.SCUpgradeReq_mshr_miss_rate::cpu1.data 0.825862 # mshr miss rate for SCUpgradeReq accesses -system.l2c.SCUpgradeReq_mshr_miss_rate::total 0.706026 # mshr miss rate for SCUpgradeReq accesses -system.l2c.ReadExReq_mshr_miss_rate::cpu0.data 0.592150 # mshr miss rate for ReadExReq accesses -system.l2c.ReadExReq_mshr_miss_rate::cpu1.data 0.495483 # mshr miss rate for ReadExReq accesses -system.l2c.ReadExReq_mshr_miss_rate::total 0.557442 # mshr miss rate for ReadExReq accesses -system.l2c.demand_mshr_miss_rate::cpu0.dtb.walker 0.000883 # mshr miss rate for demand accesses -system.l2c.demand_mshr_miss_rate::cpu0.itb.walker 0.001384 # mshr miss rate for demand accesses -system.l2c.demand_mshr_miss_rate::cpu0.inst 0.013941 # mshr miss rate for demand accesses -system.l2c.demand_mshr_miss_rate::cpu0.data 0.254274 # mshr miss rate for demand accesses -system.l2c.demand_mshr_miss_rate::cpu1.itb.walker 0.000536 # mshr miss rate for demand accesses -system.l2c.demand_mshr_miss_rate::cpu1.inst 0.010627 # mshr miss rate for demand accesses -system.l2c.demand_mshr_miss_rate::cpu1.data 0.229605 # mshr miss rate for demand accesses -system.l2c.demand_mshr_miss_rate::total 0.108628 # mshr miss rate for demand accesses -system.l2c.overall_mshr_miss_rate::cpu0.dtb.walker 0.000883 # mshr miss rate for overall accesses -system.l2c.overall_mshr_miss_rate::cpu0.itb.walker 0.001384 # mshr miss rate for overall accesses -system.l2c.overall_mshr_miss_rate::cpu0.inst 0.013941 # mshr miss rate for overall accesses -system.l2c.overall_mshr_miss_rate::cpu0.data 0.254274 # mshr miss rate for overall accesses -system.l2c.overall_mshr_miss_rate::cpu1.itb.walker 0.000536 # mshr miss rate for overall accesses -system.l2c.overall_mshr_miss_rate::cpu1.inst 0.010627 # mshr miss rate for overall accesses -system.l2c.overall_mshr_miss_rate::cpu1.data 0.229605 # mshr miss rate for overall accesses -system.l2c.overall_mshr_miss_rate::total 0.108628 # mshr miss rate for overall accesses 
-system.l2c.ReadReq_avg_mshr_miss_latency::cpu0.dtb.walker 63625 # average ReadReq mshr miss latency +system.l2c.overall_mshr_uncacheable_latency::cpu1.data 170011957655 # number of overall MSHR uncacheable cycles +system.l2c.overall_mshr_uncacheable_latency::total 183856409644 # number of overall MSHR uncacheable cycles +system.l2c.ReadReq_mshr_miss_rate::cpu0.dtb.walker 0.000261 # mshr miss rate for ReadReq accesses +system.l2c.ReadReq_mshr_miss_rate::cpu0.itb.walker 0.001140 # mshr miss rate for ReadReq accesses +system.l2c.ReadReq_mshr_miss_rate::cpu0.inst 0.013474 # mshr miss rate for ReadReq accesses +system.l2c.ReadReq_mshr_miss_rate::cpu0.data 0.036721 # mshr miss rate for ReadReq accesses +system.l2c.ReadReq_mshr_miss_rate::cpu1.dtb.walker 0.000747 # mshr miss rate for ReadReq accesses +system.l2c.ReadReq_mshr_miss_rate::cpu1.itb.walker 0.000542 # mshr miss rate for ReadReq accesses +system.l2c.ReadReq_mshr_miss_rate::cpu1.inst 0.010770 # mshr miss rate for ReadReq accesses +system.l2c.ReadReq_mshr_miss_rate::cpu1.data 0.024631 # mshr miss rate for ReadReq accesses +system.l2c.ReadReq_mshr_miss_rate::total 0.017552 # mshr miss rate for ReadReq accesses +system.l2c.UpgradeReq_mshr_miss_rate::cpu0.data 0.802421 # mshr miss rate for UpgradeReq accesses +system.l2c.UpgradeReq_mshr_miss_rate::cpu1.data 0.865740 # mshr miss rate for UpgradeReq accesses +system.l2c.UpgradeReq_mshr_miss_rate::total 0.828734 # mshr miss rate for UpgradeReq accesses +system.l2c.SCUpgradeReq_mshr_miss_rate::cpu0.data 0.724582 # mshr miss rate for SCUpgradeReq accesses +system.l2c.SCUpgradeReq_mshr_miss_rate::cpu1.data 0.828473 # mshr miss rate for SCUpgradeReq accesses +system.l2c.SCUpgradeReq_mshr_miss_rate::total 0.769118 # mshr miss rate for SCUpgradeReq accesses +system.l2c.ReadExReq_mshr_miss_rate::cpu0.data 0.543083 # mshr miss rate for ReadExReq accesses +system.l2c.ReadExReq_mshr_miss_rate::cpu1.data 0.579420 # mshr miss rate for ReadExReq accesses +system.l2c.ReadExReq_mshr_miss_rate::total 0.561332 # mshr miss rate for ReadExReq accesses +system.l2c.demand_mshr_miss_rate::cpu0.dtb.walker 0.000261 # mshr miss rate for demand accesses +system.l2c.demand_mshr_miss_rate::cpu0.itb.walker 0.001140 # mshr miss rate for demand accesses +system.l2c.demand_mshr_miss_rate::cpu0.inst 0.013474 # mshr miss rate for demand accesses +system.l2c.demand_mshr_miss_rate::cpu0.data 0.222606 # mshr miss rate for demand accesses +system.l2c.demand_mshr_miss_rate::cpu1.dtb.walker 0.000747 # mshr miss rate for demand accesses +system.l2c.demand_mshr_miss_rate::cpu1.itb.walker 0.000542 # mshr miss rate for demand accesses +system.l2c.demand_mshr_miss_rate::cpu1.inst 0.010770 # mshr miss rate for demand accesses +system.l2c.demand_mshr_miss_rate::cpu1.data 0.279757 # mshr miss rate for demand accesses +system.l2c.demand_mshr_miss_rate::total 0.106790 # mshr miss rate for demand accesses +system.l2c.overall_mshr_miss_rate::cpu0.dtb.walker 0.000261 # mshr miss rate for overall accesses +system.l2c.overall_mshr_miss_rate::cpu0.itb.walker 0.001140 # mshr miss rate for overall accesses +system.l2c.overall_mshr_miss_rate::cpu0.inst 0.013474 # mshr miss rate for overall accesses +system.l2c.overall_mshr_miss_rate::cpu0.data 0.222606 # mshr miss rate for overall accesses +system.l2c.overall_mshr_miss_rate::cpu1.dtb.walker 0.000747 # mshr miss rate for overall accesses +system.l2c.overall_mshr_miss_rate::cpu1.itb.walker 0.000542 # mshr miss rate for overall accesses +system.l2c.overall_mshr_miss_rate::cpu1.inst 0.010770 # mshr miss 
rate for overall accesses +system.l2c.overall_mshr_miss_rate::cpu1.data 0.279757 # mshr miss rate for overall accesses +system.l2c.overall_mshr_miss_rate::total 0.106790 # mshr miss rate for overall accesses +system.l2c.ReadReq_avg_mshr_miss_latency::cpu0.dtb.walker 20000 # average ReadReq mshr miss latency system.l2c.ReadReq_avg_mshr_miss_latency::cpu0.itb.walker 62500 # average ReadReq mshr miss latency -system.l2c.ReadReq_avg_mshr_miss_latency::cpu0.inst 59980.237154 # average ReadReq mshr miss latency -system.l2c.ReadReq_avg_mshr_miss_latency::cpu0.data 62750.797509 # average ReadReq mshr miss latency +system.l2c.ReadReq_avg_mshr_miss_latency::cpu0.inst 58768.841591 # average ReadReq mshr miss latency +system.l2c.ReadReq_avg_mshr_miss_latency::cpu0.data 61887.663948 # average ReadReq mshr miss latency +system.l2c.ReadReq_avg_mshr_miss_latency::cpu1.dtb.walker 70812.500000 # average ReadReq mshr miss latency system.l2c.ReadReq_avg_mshr_miss_latency::cpu1.itb.walker 62500 # average ReadReq mshr miss latency -system.l2c.ReadReq_avg_mshr_miss_latency::cpu1.inst 59486.440890 # average ReadReq mshr miss latency -system.l2c.ReadReq_avg_mshr_miss_latency::cpu1.data 66057.804233 # average ReadReq mshr miss latency -system.l2c.ReadReq_avg_mshr_miss_latency::total 61604.186027 # average ReadReq mshr miss latency -system.l2c.UpgradeReq_avg_mshr_miss_latency::cpu0.data 10009.547900 # average UpgradeReq mshr miss latency -system.l2c.UpgradeReq_avg_mshr_miss_latency::cpu1.data 10032.172281 # average UpgradeReq mshr miss latency -system.l2c.UpgradeReq_avg_mshr_miss_latency::total 10019.948784 # average UpgradeReq mshr miss latency -system.l2c.SCUpgradeReq_avg_mshr_miss_latency::cpu0.data 10003.574742 # average SCUpgradeReq mshr miss latency -system.l2c.SCUpgradeReq_avg_mshr_miss_latency::cpu1.data 10021.874739 # average SCUpgradeReq mshr miss latency -system.l2c.SCUpgradeReq_avg_mshr_miss_latency::total 10013.685121 # average SCUpgradeReq mshr miss latency -system.l2c.ReadExReq_avg_mshr_miss_latency::cpu0.data 56045.714220 # average ReadExReq mshr miss latency -system.l2c.ReadExReq_avg_mshr_miss_latency::cpu1.data 64546.202494 # average ReadExReq mshr miss latency -system.l2c.ReadExReq_avg_mshr_miss_latency::total 58758.594590 # average ReadExReq mshr miss latency -system.l2c.demand_avg_mshr_miss_latency::cpu0.dtb.walker 63625 # average overall mshr miss latency +system.l2c.ReadReq_avg_mshr_miss_latency::cpu1.inst 60061.894404 # average ReadReq mshr miss latency +system.l2c.ReadReq_avg_mshr_miss_latency::cpu1.data 66205.776672 # average ReadReq mshr miss latency +system.l2c.ReadReq_avg_mshr_miss_latency::total 61371.473767 # average ReadReq mshr miss latency +system.l2c.UpgradeReq_avg_mshr_miss_latency::cpu0.data 10003.017846 # average UpgradeReq mshr miss latency +system.l2c.UpgradeReq_avg_mshr_miss_latency::cpu1.data 10021.629189 # average UpgradeReq mshr miss latency +system.l2c.UpgradeReq_avg_mshr_miss_latency::total 10011.097379 # average UpgradeReq mshr miss latency +system.l2c.SCUpgradeReq_avg_mshr_miss_latency::cpu0.data 10005.436945 # average SCUpgradeReq mshr miss latency +system.l2c.SCUpgradeReq_avg_mshr_miss_latency::cpu1.data 10017.561077 # average SCUpgradeReq mshr miss latency +system.l2c.SCUpgradeReq_avg_mshr_miss_latency::total 10011.035373 # average SCUpgradeReq mshr miss latency +system.l2c.ReadExReq_avg_mshr_miss_latency::cpu0.data 54391.368616 # average ReadExReq mshr miss latency +system.l2c.ReadExReq_avg_mshr_miss_latency::cpu1.data 62609.247888 # average ReadExReq mshr miss 
latency +system.l2c.ReadExReq_avg_mshr_miss_latency::total 58651.585338 # average ReadExReq mshr miss latency +system.l2c.demand_avg_mshr_miss_latency::cpu0.dtb.walker 20000 # average overall mshr miss latency system.l2c.demand_avg_mshr_miss_latency::cpu0.itb.walker 62500 # average overall mshr miss latency -system.l2c.demand_avg_mshr_miss_latency::cpu0.inst 59980.237154 # average overall mshr miss latency -system.l2c.demand_avg_mshr_miss_latency::cpu0.data 56666.917644 # average overall mshr miss latency +system.l2c.demand_avg_mshr_miss_latency::cpu0.inst 58768.841591 # average overall mshr miss latency +system.l2c.demand_avg_mshr_miss_latency::cpu0.data 55174.000958 # average overall mshr miss latency +system.l2c.demand_avg_mshr_miss_latency::cpu1.dtb.walker 70812.500000 # average overall mshr miss latency system.l2c.demand_avg_mshr_miss_latency::cpu1.itb.walker 62500 # average overall mshr miss latency -system.l2c.demand_avg_mshr_miss_latency::cpu1.inst 59486.440890 # average overall mshr miss latency -system.l2c.demand_avg_mshr_miss_latency::cpu1.data 64607.662938 # average overall mshr miss latency -system.l2c.demand_avg_mshr_miss_latency::total 59152.393857 # average overall mshr miss latency -system.l2c.overall_avg_mshr_miss_latency::cpu0.dtb.walker 63625 # average overall mshr miss latency +system.l2c.demand_avg_mshr_miss_latency::cpu1.inst 60061.894404 # average overall mshr miss latency +system.l2c.demand_avg_mshr_miss_latency::cpu1.data 62780.286049 # average overall mshr miss latency +system.l2c.demand_avg_mshr_miss_latency::total 59025.268693 # average overall mshr miss latency +system.l2c.overall_avg_mshr_miss_latency::cpu0.dtb.walker 20000 # average overall mshr miss latency system.l2c.overall_avg_mshr_miss_latency::cpu0.itb.walker 62500 # average overall mshr miss latency -system.l2c.overall_avg_mshr_miss_latency::cpu0.inst 59980.237154 # average overall mshr miss latency -system.l2c.overall_avg_mshr_miss_latency::cpu0.data 56666.917644 # average overall mshr miss latency +system.l2c.overall_avg_mshr_miss_latency::cpu0.inst 58768.841591 # average overall mshr miss latency +system.l2c.overall_avg_mshr_miss_latency::cpu0.data 55174.000958 # average overall mshr miss latency +system.l2c.overall_avg_mshr_miss_latency::cpu1.dtb.walker 70812.500000 # average overall mshr miss latency system.l2c.overall_avg_mshr_miss_latency::cpu1.itb.walker 62500 # average overall mshr miss latency -system.l2c.overall_avg_mshr_miss_latency::cpu1.inst 59486.440890 # average overall mshr miss latency -system.l2c.overall_avg_mshr_miss_latency::cpu1.data 64607.662938 # average overall mshr miss latency -system.l2c.overall_avg_mshr_miss_latency::total 59152.393857 # average overall mshr miss latency +system.l2c.overall_avg_mshr_miss_latency::cpu1.inst 60061.894404 # average overall mshr miss latency +system.l2c.overall_avg_mshr_miss_latency::cpu1.data 62780.286049 # average overall mshr miss latency +system.l2c.overall_avg_mshr_miss_latency::total 59025.268693 # average overall mshr miss latency system.l2c.ReadReq_avg_mshr_uncacheable_latency::cpu0.inst inf # average ReadReq mshr uncacheable latency system.l2c.ReadReq_avg_mshr_uncacheable_latency::cpu0.data inf # average ReadReq mshr uncacheable latency system.l2c.ReadReq_avg_mshr_uncacheable_latency::cpu1.inst inf # average ReadReq mshr uncacheable latency @@ -1072,62 +1111,62 @@ system.cf0.dma_read_txs 0 # Nu system.cf0.dma_write_full_pages 0 # Number of full page size DMA writes. 
system.cf0.dma_write_bytes 0 # Number of bytes transfered via DMA writes. system.cf0.dma_write_txs 0 # Number of DMA write transactions. -system.toL2Bus.throughput 118330469 # Throughput (bytes/s) -system.toL2Bus.trans_dist::ReadReq 2505274 # Transaction distribution -system.toL2Bus.trans_dist::ReadResp 2505274 # Transaction distribution -system.toL2Bus.trans_dist::WriteReq 767205 # Transaction distribution -system.toL2Bus.trans_dist::WriteResp 767205 # Transaction distribution -system.toL2Bus.trans_dist::Writeback 576138 # Transaction distribution -system.toL2Bus.trans_dist::UpgradeReq 27005 # Transaction distribution -system.toL2Bus.trans_dist::SCUpgradeReq 16807 # Transaction distribution -system.toL2Bus.trans_dist::UpgradeResp 43812 # Transaction distribution -system.toL2Bus.trans_dist::ReadExReq 262415 # Transaction distribution -system.toL2Bus.trans_dist::ReadExResp 262415 # Transaction distribution -system.toL2Bus.pkt_count_system.cpu0.icache.mem_side::system.l2c.cpu_side 993978 # Packet count per connected master and slave (bytes) -system.toL2Bus.pkt_count_system.cpu0.dcache.mem_side::system.l2c.cpu_side 2951141 # Packet count per connected master and slave (bytes) -system.toL2Bus.pkt_count_system.cpu0.itb.walker.dma::system.l2c.cpu_side 5841 # Packet count per connected master and slave (bytes) -system.toL2Bus.pkt_count_system.cpu0.dtb.walker.dma::system.l2c.cpu_side 14926 # Packet count per connected master and slave (bytes) -system.toL2Bus.pkt_count_system.cpu1.icache.mem_side::system.l2c.cpu_side 753985 # Packet count per connected master and slave (bytes) -system.toL2Bus.pkt_count_system.cpu1.dcache.mem_side::system.l2c.cpu_side 2879812 # Packet count per connected master and slave (bytes) -system.toL2Bus.pkt_count_system.cpu1.itb.walker.dma::system.l2c.cpu_side 6193 # Packet count per connected master and slave (bytes) -system.toL2Bus.pkt_count_system.cpu1.dtb.walker.dma::system.l2c.cpu_side 12022 # Packet count per connected master and slave (bytes) -system.toL2Bus.pkt_count::total 7617898 # Packet count per connected master and slave (bytes) -system.toL2Bus.tot_pkt_size_system.cpu0.icache.mem_side::system.l2c.cpu_side 31385016 # Cumulative packet size per connected master and slave (bytes) -system.toL2Bus.tot_pkt_size_system.cpu0.dcache.mem_side::system.l2c.cpu_side 53721240 # Cumulative packet size per connected master and slave (bytes) -system.toL2Bus.tot_pkt_size_system.cpu0.itb.walker.dma::system.l2c.cpu_side 5780 # Cumulative packet size per connected master and slave (bytes) -system.toL2Bus.tot_pkt_size_system.cpu0.dtb.walker.dma::system.l2c.cpu_side 18120 # Cumulative packet size per connected master and slave (bytes) -system.toL2Bus.tot_pkt_size_system.cpu1.icache.mem_side::system.l2c.cpu_side 24096780 # Cumulative packet size per connected master and slave (bytes) -system.toL2Bus.tot_pkt_size_system.cpu1.dcache.mem_side::system.l2c.cpu_side 27936146 # Cumulative packet size per connected master and slave (bytes) -system.toL2Bus.tot_pkt_size_system.cpu1.itb.walker.dma::system.l2c.cpu_side 7468 # Cumulative packet size per connected master and slave (bytes) -system.toL2Bus.tot_pkt_size_system.cpu1.dtb.walker.dma::system.l2c.cpu_side 15168 # Cumulative packet size per connected master and slave (bytes) -system.toL2Bus.tot_pkt_size::total 137185718 # Cumulative packet size per connected master and slave (bytes) -system.toL2Bus.data_through_bus 137185718 # Total data (bytes) -system.toL2Bus.snoop_data_through_bus 4312904 # Total snoop data (bytes) 
-system.toL2Bus.reqLayer0.occupancy 4765712727 # Layer occupancy (ticks) +system.toL2Bus.throughput 119504988 # Throughput (bytes/s) +system.toL2Bus.trans_dist::ReadReq 2535165 # Transaction distribution +system.toL2Bus.trans_dist::ReadResp 2535165 # Transaction distribution +system.toL2Bus.trans_dist::WriteReq 767563 # Transaction distribution +system.toL2Bus.trans_dist::WriteResp 767563 # Transaction distribution +system.toL2Bus.trans_dist::Writeback 570845 # Transaction distribution +system.toL2Bus.trans_dist::UpgradeReq 30638 # Transaction distribution +system.toL2Bus.trans_dist::SCUpgradeReq 17564 # Transaction distribution +system.toL2Bus.trans_dist::UpgradeResp 48202 # Transaction distribution +system.toL2Bus.trans_dist::ReadExReq 260860 # Transaction distribution +system.toL2Bus.trans_dist::ReadExResp 260860 # Transaction distribution +system.toL2Bus.pkt_count_system.cpu0.icache.mem_side::system.l2c.cpu_side 864640 # Packet count per connected master and slave (bytes) +system.toL2Bus.pkt_count_system.cpu0.dcache.mem_side::system.l2c.cpu_side 1226897 # Packet count per connected master and slave (bytes) +system.toL2Bus.pkt_count_system.cpu0.itb.walker.dma::system.l2c.cpu_side 6150 # Packet count per connected master and slave (bytes) +system.toL2Bus.pkt_count_system.cpu0.dtb.walker.dma::system.l2c.cpu_side 12700 # Packet count per connected master and slave (bytes) +system.toL2Bus.pkt_count_system.cpu1.icache.mem_side::system.l2c.cpu_side 939820 # Packet count per connected master and slave (bytes) +system.toL2Bus.pkt_count_system.cpu1.dcache.mem_side::system.l2c.cpu_side 4600271 # Packet count per connected master and slave (bytes) +system.toL2Bus.pkt_count_system.cpu1.itb.walker.dma::system.l2c.cpu_side 6172 # Packet count per connected master and slave (bytes) +system.toL2Bus.pkt_count_system.cpu1.dtb.walker.dma::system.l2c.cpu_side 15273 # Packet count per connected master and slave (bytes) +system.toL2Bus.pkt_count::total 7671923 # Packet count per connected master and slave (bytes) +system.toL2Bus.tot_pkt_size_system.cpu0.icache.mem_side::system.l2c.cpu_side 27252536 # Cumulative packet size per connected master and slave (bytes) +system.toL2Bus.tot_pkt_size_system.cpu0.dcache.mem_side::system.l2c.cpu_side 41401460 # Cumulative packet size per connected master and slave (bytes) +system.toL2Bus.tot_pkt_size_system.cpu0.itb.walker.dma::system.l2c.cpu_side 7016 # Cumulative packet size per connected master and slave (bytes) +system.toL2Bus.tot_pkt_size_system.cpu0.dtb.walker.dma::system.l2c.cpu_side 15324 # Cumulative packet size per connected master and slave (bytes) +system.toL2Bus.tot_pkt_size_system.cpu1.icache.mem_side::system.l2c.cpu_side 30051724 # Cumulative packet size per connected master and slave (bytes) +system.toL2Bus.tot_pkt_size_system.cpu1.dcache.mem_side::system.l2c.cpu_side 39586058 # Cumulative packet size per connected master and slave (bytes) +system.toL2Bus.tot_pkt_size_system.cpu1.itb.walker.dma::system.l2c.cpu_side 7384 # Cumulative packet size per connected master and slave (bytes) +system.toL2Bus.tot_pkt_size_system.cpu1.dtb.walker.dma::system.l2c.cpu_side 21416 # Cumulative packet size per connected master and slave (bytes) +system.toL2Bus.tot_pkt_size::total 138342918 # Cumulative packet size per connected master and slave (bytes) +system.toL2Bus.data_through_bus 138342918 # Total data (bytes) +system.toL2Bus.snoop_data_through_bus 4601108 # Total snoop data (bytes) +system.toL2Bus.reqLayer0.occupancy 4758624958 # Layer occupancy (ticks) 
system.toL2Bus.reqLayer0.utilization 0.4 # Layer utilization (%) -system.toL2Bus.respLayer0.occupancy 2217854478 # Layer occupancy (ticks) +system.toL2Bus.respLayer0.occupancy 1926201968 # Layer occupancy (ticks) system.toL2Bus.respLayer0.utilization 0.2 # Layer utilization (%) -system.toL2Bus.respLayer1.occupancy 2469983321 # Layer occupancy (ticks) -system.toL2Bus.respLayer1.utilization 0.2 # Layer utilization (%) +system.toL2Bus.respLayer1.occupancy 1755625353 # Layer occupancy (ticks) +system.toL2Bus.respLayer1.utilization 0.1 # Layer utilization (%) system.toL2Bus.respLayer2.occupancy 4396000 # Layer occupancy (ticks) system.toL2Bus.respLayer2.utilization 0.0 # Layer utilization (%) -system.toL2Bus.respLayer3.occupancy 10396000 # Layer occupancy (ticks) +system.toL2Bus.respLayer3.occupancy 8869000 # Layer occupancy (ticks) system.toL2Bus.respLayer3.utilization 0.0 # Layer utilization (%) -system.toL2Bus.respLayer4.occupancy 1698669462 # Layer occupancy (ticks) -system.toL2Bus.respLayer4.utilization 0.1 # Layer utilization (%) -system.toL2Bus.respLayer5.occupancy 2208533441 # Layer occupancy (ticks) +system.toL2Bus.respLayer4.occupancy 2116407722 # Layer occupancy (ticks) +system.toL2Bus.respLayer4.utilization 0.2 # Layer utilization (%) +system.toL2Bus.respLayer5.occupancy 2924723840 # Layer occupancy (ticks) system.toL2Bus.respLayer5.utilization 0.2 # Layer utilization (%) -system.toL2Bus.respLayer6.occupancy 4326000 # Layer occupancy (ticks) +system.toL2Bus.respLayer6.occupancy 4326499 # Layer occupancy (ticks) system.toL2Bus.respLayer6.utilization 0.0 # Layer utilization (%) -system.toL2Bus.respLayer7.occupancy 8230499 # Layer occupancy (ticks) +system.toL2Bus.respLayer7.occupancy 9919749 # Layer occupancy (ticks) system.toL2Bus.respLayer7.utilization 0.0 # Layer utilization (%) -system.iobus.throughput 45404559 # Throughput (bytes/s) -system.iobus.trans_dist::ReadReq 7671403 # Transaction distribution -system.iobus.trans_dist::ReadResp 7671403 # Transaction distribution +system.iobus.throughput 45391537 # Throughput (bytes/s) +system.iobus.trans_dist::ReadReq 7671396 # Transaction distribution +system.iobus.trans_dist::ReadResp 7671396 # Transaction distribution system.iobus.trans_dist::WriteReq 7946 # Transaction distribution system.iobus.trans_dist::WriteResp 7946 # Transaction distribution system.iobus.pkt_count_system.bridge.master::system.realview.uart.pio 30448 # Packet count per connected master and slave (bytes) -system.iobus.pkt_count_system.bridge.master::system.realview.realview_io.pio 8066 # Packet count per connected master and slave (bytes) +system.iobus.pkt_count_system.bridge.master::system.realview.realview_io.pio 8052 # Packet count per connected master and slave (bytes) system.iobus.pkt_count_system.bridge.master::system.realview.timer0.pio 34 # Packet count per connected master and slave (bytes) system.iobus.pkt_count_system.bridge.master::system.realview.timer1.pio 742 # Packet count per connected master and slave (bytes) system.iobus.pkt_count_system.bridge.master::system.realview.clcd.pio 36 # Packet count per connected master and slave (bytes) @@ -1149,12 +1188,12 @@ system.iobus.pkt_count_system.bridge.master::system.realview.sci_fake.pio system.iobus.pkt_count_system.bridge.master::system.realview.aaci_fake.pio 16 # Packet count per connected master and slave (bytes) system.iobus.pkt_count_system.bridge.master::system.realview.mmc_fake.pio 16 # Packet count per connected master and slave (bytes) 
system.iobus.pkt_count_system.bridge.master::system.realview.rtc.pio 16 # Packet count per connected master and slave (bytes) -system.iobus.pkt_count_system.bridge.master::total 2382570 # Packet count per connected master and slave (bytes) +system.iobus.pkt_count_system.bridge.master::total 2382556 # Packet count per connected master and slave (bytes) system.iobus.pkt_count_system.realview.clcd.dma::system.iocache.cpu_side 12976128 # Packet count per connected master and slave (bytes) system.iobus.pkt_count_system.realview.clcd.dma::total 12976128 # Packet count per connected master and slave (bytes) -system.iobus.pkt_count::total 15358698 # Packet count per connected master and slave (bytes) +system.iobus.pkt_count::total 15358684 # Packet count per connected master and slave (bytes) system.iobus.tot_pkt_size_system.bridge.master::system.realview.uart.pio 40166 # Cumulative packet size per connected master and slave (bytes) -system.iobus.tot_pkt_size_system.bridge.master::system.realview.realview_io.pio 16132 # Cumulative packet size per connected master and slave (bytes) +system.iobus.tot_pkt_size_system.bridge.master::system.realview.realview_io.pio 16104 # Cumulative packet size per connected master and slave (bytes) system.iobus.tot_pkt_size_system.bridge.master::system.realview.timer0.pio 68 # Cumulative packet size per connected master and slave (bytes) system.iobus.tot_pkt_size_system.bridge.master::system.realview.timer1.pio 1484 # Cumulative packet size per connected master and slave (bytes) system.iobus.tot_pkt_size_system.bridge.master::system.realview.clcd.pio 72 # Cumulative packet size per connected master and slave (bytes) @@ -1176,14 +1215,14 @@ system.iobus.tot_pkt_size_system.bridge.master::system.realview.sci_fake.pio system.iobus.tot_pkt_size_system.bridge.master::system.realview.aaci_fake.pio 32 # Cumulative packet size per connected master and slave (bytes) system.iobus.tot_pkt_size_system.bridge.master::system.realview.mmc_fake.pio 32 # Cumulative packet size per connected master and slave (bytes) system.iobus.tot_pkt_size_system.bridge.master::system.realview.rtc.pio 32 # Cumulative packet size per connected master and slave (bytes) -system.iobus.tot_pkt_size_system.bridge.master::total 2389894 # Cumulative packet size per connected master and slave (bytes) +system.iobus.tot_pkt_size_system.bridge.master::total 2389866 # Cumulative packet size per connected master and slave (bytes) system.iobus.tot_pkt_size_system.realview.clcd.dma::system.iocache.cpu_side 51904512 # Cumulative packet size per connected master and slave (bytes) system.iobus.tot_pkt_size_system.realview.clcd.dma::total 51904512 # Cumulative packet size per connected master and slave (bytes) -system.iobus.tot_pkt_size::total 54294406 # Cumulative packet size per connected master and slave (bytes) -system.iobus.data_through_bus 54294406 # Total data (bytes) +system.iobus.tot_pkt_size::total 54294378 # Cumulative packet size per connected master and slave (bytes) +system.iobus.data_through_bus 54294378 # Total data (bytes) system.iobus.reqLayer0.occupancy 21350000 # Layer occupancy (ticks) system.iobus.reqLayer0.utilization 0.0 # Layer utilization (%) -system.iobus.reqLayer1.occupancy 4039000 # Layer occupancy (ticks) +system.iobus.reqLayer1.occupancy 4032000 # Layer occupancy (ticks) system.iobus.reqLayer1.utilization 0.0 # Layer utilization (%) system.iobus.reqLayer2.occupancy 34000 # Layer occupancy (ticks) system.iobus.reqLayer2.utilization 0.0 # Layer utilization (%) @@ -1229,32 +1268,32 @@ 
system.iobus.reqLayer23.occupancy 8000 # La system.iobus.reqLayer23.utilization 0.0 # Layer utilization (%) system.iobus.reqLayer25.occupancy 6488064000 # Layer occupancy (ticks) system.iobus.reqLayer25.utilization 0.5 # Layer utilization (%) -system.iobus.respLayer0.occupancy 2374624000 # Layer occupancy (ticks) +system.iobus.respLayer0.occupancy 2374610000 # Layer occupancy (ticks) system.iobus.respLayer0.utilization 0.2 # Layer utilization (%) -system.iobus.respLayer1.occupancy 17777853001 # Layer occupancy (ticks) +system.iobus.respLayer1.occupancy 17778098751 # Layer occupancy (ticks) system.iobus.respLayer1.utilization 1.5 # Layer utilization (%) system.cpu0.dtb.inst_hits 0 # ITB inst hits system.cpu0.dtb.inst_misses 0 # ITB inst misses -system.cpu0.dtb.read_hits 9652640 # DTB read hits -system.cpu0.dtb.read_misses 3742 # DTB read misses -system.cpu0.dtb.write_hits 7596858 # DTB write hits -system.cpu0.dtb.write_misses 1582 # DTB write misses +system.cpu0.dtb.read_hits 7069308 # DTB read hits +system.cpu0.dtb.read_misses 3747 # DTB read misses +system.cpu0.dtb.write_hits 5655300 # DTB write hits +system.cpu0.dtb.write_misses 806 # DTB write misses system.cpu0.dtb.flush_tlb 4 # Number of times complete TLB was flushed system.cpu0.dtb.flush_tlb_mva 0 # Number of times TLB was flushed by MVA system.cpu0.dtb.flush_tlb_mva_asid 1439 # Number of times TLB was flushed by MVA & ASID system.cpu0.dtb.flush_tlb_asid 63 # Number of times TLB was flushed by ASID -system.cpu0.dtb.flush_entries 1811 # Number of entries that have been flushed from TLB +system.cpu0.dtb.flush_entries 1799 # Number of entries that have been flushed from TLB system.cpu0.dtb.align_faults 0 # Number of TLB faults due to alignment restrictions -system.cpu0.dtb.prefetch_faults 138 # Number of TLB faults due to prefetch +system.cpu0.dtb.prefetch_faults 142 # Number of TLB faults due to prefetch system.cpu0.dtb.domain_faults 0 # Number of TLB faults due to domain restrictions system.cpu0.dtb.perms_faults 204 # Number of TLB faults due to permissions restrictions -system.cpu0.dtb.read_accesses 9656382 # DTB read accesses -system.cpu0.dtb.write_accesses 7598440 # DTB write accesses +system.cpu0.dtb.read_accesses 7073055 # DTB read accesses +system.cpu0.dtb.write_accesses 5656106 # DTB write accesses system.cpu0.dtb.inst_accesses 0 # ITB inst accesses -system.cpu0.dtb.hits 17249498 # DTB hits -system.cpu0.dtb.misses 5324 # DTB misses -system.cpu0.dtb.accesses 17254822 # DTB accesses -system.cpu0.itb.inst_hits 43298691 # ITB inst hits +system.cpu0.dtb.hits 12724608 # DTB hits +system.cpu0.dtb.misses 4553 # DTB misses +system.cpu0.dtb.accesses 12729161 # DTB accesses +system.cpu0.itb.inst_hits 29565201 # ITB inst hits system.cpu0.itb.inst_misses 2205 # ITB inst misses system.cpu0.itb.read_hits 0 # DTB read hits system.cpu0.itb.read_misses 0 # DTB read misses @@ -1271,79 +1310,87 @@ system.cpu0.itb.domain_faults 0 # Nu system.cpu0.itb.perms_faults 0 # Number of TLB faults due to permissions restrictions system.cpu0.itb.read_accesses 0 # DTB read accesses system.cpu0.itb.write_accesses 0 # DTB write accesses -system.cpu0.itb.inst_accesses 43300896 # ITB inst accesses -system.cpu0.itb.hits 43298691 # DTB hits +system.cpu0.itb.inst_accesses 29567406 # ITB inst accesses +system.cpu0.itb.hits 29565201 # DTB hits system.cpu0.itb.misses 2205 # DTB misses -system.cpu0.itb.accesses 43300896 # DTB accesses -system.cpu0.numCycles 2391583901 # number of cpu cycles simulated +system.cpu0.itb.accesses 29567406 # DTB accesses 
+system.cpu0.numCycles 2392268776 # number of cpu cycles simulated system.cpu0.numWorkItemsStarted 0 # number of work items this cpu started system.cpu0.numWorkItemsCompleted 0 # number of work items this cpu completed -system.cpu0.committedInsts 42571767 # Number of instructions committed -system.cpu0.committedOps 53302041 # Number of ops (including micro ops) committed -system.cpu0.num_int_alu_accesses 48059042 # Number of integer alu accesses +system.cpu0.committedInsts 28867316 # Number of instructions committed +system.cpu0.committedOps 37205643 # Number of ops (including micro ops) committed +system.cpu0.num_int_alu_accesses 33092917 # Number of integer alu accesses system.cpu0.num_fp_alu_accesses 3860 # Number of float alu accesses -system.cpu0.num_func_calls 1403630 # number of times a function call or return occured -system.cpu0.num_conditional_control_insts 5582817 # number of instructions that are conditional controls -system.cpu0.num_int_insts 48059042 # number of integer instructions +system.cpu0.num_func_calls 1241596 # number of times a function call or return occured +system.cpu0.num_conditional_control_insts 4372519 # number of instructions that are conditional controls +system.cpu0.num_int_insts 33092917 # number of integer instructions system.cpu0.num_fp_insts 3860 # number of float instructions -system.cpu0.num_int_register_reads 272441604 # number of times the integer registers were read -system.cpu0.num_int_register_writes 52270515 # number of times the integer registers were written +system.cpu0.num_int_register_reads 190017972 # number of times the integer registers were read +system.cpu0.num_int_register_writes 36219842 # number of times the integer registers were written system.cpu0.num_fp_register_reads 3022 # number of times the floating registers were read system.cpu0.num_fp_register_writes 840 # number of times the floating registers were written -system.cpu0.num_mem_refs 18019009 # number of memory refs -system.cpu0.num_load_insts 10036503 # Number of load instructions -system.cpu0.num_store_insts 7982506 # Number of store instructions -system.cpu0.num_idle_cycles 2151142905.888201 # Number of idle cycles -system.cpu0.num_busy_cycles 240440995.111799 # Number of busy cycles -system.cpu0.not_idle_fraction 0.100536 # Percentage of non-idle cycles -system.cpu0.idle_fraction 0.899464 # Percentage of idle cycles +system.cpu0.num_mem_refs 13392372 # number of memory refs +system.cpu0.num_load_insts 7406786 # Number of load instructions +system.cpu0.num_store_insts 5985586 # Number of store instructions +system.cpu0.num_idle_cycles 2246456550.382122 # Number of idle cycles +system.cpu0.num_busy_cycles 145812225.617878 # Number of busy cycles +system.cpu0.not_idle_fraction 0.060951 # Percentage of non-idle cycles +system.cpu0.idle_fraction 0.939049 # Percentage of idle cycles system.cpu0.kern.inst.arm 0 # number of arm instructions executed -system.cpu0.kern.inst.quiesce 51319 # number of quiesce instructions executed -system.cpu0.icache.tags.replacements 490213 # number of replacements -system.cpu0.icache.tags.tagsinuse 509.358566 # Cycle average of tags in use -system.cpu0.icache.tags.total_refs 42807948 # Total number of references to valid blocks. -system.cpu0.icache.tags.sampled_refs 490725 # Sample count of references to valid blocks. -system.cpu0.icache.tags.avg_refs 87.234088 # Average number of references to valid blocks. 
+system.cpu0.kern.inst.quiesce 46712 # number of quiesce instructions executed +system.cpu0.icache.tags.replacements 425445 # number of replacements +system.cpu0.icache.tags.tagsinuse 509.359322 # Cycle average of tags in use +system.cpu0.icache.tags.total_refs 29139226 # Total number of references to valid blocks. +system.cpu0.icache.tags.sampled_refs 425957 # Sample count of references to valid blocks. +system.cpu0.icache.tags.avg_refs 68.408844 # Average number of references to valid blocks. system.cpu0.icache.tags.warmup_cycle 76218358000 # Cycle when the warmup percentage was hit. -system.cpu0.icache.tags.occ_blocks::cpu0.inst 509.358566 # Average occupied blocks per requestor -system.cpu0.icache.tags.occ_percent::cpu0.inst 0.994841 # Average percentage of cache occupancy -system.cpu0.icache.tags.occ_percent::total 0.994841 # Average percentage of cache occupancy -system.cpu0.icache.ReadReq_hits::cpu0.inst 42807948 # number of ReadReq hits -system.cpu0.icache.ReadReq_hits::total 42807948 # number of ReadReq hits -system.cpu0.icache.demand_hits::cpu0.inst 42807948 # number of demand (read+write) hits -system.cpu0.icache.demand_hits::total 42807948 # number of demand (read+write) hits -system.cpu0.icache.overall_hits::cpu0.inst 42807948 # number of overall hits -system.cpu0.icache.overall_hits::total 42807948 # number of overall hits -system.cpu0.icache.ReadReq_misses::cpu0.inst 490726 # number of ReadReq misses -system.cpu0.icache.ReadReq_misses::total 490726 # number of ReadReq misses -system.cpu0.icache.demand_misses::cpu0.inst 490726 # number of demand (read+write) misses -system.cpu0.icache.demand_misses::total 490726 # number of demand (read+write) misses -system.cpu0.icache.overall_misses::cpu0.inst 490726 # number of overall misses -system.cpu0.icache.overall_misses::total 490726 # number of overall misses -system.cpu0.icache.ReadReq_miss_latency::cpu0.inst 6824885728 # number of ReadReq miss cycles -system.cpu0.icache.ReadReq_miss_latency::total 6824885728 # number of ReadReq miss cycles -system.cpu0.icache.demand_miss_latency::cpu0.inst 6824885728 # number of demand (read+write) miss cycles -system.cpu0.icache.demand_miss_latency::total 6824885728 # number of demand (read+write) miss cycles -system.cpu0.icache.overall_miss_latency::cpu0.inst 6824885728 # number of overall miss cycles -system.cpu0.icache.overall_miss_latency::total 6824885728 # number of overall miss cycles -system.cpu0.icache.ReadReq_accesses::cpu0.inst 43298674 # number of ReadReq accesses(hits+misses) -system.cpu0.icache.ReadReq_accesses::total 43298674 # number of ReadReq accesses(hits+misses) -system.cpu0.icache.demand_accesses::cpu0.inst 43298674 # number of demand (read+write) accesses -system.cpu0.icache.demand_accesses::total 43298674 # number of demand (read+write) accesses -system.cpu0.icache.overall_accesses::cpu0.inst 43298674 # number of overall (read+write) accesses -system.cpu0.icache.overall_accesses::total 43298674 # number of overall (read+write) accesses -system.cpu0.icache.ReadReq_miss_rate::cpu0.inst 0.011334 # miss rate for ReadReq accesses -system.cpu0.icache.ReadReq_miss_rate::total 0.011334 # miss rate for ReadReq accesses -system.cpu0.icache.demand_miss_rate::cpu0.inst 0.011334 # miss rate for demand accesses -system.cpu0.icache.demand_miss_rate::total 0.011334 # miss rate for demand accesses -system.cpu0.icache.overall_miss_rate::cpu0.inst 0.011334 # miss rate for overall accesses -system.cpu0.icache.overall_miss_rate::total 0.011334 # miss rate for overall accesses 
-system.cpu0.icache.ReadReq_avg_miss_latency::cpu0.inst 13907.732070 # average ReadReq miss latency -system.cpu0.icache.ReadReq_avg_miss_latency::total 13907.732070 # average ReadReq miss latency -system.cpu0.icache.demand_avg_miss_latency::cpu0.inst 13907.732070 # average overall miss latency -system.cpu0.icache.demand_avg_miss_latency::total 13907.732070 # average overall miss latency -system.cpu0.icache.overall_avg_miss_latency::cpu0.inst 13907.732070 # average overall miss latency -system.cpu0.icache.overall_avg_miss_latency::total 13907.732070 # average overall miss latency +system.cpu0.icache.tags.occ_blocks::cpu0.inst 509.359322 # Average occupied blocks per requestor +system.cpu0.icache.tags.occ_percent::cpu0.inst 0.994842 # Average percentage of cache occupancy +system.cpu0.icache.tags.occ_percent::total 0.994842 # Average percentage of cache occupancy +system.cpu0.icache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::0 38 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::1 196 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::2 266 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::3 12 # Occupied blocks per task id +system.cpu0.icache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu0.icache.tags.tag_accesses 29991142 # Number of tag accesses +system.cpu0.icache.tags.data_accesses 29991142 # Number of data accesses +system.cpu0.icache.ReadReq_hits::cpu0.inst 29139226 # number of ReadReq hits +system.cpu0.icache.ReadReq_hits::total 29139226 # number of ReadReq hits +system.cpu0.icache.demand_hits::cpu0.inst 29139226 # number of demand (read+write) hits +system.cpu0.icache.demand_hits::total 29139226 # number of demand (read+write) hits +system.cpu0.icache.overall_hits::cpu0.inst 29139226 # number of overall hits +system.cpu0.icache.overall_hits::total 29139226 # number of overall hits +system.cpu0.icache.ReadReq_misses::cpu0.inst 425958 # number of ReadReq misses +system.cpu0.icache.ReadReq_misses::total 425958 # number of ReadReq misses +system.cpu0.icache.demand_misses::cpu0.inst 425958 # number of demand (read+write) misses +system.cpu0.icache.demand_misses::total 425958 # number of demand (read+write) misses +system.cpu0.icache.overall_misses::cpu0.inst 425958 # number of overall misses +system.cpu0.icache.overall_misses::total 425958 # number of overall misses +system.cpu0.icache.ReadReq_miss_latency::cpu0.inst 5905644218 # number of ReadReq miss cycles +system.cpu0.icache.ReadReq_miss_latency::total 5905644218 # number of ReadReq miss cycles +system.cpu0.icache.demand_miss_latency::cpu0.inst 5905644218 # number of demand (read+write) miss cycles +system.cpu0.icache.demand_miss_latency::total 5905644218 # number of demand (read+write) miss cycles +system.cpu0.icache.overall_miss_latency::cpu0.inst 5905644218 # number of overall miss cycles +system.cpu0.icache.overall_miss_latency::total 5905644218 # number of overall miss cycles +system.cpu0.icache.ReadReq_accesses::cpu0.inst 29565184 # number of ReadReq accesses(hits+misses) +system.cpu0.icache.ReadReq_accesses::total 29565184 # number of ReadReq accesses(hits+misses) +system.cpu0.icache.demand_accesses::cpu0.inst 29565184 # number of demand (read+write) accesses +system.cpu0.icache.demand_accesses::total 29565184 # number of demand (read+write) accesses +system.cpu0.icache.overall_accesses::cpu0.inst 29565184 # number of overall 
(read+write) accesses +system.cpu0.icache.overall_accesses::total 29565184 # number of overall (read+write) accesses +system.cpu0.icache.ReadReq_miss_rate::cpu0.inst 0.014407 # miss rate for ReadReq accesses +system.cpu0.icache.ReadReq_miss_rate::total 0.014407 # miss rate for ReadReq accesses +system.cpu0.icache.demand_miss_rate::cpu0.inst 0.014407 # miss rate for demand accesses +system.cpu0.icache.demand_miss_rate::total 0.014407 # miss rate for demand accesses +system.cpu0.icache.overall_miss_rate::cpu0.inst 0.014407 # miss rate for overall accesses +system.cpu0.icache.overall_miss_rate::total 0.014407 # miss rate for overall accesses +system.cpu0.icache.ReadReq_avg_miss_latency::cpu0.inst 13864.381507 # average ReadReq miss latency +system.cpu0.icache.ReadReq_avg_miss_latency::total 13864.381507 # average ReadReq miss latency +system.cpu0.icache.demand_avg_miss_latency::cpu0.inst 13864.381507 # average overall miss latency +system.cpu0.icache.demand_avg_miss_latency::total 13864.381507 # average overall miss latency +system.cpu0.icache.overall_avg_miss_latency::cpu0.inst 13864.381507 # average overall miss latency +system.cpu0.icache.overall_avg_miss_latency::total 13864.381507 # average overall miss latency system.cpu0.icache.blocked_cycles::no_mshrs 0 # number of cycles access was blocked system.cpu0.icache.blocked_cycles::no_targets 0 # number of cycles access was blocked system.cpu0.icache.blocked::no_mshrs 0 # number of cycles access was blocked @@ -1352,120 +1399,128 @@ system.cpu0.icache.avg_blocked_cycles::no_mshrs nan system.cpu0.icache.avg_blocked_cycles::no_targets nan # average number of cycles each access was blocked system.cpu0.icache.fast_writes 0 # number of fast writes performed system.cpu0.icache.cache_copies 0 # number of cache copies performed -system.cpu0.icache.ReadReq_mshr_misses::cpu0.inst 490726 # number of ReadReq MSHR misses -system.cpu0.icache.ReadReq_mshr_misses::total 490726 # number of ReadReq MSHR misses -system.cpu0.icache.demand_mshr_misses::cpu0.inst 490726 # number of demand (read+write) MSHR misses -system.cpu0.icache.demand_mshr_misses::total 490726 # number of demand (read+write) MSHR misses -system.cpu0.icache.overall_mshr_misses::cpu0.inst 490726 # number of overall MSHR misses -system.cpu0.icache.overall_mshr_misses::total 490726 # number of overall MSHR misses -system.cpu0.icache.ReadReq_mshr_miss_latency::cpu0.inst 5840816272 # number of ReadReq MSHR miss cycles -system.cpu0.icache.ReadReq_mshr_miss_latency::total 5840816272 # number of ReadReq MSHR miss cycles -system.cpu0.icache.demand_mshr_miss_latency::cpu0.inst 5840816272 # number of demand (read+write) MSHR miss cycles -system.cpu0.icache.demand_mshr_miss_latency::total 5840816272 # number of demand (read+write) MSHR miss cycles -system.cpu0.icache.overall_mshr_miss_latency::cpu0.inst 5840816272 # number of overall MSHR miss cycles -system.cpu0.icache.overall_mshr_miss_latency::total 5840816272 # number of overall MSHR miss cycles +system.cpu0.icache.ReadReq_mshr_misses::cpu0.inst 425958 # number of ReadReq MSHR misses +system.cpu0.icache.ReadReq_mshr_misses::total 425958 # number of ReadReq MSHR misses +system.cpu0.icache.demand_mshr_misses::cpu0.inst 425958 # number of demand (read+write) MSHR misses +system.cpu0.icache.demand_mshr_misses::total 425958 # number of demand (read+write) MSHR misses +system.cpu0.icache.overall_mshr_misses::cpu0.inst 425958 # number of overall MSHR misses +system.cpu0.icache.overall_mshr_misses::total 425958 # number of overall MSHR misses 
+system.cpu0.icache.ReadReq_mshr_miss_latency::cpu0.inst 5051503782 # number of ReadReq MSHR miss cycles +system.cpu0.icache.ReadReq_mshr_miss_latency::total 5051503782 # number of ReadReq MSHR miss cycles +system.cpu0.icache.demand_mshr_miss_latency::cpu0.inst 5051503782 # number of demand (read+write) MSHR miss cycles +system.cpu0.icache.demand_mshr_miss_latency::total 5051503782 # number of demand (read+write) MSHR miss cycles +system.cpu0.icache.overall_mshr_miss_latency::cpu0.inst 5051503782 # number of overall MSHR miss cycles +system.cpu0.icache.overall_mshr_miss_latency::total 5051503782 # number of overall MSHR miss cycles system.cpu0.icache.ReadReq_mshr_uncacheable_latency::cpu0.inst 436393750 # number of ReadReq MSHR uncacheable cycles system.cpu0.icache.ReadReq_mshr_uncacheable_latency::total 436393750 # number of ReadReq MSHR uncacheable cycles system.cpu0.icache.overall_mshr_uncacheable_latency::cpu0.inst 436393750 # number of overall MSHR uncacheable cycles system.cpu0.icache.overall_mshr_uncacheable_latency::total 436393750 # number of overall MSHR uncacheable cycles -system.cpu0.icache.ReadReq_mshr_miss_rate::cpu0.inst 0.011334 # mshr miss rate for ReadReq accesses -system.cpu0.icache.ReadReq_mshr_miss_rate::total 0.011334 # mshr miss rate for ReadReq accesses -system.cpu0.icache.demand_mshr_miss_rate::cpu0.inst 0.011334 # mshr miss rate for demand accesses -system.cpu0.icache.demand_mshr_miss_rate::total 0.011334 # mshr miss rate for demand accesses -system.cpu0.icache.overall_mshr_miss_rate::cpu0.inst 0.011334 # mshr miss rate for overall accesses -system.cpu0.icache.overall_mshr_miss_rate::total 0.011334 # mshr miss rate for overall accesses -system.cpu0.icache.ReadReq_avg_mshr_miss_latency::cpu0.inst 11902.398226 # average ReadReq mshr miss latency -system.cpu0.icache.ReadReq_avg_mshr_miss_latency::total 11902.398226 # average ReadReq mshr miss latency -system.cpu0.icache.demand_avg_mshr_miss_latency::cpu0.inst 11902.398226 # average overall mshr miss latency -system.cpu0.icache.demand_avg_mshr_miss_latency::total 11902.398226 # average overall mshr miss latency -system.cpu0.icache.overall_avg_mshr_miss_latency::cpu0.inst 11902.398226 # average overall mshr miss latency -system.cpu0.icache.overall_avg_mshr_miss_latency::total 11902.398226 # average overall mshr miss latency +system.cpu0.icache.ReadReq_mshr_miss_rate::cpu0.inst 0.014407 # mshr miss rate for ReadReq accesses +system.cpu0.icache.ReadReq_mshr_miss_rate::total 0.014407 # mshr miss rate for ReadReq accesses +system.cpu0.icache.demand_mshr_miss_rate::cpu0.inst 0.014407 # mshr miss rate for demand accesses +system.cpu0.icache.demand_mshr_miss_rate::total 0.014407 # mshr miss rate for demand accesses +system.cpu0.icache.overall_mshr_miss_rate::cpu0.inst 0.014407 # mshr miss rate for overall accesses +system.cpu0.icache.overall_mshr_miss_rate::total 0.014407 # mshr miss rate for overall accesses +system.cpu0.icache.ReadReq_avg_mshr_miss_latency::cpu0.inst 11859.159311 # average ReadReq mshr miss latency +system.cpu0.icache.ReadReq_avg_mshr_miss_latency::total 11859.159311 # average ReadReq mshr miss latency +system.cpu0.icache.demand_avg_mshr_miss_latency::cpu0.inst 11859.159311 # average overall mshr miss latency +system.cpu0.icache.demand_avg_mshr_miss_latency::total 11859.159311 # average overall mshr miss latency +system.cpu0.icache.overall_avg_mshr_miss_latency::cpu0.inst 11859.159311 # average overall mshr miss latency +system.cpu0.icache.overall_avg_mshr_miss_latency::total 11859.159311 # average overall 
mshr miss latency system.cpu0.icache.ReadReq_avg_mshr_uncacheable_latency::cpu0.inst inf # average ReadReq mshr uncacheable latency system.cpu0.icache.ReadReq_avg_mshr_uncacheable_latency::total inf # average ReadReq mshr uncacheable latency system.cpu0.icache.overall_avg_mshr_uncacheable_latency::cpu0.inst inf # average overall mshr uncacheable latency system.cpu0.icache.overall_avg_mshr_uncacheable_latency::total inf # average overall mshr uncacheable latency system.cpu0.icache.no_allocate_misses 0 # Number of misses that were no-allocate -system.cpu0.dcache.tags.replacements 406717 # number of replacements -system.cpu0.dcache.tags.tagsinuse 471.656866 # Cycle average of tags in use -system.cpu0.dcache.tags.total_refs 15966646 # Total number of references to valid blocks. -system.cpu0.dcache.tags.sampled_refs 407229 # Sample count of references to valid blocks. -system.cpu0.dcache.tags.avg_refs 39.208028 # Average number of references to valid blocks. +system.cpu0.dcache.tags.replacements 330301 # number of replacements +system.cpu0.dcache.tags.tagsinuse 454.615886 # Cycle average of tags in use +system.cpu0.dcache.tags.total_refs 12269300 # Total number of references to valid blocks. +system.cpu0.dcache.tags.sampled_refs 330813 # Sample count of references to valid blocks. +system.cpu0.dcache.tags.avg_refs 37.088325 # Average number of references to valid blocks. system.cpu0.dcache.tags.warmup_cycle 666436250 # Cycle when the warmup percentage was hit. -system.cpu0.dcache.tags.occ_blocks::cpu0.data 471.656866 # Average occupied blocks per requestor -system.cpu0.dcache.tags.occ_percent::cpu0.data 0.921205 # Average percentage of cache occupancy -system.cpu0.dcache.tags.occ_percent::total 0.921205 # Average percentage of cache occupancy -system.cpu0.dcache.ReadReq_hits::cpu0.data 9136610 # number of ReadReq hits -system.cpu0.dcache.ReadReq_hits::total 9136610 # number of ReadReq hits -system.cpu0.dcache.WriteReq_hits::cpu0.data 6494353 # number of WriteReq hits -system.cpu0.dcache.WriteReq_hits::total 6494353 # number of WriteReq hits -system.cpu0.dcache.LoadLockedReq_hits::cpu0.data 156522 # number of LoadLockedReq hits -system.cpu0.dcache.LoadLockedReq_hits::total 156522 # number of LoadLockedReq hits -system.cpu0.dcache.StoreCondReq_hits::cpu0.data 158977 # number of StoreCondReq hits -system.cpu0.dcache.StoreCondReq_hits::total 158977 # number of StoreCondReq hits -system.cpu0.dcache.demand_hits::cpu0.data 15630963 # number of demand (read+write) hits -system.cpu0.dcache.demand_hits::total 15630963 # number of demand (read+write) hits -system.cpu0.dcache.overall_hits::cpu0.data 15630963 # number of overall hits -system.cpu0.dcache.overall_hits::total 15630963 # number of overall hits -system.cpu0.dcache.ReadReq_misses::cpu0.data 263803 # number of ReadReq misses -system.cpu0.dcache.ReadReq_misses::total 263803 # number of ReadReq misses -system.cpu0.dcache.WriteReq_misses::cpu0.data 176623 # number of WriteReq misses -system.cpu0.dcache.WriteReq_misses::total 176623 # number of WriteReq misses -system.cpu0.dcache.LoadLockedReq_misses::cpu0.data 9911 # number of LoadLockedReq misses -system.cpu0.dcache.LoadLockedReq_misses::total 9911 # number of LoadLockedReq misses -system.cpu0.dcache.StoreCondReq_misses::cpu0.data 7399 # number of StoreCondReq misses -system.cpu0.dcache.StoreCondReq_misses::total 7399 # number of StoreCondReq misses -system.cpu0.dcache.demand_misses::cpu0.data 440426 # number of demand (read+write) misses -system.cpu0.dcache.demand_misses::total 440426 # number of 
demand (read+write) misses -system.cpu0.dcache.overall_misses::cpu0.data 440426 # number of overall misses -system.cpu0.dcache.overall_misses::total 440426 # number of overall misses -system.cpu0.dcache.ReadReq_miss_latency::cpu0.data 3917573248 # number of ReadReq miss cycles -system.cpu0.dcache.ReadReq_miss_latency::total 3917573248 # number of ReadReq miss cycles -system.cpu0.dcache.WriteReq_miss_latency::cpu0.data 7906184046 # number of WriteReq miss cycles -system.cpu0.dcache.WriteReq_miss_latency::total 7906184046 # number of WriteReq miss cycles -system.cpu0.dcache.LoadLockedReq_miss_latency::cpu0.data 99581999 # number of LoadLockedReq miss cycles -system.cpu0.dcache.LoadLockedReq_miss_latency::total 99581999 # number of LoadLockedReq miss cycles -system.cpu0.dcache.StoreCondReq_miss_latency::cpu0.data 40689888 # number of StoreCondReq miss cycles -system.cpu0.dcache.StoreCondReq_miss_latency::total 40689888 # number of StoreCondReq miss cycles -system.cpu0.dcache.demand_miss_latency::cpu0.data 11823757294 # number of demand (read+write) miss cycles -system.cpu0.dcache.demand_miss_latency::total 11823757294 # number of demand (read+write) miss cycles -system.cpu0.dcache.overall_miss_latency::cpu0.data 11823757294 # number of overall miss cycles -system.cpu0.dcache.overall_miss_latency::total 11823757294 # number of overall miss cycles -system.cpu0.dcache.ReadReq_accesses::cpu0.data 9400413 # number of ReadReq accesses(hits+misses) -system.cpu0.dcache.ReadReq_accesses::total 9400413 # number of ReadReq accesses(hits+misses) -system.cpu0.dcache.WriteReq_accesses::cpu0.data 6670976 # number of WriteReq accesses(hits+misses) -system.cpu0.dcache.WriteReq_accesses::total 6670976 # number of WriteReq accesses(hits+misses) -system.cpu0.dcache.LoadLockedReq_accesses::cpu0.data 166433 # number of LoadLockedReq accesses(hits+misses) -system.cpu0.dcache.LoadLockedReq_accesses::total 166433 # number of LoadLockedReq accesses(hits+misses) -system.cpu0.dcache.StoreCondReq_accesses::cpu0.data 166376 # number of StoreCondReq accesses(hits+misses) -system.cpu0.dcache.StoreCondReq_accesses::total 166376 # number of StoreCondReq accesses(hits+misses) -system.cpu0.dcache.demand_accesses::cpu0.data 16071389 # number of demand (read+write) accesses -system.cpu0.dcache.demand_accesses::total 16071389 # number of demand (read+write) accesses -system.cpu0.dcache.overall_accesses::cpu0.data 16071389 # number of overall (read+write) accesses -system.cpu0.dcache.overall_accesses::total 16071389 # number of overall (read+write) accesses -system.cpu0.dcache.ReadReq_miss_rate::cpu0.data 0.028063 # miss rate for ReadReq accesses -system.cpu0.dcache.ReadReq_miss_rate::total 0.028063 # miss rate for ReadReq accesses -system.cpu0.dcache.WriteReq_miss_rate::cpu0.data 0.026476 # miss rate for WriteReq accesses -system.cpu0.dcache.WriteReq_miss_rate::total 0.026476 # miss rate for WriteReq accesses -system.cpu0.dcache.LoadLockedReq_miss_rate::cpu0.data 0.059549 # miss rate for LoadLockedReq accesses -system.cpu0.dcache.LoadLockedReq_miss_rate::total 0.059549 # miss rate for LoadLockedReq accesses -system.cpu0.dcache.StoreCondReq_miss_rate::cpu0.data 0.044472 # miss rate for StoreCondReq accesses -system.cpu0.dcache.StoreCondReq_miss_rate::total 0.044472 # miss rate for StoreCondReq accesses -system.cpu0.dcache.demand_miss_rate::cpu0.data 0.027404 # miss rate for demand accesses -system.cpu0.dcache.demand_miss_rate::total 0.027404 # miss rate for demand accesses -system.cpu0.dcache.overall_miss_rate::cpu0.data 0.027404 # 
miss rate for overall accesses -system.cpu0.dcache.overall_miss_rate::total 0.027404 # miss rate for overall accesses -system.cpu0.dcache.ReadReq_avg_miss_latency::cpu0.data 14850.374135 # average ReadReq miss latency -system.cpu0.dcache.ReadReq_avg_miss_latency::total 14850.374135 # average ReadReq miss latency -system.cpu0.dcache.WriteReq_avg_miss_latency::cpu0.data 44763.049240 # average WriteReq miss latency -system.cpu0.dcache.WriteReq_avg_miss_latency::total 44763.049240 # average WriteReq miss latency -system.cpu0.dcache.LoadLockedReq_avg_miss_latency::cpu0.data 10047.623751 # average LoadLockedReq miss latency -system.cpu0.dcache.LoadLockedReq_avg_miss_latency::total 10047.623751 # average LoadLockedReq miss latency -system.cpu0.dcache.StoreCondReq_avg_miss_latency::cpu0.data 5499.376673 # average StoreCondReq miss latency -system.cpu0.dcache.StoreCondReq_avg_miss_latency::total 5499.376673 # average StoreCondReq miss latency -system.cpu0.dcache.demand_avg_miss_latency::cpu0.data 26846.183681 # average overall miss latency -system.cpu0.dcache.demand_avg_miss_latency::total 26846.183681 # average overall miss latency -system.cpu0.dcache.overall_avg_miss_latency::cpu0.data 26846.183681 # average overall miss latency -system.cpu0.dcache.overall_avg_miss_latency::total 26846.183681 # average overall miss latency +system.cpu0.dcache.tags.occ_blocks::cpu0.data 454.615886 # Average occupied blocks per requestor +system.cpu0.dcache.tags.occ_percent::cpu0.data 0.887922 # Average percentage of cache occupancy +system.cpu0.dcache.tags.occ_percent::total 0.887922 # Average percentage of cache occupancy +system.cpu0.dcache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu0.dcache.tags.age_task_id_blocks_1024::0 71 # Occupied blocks per task id +system.cpu0.dcache.tags.age_task_id_blocks_1024::1 343 # Occupied blocks per task id +system.cpu0.dcache.tags.age_task_id_blocks_1024::2 97 # Occupied blocks per task id +system.cpu0.dcache.tags.age_task_id_blocks_1024::3 1 # Occupied blocks per task id +system.cpu0.dcache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu0.dcache.tags.tag_accesses 50897043 # Number of tag accesses +system.cpu0.dcache.tags.data_accesses 50897043 # Number of data accesses +system.cpu0.dcache.ReadReq_hits::cpu0.data 6599288 # number of ReadReq hits +system.cpu0.dcache.ReadReq_hits::total 6599288 # number of ReadReq hits +system.cpu0.dcache.WriteReq_hits::cpu0.data 5350353 # number of WriteReq hits +system.cpu0.dcache.WriteReq_hits::total 5350353 # number of WriteReq hits +system.cpu0.dcache.LoadLockedReq_hits::cpu0.data 147935 # number of LoadLockedReq hits +system.cpu0.dcache.LoadLockedReq_hits::total 147935 # number of LoadLockedReq hits +system.cpu0.dcache.StoreCondReq_hits::cpu0.data 149626 # number of StoreCondReq hits +system.cpu0.dcache.StoreCondReq_hits::total 149626 # number of StoreCondReq hits +system.cpu0.dcache.demand_hits::cpu0.data 11949641 # number of demand (read+write) hits +system.cpu0.dcache.demand_hits::total 11949641 # number of demand (read+write) hits +system.cpu0.dcache.overall_hits::cpu0.data 11949641 # number of overall hits +system.cpu0.dcache.overall_hits::total 11949641 # number of overall hits +system.cpu0.dcache.ReadReq_misses::cpu0.data 227704 # number of ReadReq misses +system.cpu0.dcache.ReadReq_misses::total 227704 # number of ReadReq misses +system.cpu0.dcache.WriteReq_misses::cpu0.data 141542 # number of WriteReq misses +system.cpu0.dcache.WriteReq_misses::total 141542 # 
number of WriteReq misses +system.cpu0.dcache.LoadLockedReq_misses::cpu0.data 9305 # number of LoadLockedReq misses +system.cpu0.dcache.LoadLockedReq_misses::total 9305 # number of LoadLockedReq misses +system.cpu0.dcache.StoreCondReq_misses::cpu0.data 7516 # number of StoreCondReq misses +system.cpu0.dcache.StoreCondReq_misses::total 7516 # number of StoreCondReq misses +system.cpu0.dcache.demand_misses::cpu0.data 369246 # number of demand (read+write) misses +system.cpu0.dcache.demand_misses::total 369246 # number of demand (read+write) misses +system.cpu0.dcache.overall_misses::cpu0.data 369246 # number of overall misses +system.cpu0.dcache.overall_misses::total 369246 # number of overall misses +system.cpu0.dcache.ReadReq_miss_latency::cpu0.data 3302919746 # number of ReadReq miss cycles +system.cpu0.dcache.ReadReq_miss_latency::total 3302919746 # number of ReadReq miss cycles +system.cpu0.dcache.WriteReq_miss_latency::cpu0.data 5684238795 # number of WriteReq miss cycles +system.cpu0.dcache.WriteReq_miss_latency::total 5684238795 # number of WriteReq miss cycles +system.cpu0.dcache.LoadLockedReq_miss_latency::cpu0.data 91447249 # number of LoadLockedReq miss cycles +system.cpu0.dcache.LoadLockedReq_miss_latency::total 91447249 # number of LoadLockedReq miss cycles +system.cpu0.dcache.StoreCondReq_miss_latency::cpu0.data 44459563 # number of StoreCondReq miss cycles +system.cpu0.dcache.StoreCondReq_miss_latency::total 44459563 # number of StoreCondReq miss cycles +system.cpu0.dcache.demand_miss_latency::cpu0.data 8987158541 # number of demand (read+write) miss cycles +system.cpu0.dcache.demand_miss_latency::total 8987158541 # number of demand (read+write) miss cycles +system.cpu0.dcache.overall_miss_latency::cpu0.data 8987158541 # number of overall miss cycles +system.cpu0.dcache.overall_miss_latency::total 8987158541 # number of overall miss cycles +system.cpu0.dcache.ReadReq_accesses::cpu0.data 6826992 # number of ReadReq accesses(hits+misses) +system.cpu0.dcache.ReadReq_accesses::total 6826992 # number of ReadReq accesses(hits+misses) +system.cpu0.dcache.WriteReq_accesses::cpu0.data 5491895 # number of WriteReq accesses(hits+misses) +system.cpu0.dcache.WriteReq_accesses::total 5491895 # number of WriteReq accesses(hits+misses) +system.cpu0.dcache.LoadLockedReq_accesses::cpu0.data 157240 # number of LoadLockedReq accesses(hits+misses) +system.cpu0.dcache.LoadLockedReq_accesses::total 157240 # number of LoadLockedReq accesses(hits+misses) +system.cpu0.dcache.StoreCondReq_accesses::cpu0.data 157142 # number of StoreCondReq accesses(hits+misses) +system.cpu0.dcache.StoreCondReq_accesses::total 157142 # number of StoreCondReq accesses(hits+misses) +system.cpu0.dcache.demand_accesses::cpu0.data 12318887 # number of demand (read+write) accesses +system.cpu0.dcache.demand_accesses::total 12318887 # number of demand (read+write) accesses +system.cpu0.dcache.overall_accesses::cpu0.data 12318887 # number of overall (read+write) accesses +system.cpu0.dcache.overall_accesses::total 12318887 # number of overall (read+write) accesses +system.cpu0.dcache.ReadReq_miss_rate::cpu0.data 0.033353 # miss rate for ReadReq accesses +system.cpu0.dcache.ReadReq_miss_rate::total 0.033353 # miss rate for ReadReq accesses +system.cpu0.dcache.WriteReq_miss_rate::cpu0.data 0.025773 # miss rate for WriteReq accesses +system.cpu0.dcache.WriteReq_miss_rate::total 0.025773 # miss rate for WriteReq accesses +system.cpu0.dcache.LoadLockedReq_miss_rate::cpu0.data 0.059177 # miss rate for LoadLockedReq accesses 
+system.cpu0.dcache.LoadLockedReq_miss_rate::total 0.059177 # miss rate for LoadLockedReq accesses +system.cpu0.dcache.StoreCondReq_miss_rate::cpu0.data 0.047829 # miss rate for StoreCondReq accesses +system.cpu0.dcache.StoreCondReq_miss_rate::total 0.047829 # miss rate for StoreCondReq accesses +system.cpu0.dcache.demand_miss_rate::cpu0.data 0.029974 # miss rate for demand accesses +system.cpu0.dcache.demand_miss_rate::total 0.029974 # miss rate for demand accesses +system.cpu0.dcache.overall_miss_rate::cpu0.data 0.029974 # miss rate for overall accesses +system.cpu0.dcache.overall_miss_rate::total 0.029974 # miss rate for overall accesses +system.cpu0.dcache.ReadReq_avg_miss_latency::cpu0.data 14505.321584 # average ReadReq miss latency +system.cpu0.dcache.ReadReq_avg_miss_latency::total 14505.321584 # average ReadReq miss latency +system.cpu0.dcache.WriteReq_avg_miss_latency::cpu0.data 40159.378806 # average WriteReq miss latency +system.cpu0.dcache.WriteReq_avg_miss_latency::total 40159.378806 # average WriteReq miss latency +system.cpu0.dcache.LoadLockedReq_avg_miss_latency::cpu0.data 9827.753788 # average LoadLockedReq miss latency +system.cpu0.dcache.LoadLockedReq_avg_miss_latency::total 9827.753788 # average LoadLockedReq miss latency +system.cpu0.dcache.StoreCondReq_avg_miss_latency::cpu0.data 5915.322379 # average StoreCondReq miss latency +system.cpu0.dcache.StoreCondReq_avg_miss_latency::total 5915.322379 # average StoreCondReq miss latency +system.cpu0.dcache.demand_avg_miss_latency::cpu0.data 24339.217056 # average overall miss latency +system.cpu0.dcache.demand_avg_miss_latency::total 24339.217056 # average overall miss latency +system.cpu0.dcache.overall_avg_miss_latency::cpu0.data 24339.217056 # average overall miss latency +system.cpu0.dcache.overall_avg_miss_latency::total 24339.217056 # average overall miss latency system.cpu0.dcache.blocked_cycles::no_mshrs 0 # number of cycles access was blocked system.cpu0.dcache.blocked_cycles::no_targets 0 # number of cycles access was blocked system.cpu0.dcache.blocked::no_mshrs 0 # number of cycles access was blocked @@ -1474,66 +1529,66 @@ system.cpu0.dcache.avg_blocked_cycles::no_mshrs nan system.cpu0.dcache.avg_blocked_cycles::no_targets nan # average number of cycles each access was blocked system.cpu0.dcache.fast_writes 0 # number of fast writes performed system.cpu0.dcache.cache_copies 0 # number of cache copies performed -system.cpu0.dcache.writebacks::writebacks 376546 # number of writebacks -system.cpu0.dcache.writebacks::total 376546 # number of writebacks -system.cpu0.dcache.ReadReq_mshr_misses::cpu0.data 263803 # number of ReadReq MSHR misses -system.cpu0.dcache.ReadReq_mshr_misses::total 263803 # number of ReadReq MSHR misses -system.cpu0.dcache.WriteReq_mshr_misses::cpu0.data 176623 # number of WriteReq MSHR misses -system.cpu0.dcache.WriteReq_mshr_misses::total 176623 # number of WriteReq MSHR misses -system.cpu0.dcache.LoadLockedReq_mshr_misses::cpu0.data 9911 # number of LoadLockedReq MSHR misses -system.cpu0.dcache.LoadLockedReq_mshr_misses::total 9911 # number of LoadLockedReq MSHR misses -system.cpu0.dcache.StoreCondReq_mshr_misses::cpu0.data 7395 # number of StoreCondReq MSHR misses -system.cpu0.dcache.StoreCondReq_mshr_misses::total 7395 # number of StoreCondReq MSHR misses -system.cpu0.dcache.demand_mshr_misses::cpu0.data 440426 # number of demand (read+write) MSHR misses -system.cpu0.dcache.demand_mshr_misses::total 440426 # number of demand (read+write) MSHR misses 
-system.cpu0.dcache.overall_mshr_misses::cpu0.data 440426 # number of overall MSHR misses -system.cpu0.dcache.overall_mshr_misses::total 440426 # number of overall MSHR misses -system.cpu0.dcache.ReadReq_mshr_miss_latency::cpu0.data 3387671752 # number of ReadReq MSHR miss cycles -system.cpu0.dcache.ReadReq_mshr_miss_latency::total 3387671752 # number of ReadReq MSHR miss cycles -system.cpu0.dcache.WriteReq_mshr_miss_latency::cpu0.data 7508595954 # number of WriteReq MSHR miss cycles -system.cpu0.dcache.WriteReq_mshr_miss_latency::total 7508595954 # number of WriteReq MSHR miss cycles -system.cpu0.dcache.LoadLockedReq_mshr_miss_latency::cpu0.data 79710001 # number of LoadLockedReq MSHR miss cycles -system.cpu0.dcache.LoadLockedReq_mshr_miss_latency::total 79710001 # number of LoadLockedReq MSHR miss cycles -system.cpu0.dcache.StoreCondReq_mshr_miss_latency::cpu0.data 25901112 # number of StoreCondReq MSHR miss cycles -system.cpu0.dcache.StoreCondReq_mshr_miss_latency::total 25901112 # number of StoreCondReq MSHR miss cycles +system.cpu0.dcache.writebacks::writebacks 305829 # number of writebacks +system.cpu0.dcache.writebacks::total 305829 # number of writebacks +system.cpu0.dcache.ReadReq_mshr_misses::cpu0.data 227704 # number of ReadReq MSHR misses +system.cpu0.dcache.ReadReq_mshr_misses::total 227704 # number of ReadReq MSHR misses +system.cpu0.dcache.WriteReq_mshr_misses::cpu0.data 141542 # number of WriteReq MSHR misses +system.cpu0.dcache.WriteReq_mshr_misses::total 141542 # number of WriteReq MSHR misses +system.cpu0.dcache.LoadLockedReq_mshr_misses::cpu0.data 9305 # number of LoadLockedReq MSHR misses +system.cpu0.dcache.LoadLockedReq_mshr_misses::total 9305 # number of LoadLockedReq MSHR misses +system.cpu0.dcache.StoreCondReq_mshr_misses::cpu0.data 7514 # number of StoreCondReq MSHR misses +system.cpu0.dcache.StoreCondReq_mshr_misses::total 7514 # number of StoreCondReq MSHR misses +system.cpu0.dcache.demand_mshr_misses::cpu0.data 369246 # number of demand (read+write) MSHR misses +system.cpu0.dcache.demand_mshr_misses::total 369246 # number of demand (read+write) MSHR misses +system.cpu0.dcache.overall_mshr_misses::cpu0.data 369246 # number of overall MSHR misses +system.cpu0.dcache.overall_mshr_misses::total 369246 # number of overall MSHR misses +system.cpu0.dcache.ReadReq_mshr_miss_latency::cpu0.data 2845576254 # number of ReadReq MSHR miss cycles +system.cpu0.dcache.ReadReq_mshr_miss_latency::total 2845576254 # number of ReadReq MSHR miss cycles +system.cpu0.dcache.WriteReq_mshr_miss_latency::cpu0.data 5370172205 # number of WriteReq MSHR miss cycles +system.cpu0.dcache.WriteReq_mshr_miss_latency::total 5370172205 # number of WriteReq MSHR miss cycles +system.cpu0.dcache.LoadLockedReq_mshr_miss_latency::cpu0.data 72789751 # number of LoadLockedReq MSHR miss cycles +system.cpu0.dcache.LoadLockedReq_mshr_miss_latency::total 72789751 # number of LoadLockedReq MSHR miss cycles +system.cpu0.dcache.StoreCondReq_mshr_miss_latency::cpu0.data 29432437 # number of StoreCondReq MSHR miss cycles +system.cpu0.dcache.StoreCondReq_mshr_miss_latency::total 29432437 # number of StoreCondReq MSHR miss cycles system.cpu0.dcache.StoreCondFailReq_mshr_miss_latency::cpu0.data 1000 # number of StoreCondFailReq MSHR miss cycles system.cpu0.dcache.StoreCondFailReq_mshr_miss_latency::total 1000 # number of StoreCondFailReq MSHR miss cycles -system.cpu0.dcache.demand_mshr_miss_latency::cpu0.data 10896267706 # number of demand (read+write) MSHR miss cycles 
-system.cpu0.dcache.demand_mshr_miss_latency::total 10896267706 # number of demand (read+write) MSHR miss cycles -system.cpu0.dcache.overall_mshr_miss_latency::cpu0.data 10896267706 # number of overall MSHR miss cycles -system.cpu0.dcache.overall_mshr_miss_latency::total 10896267706 # number of overall MSHR miss cycles -system.cpu0.dcache.ReadReq_mshr_uncacheable_latency::cpu0.data 13765830000 # number of ReadReq MSHR uncacheable cycles -system.cpu0.dcache.ReadReq_mshr_uncacheable_latency::total 13765830000 # number of ReadReq MSHR uncacheable cycles -system.cpu0.dcache.WriteReq_mshr_uncacheable_latency::cpu0.data 25807312360 # number of WriteReq MSHR uncacheable cycles -system.cpu0.dcache.WriteReq_mshr_uncacheable_latency::total 25807312360 # number of WriteReq MSHR uncacheable cycles -system.cpu0.dcache.overall_mshr_uncacheable_latency::cpu0.data 39573142360 # number of overall MSHR uncacheable cycles -system.cpu0.dcache.overall_mshr_uncacheable_latency::total 39573142360 # number of overall MSHR uncacheable cycles -system.cpu0.dcache.ReadReq_mshr_miss_rate::cpu0.data 0.028063 # mshr miss rate for ReadReq accesses -system.cpu0.dcache.ReadReq_mshr_miss_rate::total 0.028063 # mshr miss rate for ReadReq accesses -system.cpu0.dcache.WriteReq_mshr_miss_rate::cpu0.data 0.026476 # mshr miss rate for WriteReq accesses -system.cpu0.dcache.WriteReq_mshr_miss_rate::total 0.026476 # mshr miss rate for WriteReq accesses -system.cpu0.dcache.LoadLockedReq_mshr_miss_rate::cpu0.data 0.059549 # mshr miss rate for LoadLockedReq accesses -system.cpu0.dcache.LoadLockedReq_mshr_miss_rate::total 0.059549 # mshr miss rate for LoadLockedReq accesses -system.cpu0.dcache.StoreCondReq_mshr_miss_rate::cpu0.data 0.044448 # mshr miss rate for StoreCondReq accesses -system.cpu0.dcache.StoreCondReq_mshr_miss_rate::total 0.044448 # mshr miss rate for StoreCondReq accesses -system.cpu0.dcache.demand_mshr_miss_rate::cpu0.data 0.027404 # mshr miss rate for demand accesses -system.cpu0.dcache.demand_mshr_miss_rate::total 0.027404 # mshr miss rate for demand accesses -system.cpu0.dcache.overall_mshr_miss_rate::cpu0.data 0.027404 # mshr miss rate for overall accesses -system.cpu0.dcache.overall_mshr_miss_rate::total 0.027404 # mshr miss rate for overall accesses -system.cpu0.dcache.ReadReq_avg_mshr_miss_latency::cpu0.data 12841.672581 # average ReadReq mshr miss latency -system.cpu0.dcache.ReadReq_avg_mshr_miss_latency::total 12841.672581 # average ReadReq mshr miss latency -system.cpu0.dcache.WriteReq_avg_mshr_miss_latency::cpu0.data 42511.994214 # average WriteReq mshr miss latency -system.cpu0.dcache.WriteReq_avg_mshr_miss_latency::total 42511.994214 # average WriteReq mshr miss latency -system.cpu0.dcache.LoadLockedReq_avg_mshr_miss_latency::cpu0.data 8042.579054 # average LoadLockedReq mshr miss latency -system.cpu0.dcache.LoadLockedReq_avg_mshr_miss_latency::total 8042.579054 # average LoadLockedReq mshr miss latency -system.cpu0.dcache.StoreCondReq_avg_mshr_miss_latency::cpu0.data 3502.516836 # average StoreCondReq mshr miss latency -system.cpu0.dcache.StoreCondReq_avg_mshr_miss_latency::total 3502.516836 # average StoreCondReq mshr miss latency +system.cpu0.dcache.demand_mshr_miss_latency::cpu0.data 8215748459 # number of demand (read+write) MSHR miss cycles +system.cpu0.dcache.demand_mshr_miss_latency::total 8215748459 # number of demand (read+write) MSHR miss cycles +system.cpu0.dcache.overall_mshr_miss_latency::cpu0.data 8215748459 # number of overall MSHR miss cycles 
+system.cpu0.dcache.overall_mshr_miss_latency::total 8215748459 # number of overall MSHR miss cycles +system.cpu0.dcache.ReadReq_mshr_uncacheable_latency::cpu0.data 13558596000 # number of ReadReq MSHR uncacheable cycles +system.cpu0.dcache.ReadReq_mshr_uncacheable_latency::total 13558596000 # number of ReadReq MSHR uncacheable cycles +system.cpu0.dcache.WriteReq_mshr_uncacheable_latency::cpu0.data 1167114500 # number of WriteReq MSHR uncacheable cycles +system.cpu0.dcache.WriteReq_mshr_uncacheable_latency::total 1167114500 # number of WriteReq MSHR uncacheable cycles +system.cpu0.dcache.overall_mshr_uncacheable_latency::cpu0.data 14725710500 # number of overall MSHR uncacheable cycles +system.cpu0.dcache.overall_mshr_uncacheable_latency::total 14725710500 # number of overall MSHR uncacheable cycles +system.cpu0.dcache.ReadReq_mshr_miss_rate::cpu0.data 0.033353 # mshr miss rate for ReadReq accesses +system.cpu0.dcache.ReadReq_mshr_miss_rate::total 0.033353 # mshr miss rate for ReadReq accesses +system.cpu0.dcache.WriteReq_mshr_miss_rate::cpu0.data 0.025773 # mshr miss rate for WriteReq accesses +system.cpu0.dcache.WriteReq_mshr_miss_rate::total 0.025773 # mshr miss rate for WriteReq accesses +system.cpu0.dcache.LoadLockedReq_mshr_miss_rate::cpu0.data 0.059177 # mshr miss rate for LoadLockedReq accesses +system.cpu0.dcache.LoadLockedReq_mshr_miss_rate::total 0.059177 # mshr miss rate for LoadLockedReq accesses +system.cpu0.dcache.StoreCondReq_mshr_miss_rate::cpu0.data 0.047817 # mshr miss rate for StoreCondReq accesses +system.cpu0.dcache.StoreCondReq_mshr_miss_rate::total 0.047817 # mshr miss rate for StoreCondReq accesses +system.cpu0.dcache.demand_mshr_miss_rate::cpu0.data 0.029974 # mshr miss rate for demand accesses +system.cpu0.dcache.demand_mshr_miss_rate::total 0.029974 # mshr miss rate for demand accesses +system.cpu0.dcache.overall_mshr_miss_rate::cpu0.data 0.029974 # mshr miss rate for overall accesses +system.cpu0.dcache.overall_mshr_miss_rate::total 0.029974 # mshr miss rate for overall accesses +system.cpu0.dcache.ReadReq_avg_mshr_miss_latency::cpu0.data 12496.821549 # average ReadReq mshr miss latency +system.cpu0.dcache.ReadReq_avg_mshr_miss_latency::total 12496.821549 # average ReadReq mshr miss latency +system.cpu0.dcache.WriteReq_avg_mshr_miss_latency::cpu0.data 37940.485545 # average WriteReq mshr miss latency +system.cpu0.dcache.WriteReq_avg_mshr_miss_latency::total 37940.485545 # average WriteReq mshr miss latency +system.cpu0.dcache.LoadLockedReq_avg_mshr_miss_latency::cpu0.data 7822.649221 # average LoadLockedReq mshr miss latency +system.cpu0.dcache.LoadLockedReq_avg_mshr_miss_latency::total 7822.649221 # average LoadLockedReq mshr miss latency +system.cpu0.dcache.StoreCondReq_avg_mshr_miss_latency::cpu0.data 3917.013175 # average StoreCondReq mshr miss latency +system.cpu0.dcache.StoreCondReq_avg_mshr_miss_latency::total 3917.013175 # average StoreCondReq mshr miss latency system.cpu0.dcache.StoreCondFailReq_avg_mshr_miss_latency::cpu0.data inf # average StoreCondFailReq mshr miss latency system.cpu0.dcache.StoreCondFailReq_avg_mshr_miss_latency::total inf # average StoreCondFailReq mshr miss latency -system.cpu0.dcache.demand_avg_mshr_miss_latency::cpu0.data 24740.291686 # average overall mshr miss latency -system.cpu0.dcache.demand_avg_mshr_miss_latency::total 24740.291686 # average overall mshr miss latency -system.cpu0.dcache.overall_avg_mshr_miss_latency::cpu0.data 24740.291686 # average overall mshr miss latency 
-system.cpu0.dcache.overall_avg_mshr_miss_latency::total 24740.291686 # average overall mshr miss latency +system.cpu0.dcache.demand_avg_mshr_miss_latency::cpu0.data 22250.067595 # average overall mshr miss latency +system.cpu0.dcache.demand_avg_mshr_miss_latency::total 22250.067595 # average overall mshr miss latency +system.cpu0.dcache.overall_avg_mshr_miss_latency::cpu0.data 22250.067595 # average overall mshr miss latency +system.cpu0.dcache.overall_avg_mshr_miss_latency::total 22250.067595 # average overall mshr miss latency system.cpu0.dcache.ReadReq_avg_mshr_uncacheable_latency::cpu0.data inf # average ReadReq mshr uncacheable latency system.cpu0.dcache.ReadReq_avg_mshr_uncacheable_latency::total inf # average ReadReq mshr uncacheable latency system.cpu0.dcache.WriteReq_avg_mshr_uncacheable_latency::cpu0.data inf # average WriteReq mshr uncacheable latency @@ -1543,26 +1598,26 @@ system.cpu0.dcache.overall_avg_mshr_uncacheable_latency::total inf system.cpu0.dcache.no_allocate_misses 0 # Number of misses that were no-allocate system.cpu1.dtb.inst_hits 0 # ITB inst hits system.cpu1.dtb.inst_misses 0 # ITB inst misses -system.cpu1.dtb.read_hits 5706417 # DTB read hits -system.cpu1.dtb.read_misses 3586 # DTB read misses -system.cpu1.dtb.write_hits 3873093 # DTB write hits -system.cpu1.dtb.write_misses 644 # DTB write misses +system.cpu1.dtb.read_hits 8311308 # DTB read hits +system.cpu1.dtb.read_misses 3642 # DTB read misses +system.cpu1.dtb.write_hits 5827742 # DTB write hits +system.cpu1.dtb.write_misses 1438 # DTB write misses system.cpu1.dtb.flush_tlb 4 # Number of times complete TLB was flushed system.cpu1.dtb.flush_tlb_mva 0 # Number of times TLB was flushed by MVA system.cpu1.dtb.flush_tlb_mva_asid 1439 # Number of times TLB was flushed by MVA & ASID system.cpu1.dtb.flush_tlb_asid 63 # Number of times TLB was flushed by ASID -system.cpu1.dtb.flush_entries 1989 # Number of entries that have been flushed from TLB +system.cpu1.dtb.flush_entries 1964 # Number of entries that have been flushed from TLB system.cpu1.dtb.align_faults 0 # Number of TLB faults due to alignment restrictions -system.cpu1.dtb.prefetch_faults 148 # Number of TLB faults due to prefetch +system.cpu1.dtb.prefetch_faults 139 # Number of TLB faults due to prefetch system.cpu1.dtb.domain_faults 0 # Number of TLB faults due to domain restrictions system.cpu1.dtb.perms_faults 248 # Number of TLB faults due to permissions restrictions -system.cpu1.dtb.read_accesses 5710003 # DTB read accesses -system.cpu1.dtb.write_accesses 3873737 # DTB write accesses +system.cpu1.dtb.read_accesses 8314950 # DTB read accesses +system.cpu1.dtb.write_accesses 5829180 # DTB write accesses system.cpu1.dtb.inst_accesses 0 # ITB inst accesses -system.cpu1.dtb.hits 9579510 # DTB hits -system.cpu1.dtb.misses 4230 # DTB misses -system.cpu1.dtb.accesses 9583740 # DTB accesses -system.cpu1.itb.inst_hits 19379017 # ITB inst hits +system.cpu1.dtb.hits 14139050 # DTB hits +system.cpu1.dtb.misses 5080 # DTB misses +system.cpu1.dtb.accesses 14144130 # DTB accesses +system.cpu1.itb.inst_hits 33191969 # ITB inst hits system.cpu1.itb.inst_misses 2171 # ITB inst misses system.cpu1.itb.read_hits 0 # DTB read hits system.cpu1.itb.read_misses 0 # DTB read misses @@ -1579,79 +1634,86 @@ system.cpu1.itb.domain_faults 0 # Nu system.cpu1.itb.perms_faults 0 # Number of TLB faults due to permissions restrictions system.cpu1.itb.read_accesses 0 # DTB read accesses system.cpu1.itb.write_accesses 0 # DTB write accesses -system.cpu1.itb.inst_accesses 19381188 # ITB 
inst accesses -system.cpu1.itb.hits 19379017 # DTB hits +system.cpu1.itb.inst_accesses 33194140 # ITB inst accesses +system.cpu1.itb.hits 33191969 # DTB hits system.cpu1.itb.misses 2171 # DTB misses -system.cpu1.itb.accesses 19381188 # DTB accesses -system.cpu1.numCycles 2390136116 # number of cpu cycles simulated +system.cpu1.itb.accesses 33194140 # DTB accesses +system.cpu1.numCycles 2390799575 # number of cpu cycles simulated system.cpu1.numWorkItemsStarted 0 # number of work items this cpu started system.cpu1.numWorkItemsCompleted 0 # number of work items this cpu completed -system.cpu1.committedInsts 18798461 # Number of instructions committed -system.cpu1.committedOps 24902767 # Number of ops (including micro ops) committed -system.cpu1.num_int_alu_accesses 22266699 # Number of integer alu accesses +system.cpu1.committedInsts 32581389 # Number of instructions committed +system.cpu1.committedOps 41092068 # Number of ops (including micro ops) committed +system.cpu1.num_int_alu_accesses 37316324 # Number of integer alu accesses system.cpu1.num_fp_alu_accesses 6793 # Number of float alu accesses -system.cpu1.num_func_calls 796691 # number of times a function call or return occured -system.cpu1.num_conditional_control_insts 2514546 # number of instructions that are conditional controls -system.cpu1.num_int_insts 22266699 # number of integer instructions +system.cpu1.num_func_calls 962102 # number of times a function call or return occured +system.cpu1.num_conditional_control_insts 3732829 # number of instructions that are conditional controls +system.cpu1.num_int_insts 37316324 # number of integer instructions system.cpu1.num_fp_insts 6793 # number of float instructions -system.cpu1.num_int_register_reads 130767489 # number of times the integer registers were read -system.cpu1.num_int_register_writes 23318960 # number of times the integer registers were written +system.cpu1.num_int_register_reads 213681333 # number of times the integer registers were read +system.cpu1.num_int_register_writes 39457808 # number of times the integer registers were written system.cpu1.num_fp_register_reads 4535 # number of times the floating registers were read system.cpu1.num_fp_register_writes 2260 # number of times the floating registers were written -system.cpu1.num_mem_refs 10014870 # number of memory refs -system.cpu1.num_load_insts 5983067 # Number of load instructions -system.cpu1.num_store_insts 4031803 # Number of store instructions -system.cpu1.num_idle_cycles 1969216562.004314 # Number of idle cycles -system.cpu1.num_busy_cycles 420919553.995686 # Number of busy cycles -system.cpu1.not_idle_fraction 0.176107 # Percentage of non-idle cycles -system.cpu1.idle_fraction 0.823893 # Percentage of idle cycles +system.cpu1.num_mem_refs 14676854 # number of memory refs +system.cpu1.num_load_insts 8633232 # Number of load instructions +system.cpu1.num_store_insts 6043622 # Number of store instructions +system.cpu1.num_idle_cycles 1874349488.166457 # Number of idle cycles +system.cpu1.num_busy_cycles 516450086.833543 # Number of busy cycles +system.cpu1.not_idle_fraction 0.216016 # Percentage of non-idle cycles +system.cpu1.idle_fraction 0.783984 # Percentage of idle cycles system.cpu1.kern.inst.arm 0 # number of arm instructions executed -system.cpu1.kern.inst.quiesce 39069 # number of quiesce instructions executed -system.cpu1.icache.tags.replacements 376769 # number of replacements -system.cpu1.icache.tags.tagsinuse 474.890792 # Cycle average of tags in use -system.cpu1.icache.tags.total_refs 19001732 # 
Total number of references to valid blocks. -system.cpu1.icache.tags.sampled_refs 377281 # Sample count of references to valid blocks. -system.cpu1.icache.tags.avg_refs 50.364932 # Average number of references to valid blocks. -system.cpu1.icache.tags.warmup_cycle 327211938000 # Cycle when the warmup percentage was hit. -system.cpu1.icache.tags.occ_blocks::cpu1.inst 474.890792 # Average occupied blocks per requestor -system.cpu1.icache.tags.occ_percent::cpu1.inst 0.927521 # Average percentage of cache occupancy -system.cpu1.icache.tags.occ_percent::total 0.927521 # Average percentage of cache occupancy -system.cpu1.icache.ReadReq_hits::cpu1.inst 19001732 # number of ReadReq hits -system.cpu1.icache.ReadReq_hits::total 19001732 # number of ReadReq hits -system.cpu1.icache.demand_hits::cpu1.inst 19001732 # number of demand (read+write) hits -system.cpu1.icache.demand_hits::total 19001732 # number of demand (read+write) hits -system.cpu1.icache.overall_hits::cpu1.inst 19001732 # number of overall hits -system.cpu1.icache.overall_hits::total 19001732 # number of overall hits -system.cpu1.icache.ReadReq_misses::cpu1.inst 377281 # number of ReadReq misses -system.cpu1.icache.ReadReq_misses::total 377281 # number of ReadReq misses -system.cpu1.icache.demand_misses::cpu1.inst 377281 # number of demand (read+write) misses -system.cpu1.icache.demand_misses::total 377281 # number of demand (read+write) misses -system.cpu1.icache.overall_misses::cpu1.inst 377281 # number of overall misses -system.cpu1.icache.overall_misses::total 377281 # number of overall misses -system.cpu1.icache.ReadReq_miss_latency::cpu1.inst 5163865212 # number of ReadReq miss cycles -system.cpu1.icache.ReadReq_miss_latency::total 5163865212 # number of ReadReq miss cycles -system.cpu1.icache.demand_miss_latency::cpu1.inst 5163865212 # number of demand (read+write) miss cycles -system.cpu1.icache.demand_miss_latency::total 5163865212 # number of demand (read+write) miss cycles -system.cpu1.icache.overall_miss_latency::cpu1.inst 5163865212 # number of overall miss cycles -system.cpu1.icache.overall_miss_latency::total 5163865212 # number of overall miss cycles -system.cpu1.icache.ReadReq_accesses::cpu1.inst 19379013 # number of ReadReq accesses(hits+misses) -system.cpu1.icache.ReadReq_accesses::total 19379013 # number of ReadReq accesses(hits+misses) -system.cpu1.icache.demand_accesses::cpu1.inst 19379013 # number of demand (read+write) accesses -system.cpu1.icache.demand_accesses::total 19379013 # number of demand (read+write) accesses -system.cpu1.icache.overall_accesses::cpu1.inst 19379013 # number of overall (read+write) accesses -system.cpu1.icache.overall_accesses::total 19379013 # number of overall (read+write) accesses -system.cpu1.icache.ReadReq_miss_rate::cpu1.inst 0.019469 # miss rate for ReadReq accesses -system.cpu1.icache.ReadReq_miss_rate::total 0.019469 # miss rate for ReadReq accesses -system.cpu1.icache.demand_miss_rate::cpu1.inst 0.019469 # miss rate for demand accesses -system.cpu1.icache.demand_miss_rate::total 0.019469 # miss rate for demand accesses -system.cpu1.icache.overall_miss_rate::cpu1.inst 0.019469 # miss rate for overall accesses -system.cpu1.icache.overall_miss_rate::total 0.019469 # miss rate for overall accesses -system.cpu1.icache.ReadReq_avg_miss_latency::cpu1.inst 13687.053448 # average ReadReq miss latency -system.cpu1.icache.ReadReq_avg_miss_latency::total 13687.053448 # average ReadReq miss latency -system.cpu1.icache.demand_avg_miss_latency::cpu1.inst 13687.053448 # average overall miss 
latency -system.cpu1.icache.demand_avg_miss_latency::total 13687.053448 # average overall miss latency -system.cpu1.icache.overall_avg_miss_latency::cpu1.inst 13687.053448 # average overall miss latency -system.cpu1.icache.overall_avg_miss_latency::total 13687.053448 # average overall miss latency +system.cpu1.kern.inst.quiesce 43916 # number of quiesce instructions executed +system.cpu1.icache.tags.replacements 469558 # number of replacements +system.cpu1.icache.tags.tagsinuse 478.567582 # Cycle average of tags in use +system.cpu1.icache.tags.total_refs 32721895 # Total number of references to valid blocks. +system.cpu1.icache.tags.sampled_refs 470070 # Sample count of references to valid blocks. +system.cpu1.icache.tags.avg_refs 69.610686 # Average number of references to valid blocks. +system.cpu1.icache.tags.warmup_cycle 93987592500 # Cycle when the warmup percentage was hit. +system.cpu1.icache.tags.occ_blocks::cpu1.inst 478.567582 # Average occupied blocks per requestor +system.cpu1.icache.tags.occ_percent::cpu1.inst 0.934702 # Average percentage of cache occupancy +system.cpu1.icache.tags.occ_percent::total 0.934702 # Average percentage of cache occupancy +system.cpu1.icache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu1.icache.tags.age_task_id_blocks_1024::2 448 # Occupied blocks per task id +system.cpu1.icache.tags.age_task_id_blocks_1024::3 63 # Occupied blocks per task id +system.cpu1.icache.tags.age_task_id_blocks_1024::4 1 # Occupied blocks per task id +system.cpu1.icache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu1.icache.tags.tag_accesses 33662035 # Number of tag accesses +system.cpu1.icache.tags.data_accesses 33662035 # Number of data accesses +system.cpu1.icache.ReadReq_hits::cpu1.inst 32721895 # number of ReadReq hits +system.cpu1.icache.ReadReq_hits::total 32721895 # number of ReadReq hits +system.cpu1.icache.demand_hits::cpu1.inst 32721895 # number of demand (read+write) hits +system.cpu1.icache.demand_hits::total 32721895 # number of demand (read+write) hits +system.cpu1.icache.overall_hits::cpu1.inst 32721895 # number of overall hits +system.cpu1.icache.overall_hits::total 32721895 # number of overall hits +system.cpu1.icache.ReadReq_misses::cpu1.inst 470070 # number of ReadReq misses +system.cpu1.icache.ReadReq_misses::total 470070 # number of ReadReq misses +system.cpu1.icache.demand_misses::cpu1.inst 470070 # number of demand (read+write) misses +system.cpu1.icache.demand_misses::total 470070 # number of demand (read+write) misses +system.cpu1.icache.overall_misses::cpu1.inst 470070 # number of overall misses +system.cpu1.icache.overall_misses::total 470070 # number of overall misses +system.cpu1.icache.ReadReq_miss_latency::cpu1.inst 6444934971 # number of ReadReq miss cycles +system.cpu1.icache.ReadReq_miss_latency::total 6444934971 # number of ReadReq miss cycles +system.cpu1.icache.demand_miss_latency::cpu1.inst 6444934971 # number of demand (read+write) miss cycles +system.cpu1.icache.demand_miss_latency::total 6444934971 # number of demand (read+write) miss cycles +system.cpu1.icache.overall_miss_latency::cpu1.inst 6444934971 # number of overall miss cycles +system.cpu1.icache.overall_miss_latency::total 6444934971 # number of overall miss cycles +system.cpu1.icache.ReadReq_accesses::cpu1.inst 33191965 # number of ReadReq accesses(hits+misses) +system.cpu1.icache.ReadReq_accesses::total 33191965 # number of ReadReq accesses(hits+misses) +system.cpu1.icache.demand_accesses::cpu1.inst 
33191965 # number of demand (read+write) accesses +system.cpu1.icache.demand_accesses::total 33191965 # number of demand (read+write) accesses +system.cpu1.icache.overall_accesses::cpu1.inst 33191965 # number of overall (read+write) accesses +system.cpu1.icache.overall_accesses::total 33191965 # number of overall (read+write) accesses +system.cpu1.icache.ReadReq_miss_rate::cpu1.inst 0.014162 # miss rate for ReadReq accesses +system.cpu1.icache.ReadReq_miss_rate::total 0.014162 # miss rate for ReadReq accesses +system.cpu1.icache.demand_miss_rate::cpu1.inst 0.014162 # miss rate for demand accesses +system.cpu1.icache.demand_miss_rate::total 0.014162 # miss rate for demand accesses +system.cpu1.icache.overall_miss_rate::cpu1.inst 0.014162 # miss rate for overall accesses +system.cpu1.icache.overall_miss_rate::total 0.014162 # miss rate for overall accesses +system.cpu1.icache.ReadReq_avg_miss_latency::cpu1.inst 13710.585596 # average ReadReq miss latency +system.cpu1.icache.ReadReq_avg_miss_latency::total 13710.585596 # average ReadReq miss latency +system.cpu1.icache.demand_avg_miss_latency::cpu1.inst 13710.585596 # average overall miss latency +system.cpu1.icache.demand_avg_miss_latency::total 13710.585596 # average overall miss latency +system.cpu1.icache.overall_avg_miss_latency::cpu1.inst 13710.585596 # average overall miss latency +system.cpu1.icache.overall_avg_miss_latency::total 13710.585596 # average overall miss latency system.cpu1.icache.blocked_cycles::no_mshrs 0 # number of cycles access was blocked system.cpu1.icache.blocked_cycles::no_targets 0 # number of cycles access was blocked system.cpu1.icache.blocked::no_mshrs 0 # number of cycles access was blocked @@ -1660,120 +1722,126 @@ system.cpu1.icache.avg_blocked_cycles::no_mshrs nan system.cpu1.icache.avg_blocked_cycles::no_targets nan # average number of cycles each access was blocked system.cpu1.icache.fast_writes 0 # number of fast writes performed system.cpu1.icache.cache_copies 0 # number of cache copies performed -system.cpu1.icache.ReadReq_mshr_misses::cpu1.inst 377281 # number of ReadReq MSHR misses -system.cpu1.icache.ReadReq_mshr_misses::total 377281 # number of ReadReq MSHR misses -system.cpu1.icache.demand_mshr_misses::cpu1.inst 377281 # number of demand (read+write) MSHR misses -system.cpu1.icache.demand_mshr_misses::total 377281 # number of demand (read+write) MSHR misses -system.cpu1.icache.overall_mshr_misses::cpu1.inst 377281 # number of overall MSHR misses -system.cpu1.icache.overall_mshr_misses::total 377281 # number of overall MSHR misses -system.cpu1.icache.ReadReq_mshr_miss_latency::cpu1.inst 4407732788 # number of ReadReq MSHR miss cycles -system.cpu1.icache.ReadReq_mshr_miss_latency::total 4407732788 # number of ReadReq MSHR miss cycles -system.cpu1.icache.demand_mshr_miss_latency::cpu1.inst 4407732788 # number of demand (read+write) MSHR miss cycles -system.cpu1.icache.demand_mshr_miss_latency::total 4407732788 # number of demand (read+write) MSHR miss cycles -system.cpu1.icache.overall_mshr_miss_latency::cpu1.inst 4407732788 # number of overall MSHR miss cycles -system.cpu1.icache.overall_mshr_miss_latency::total 4407732788 # number of overall MSHR miss cycles +system.cpu1.icache.ReadReq_mshr_misses::cpu1.inst 470070 # number of ReadReq MSHR misses +system.cpu1.icache.ReadReq_mshr_misses::total 470070 # number of ReadReq MSHR misses +system.cpu1.icache.demand_mshr_misses::cpu1.inst 470070 # number of demand (read+write) MSHR misses +system.cpu1.icache.demand_mshr_misses::total 470070 # number of 
demand (read+write) MSHR misses +system.cpu1.icache.overall_mshr_misses::cpu1.inst 470070 # number of overall MSHR misses +system.cpu1.icache.overall_mshr_misses::total 470070 # number of overall MSHR misses +system.cpu1.icache.ReadReq_mshr_miss_latency::cpu1.inst 5502849027 # number of ReadReq MSHR miss cycles +system.cpu1.icache.ReadReq_mshr_miss_latency::total 5502849027 # number of ReadReq MSHR miss cycles +system.cpu1.icache.demand_mshr_miss_latency::cpu1.inst 5502849027 # number of demand (read+write) MSHR miss cycles +system.cpu1.icache.demand_mshr_miss_latency::total 5502849027 # number of demand (read+write) MSHR miss cycles +system.cpu1.icache.overall_mshr_miss_latency::cpu1.inst 5502849027 # number of overall MSHR miss cycles +system.cpu1.icache.overall_mshr_miss_latency::total 5502849027 # number of overall MSHR miss cycles system.cpu1.icache.ReadReq_mshr_uncacheable_latency::cpu1.inst 6483750 # number of ReadReq MSHR uncacheable cycles system.cpu1.icache.ReadReq_mshr_uncacheable_latency::total 6483750 # number of ReadReq MSHR uncacheable cycles system.cpu1.icache.overall_mshr_uncacheable_latency::cpu1.inst 6483750 # number of overall MSHR uncacheable cycles system.cpu1.icache.overall_mshr_uncacheable_latency::total 6483750 # number of overall MSHR uncacheable cycles -system.cpu1.icache.ReadReq_mshr_miss_rate::cpu1.inst 0.019469 # mshr miss rate for ReadReq accesses -system.cpu1.icache.ReadReq_mshr_miss_rate::total 0.019469 # mshr miss rate for ReadReq accesses -system.cpu1.icache.demand_mshr_miss_rate::cpu1.inst 0.019469 # mshr miss rate for demand accesses -system.cpu1.icache.demand_mshr_miss_rate::total 0.019469 # mshr miss rate for demand accesses -system.cpu1.icache.overall_mshr_miss_rate::cpu1.inst 0.019469 # mshr miss rate for overall accesses -system.cpu1.icache.overall_mshr_miss_rate::total 0.019469 # mshr miss rate for overall accesses -system.cpu1.icache.ReadReq_avg_mshr_miss_latency::cpu1.inst 11682.890970 # average ReadReq mshr miss latency -system.cpu1.icache.ReadReq_avg_mshr_miss_latency::total 11682.890970 # average ReadReq mshr miss latency -system.cpu1.icache.demand_avg_mshr_miss_latency::cpu1.inst 11682.890970 # average overall mshr miss latency -system.cpu1.icache.demand_avg_mshr_miss_latency::total 11682.890970 # average overall mshr miss latency -system.cpu1.icache.overall_avg_mshr_miss_latency::cpu1.inst 11682.890970 # average overall mshr miss latency -system.cpu1.icache.overall_avg_mshr_miss_latency::total 11682.890970 # average overall mshr miss latency +system.cpu1.icache.ReadReq_mshr_miss_rate::cpu1.inst 0.014162 # mshr miss rate for ReadReq accesses +system.cpu1.icache.ReadReq_mshr_miss_rate::total 0.014162 # mshr miss rate for ReadReq accesses +system.cpu1.icache.demand_mshr_miss_rate::cpu1.inst 0.014162 # mshr miss rate for demand accesses +system.cpu1.icache.demand_mshr_miss_rate::total 0.014162 # mshr miss rate for demand accesses +system.cpu1.icache.overall_mshr_miss_rate::cpu1.inst 0.014162 # mshr miss rate for overall accesses +system.cpu1.icache.overall_mshr_miss_rate::total 0.014162 # mshr miss rate for overall accesses +system.cpu1.icache.ReadReq_avg_mshr_miss_latency::cpu1.inst 11706.445906 # average ReadReq mshr miss latency +system.cpu1.icache.ReadReq_avg_mshr_miss_latency::total 11706.445906 # average ReadReq mshr miss latency +system.cpu1.icache.demand_avg_mshr_miss_latency::cpu1.inst 11706.445906 # average overall mshr miss latency +system.cpu1.icache.demand_avg_mshr_miss_latency::total 11706.445906 # average overall mshr miss 
latency +system.cpu1.icache.overall_avg_mshr_miss_latency::cpu1.inst 11706.445906 # average overall mshr miss latency +system.cpu1.icache.overall_avg_mshr_miss_latency::total 11706.445906 # average overall mshr miss latency system.cpu1.icache.ReadReq_avg_mshr_uncacheable_latency::cpu1.inst inf # average ReadReq mshr uncacheable latency system.cpu1.icache.ReadReq_avg_mshr_uncacheable_latency::total inf # average ReadReq mshr uncacheable latency system.cpu1.icache.overall_avg_mshr_uncacheable_latency::cpu1.inst inf # average overall mshr uncacheable latency system.cpu1.icache.overall_avg_mshr_uncacheable_latency::total inf # average overall mshr uncacheable latency system.cpu1.icache.no_allocate_misses 0 # Number of misses that were no-allocate -system.cpu1.dcache.tags.replacements 220436 # number of replacements -system.cpu1.dcache.tags.tagsinuse 471.379597 # Cycle average of tags in use -system.cpu1.dcache.tags.total_refs 8230755 # Total number of references to valid blocks. -system.cpu1.dcache.tags.sampled_refs 220801 # Sample count of references to valid blocks. -system.cpu1.dcache.tags.avg_refs 37.276801 # Average number of references to valid blocks. -system.cpu1.dcache.tags.warmup_cycle 106418022500 # Cycle when the warmup percentage was hit. -system.cpu1.dcache.tags.occ_blocks::cpu1.data 471.379597 # Average occupied blocks per requestor -system.cpu1.dcache.tags.occ_percent::cpu1.data 0.920663 # Average percentage of cache occupancy -system.cpu1.dcache.tags.occ_percent::total 0.920663 # Average percentage of cache occupancy -system.cpu1.dcache.ReadReq_hits::cpu1.data 4389351 # number of ReadReq hits -system.cpu1.dcache.ReadReq_hits::total 4389351 # number of ReadReq hits -system.cpu1.dcache.WriteReq_hits::cpu1.data 3673214 # number of WriteReq hits -system.cpu1.dcache.WriteReq_hits::total 3673214 # number of WriteReq hits -system.cpu1.dcache.LoadLockedReq_hits::cpu1.data 73456 # number of LoadLockedReq hits -system.cpu1.dcache.LoadLockedReq_hits::total 73456 # number of LoadLockedReq hits -system.cpu1.dcache.StoreCondReq_hits::cpu1.data 73714 # number of StoreCondReq hits -system.cpu1.dcache.StoreCondReq_hits::total 73714 # number of StoreCondReq hits -system.cpu1.dcache.demand_hits::cpu1.data 8062565 # number of demand (read+write) hits -system.cpu1.dcache.demand_hits::total 8062565 # number of demand (read+write) hits -system.cpu1.dcache.overall_hits::cpu1.data 8062565 # number of overall hits -system.cpu1.dcache.overall_hits::total 8062565 # number of overall hits -system.cpu1.dcache.ReadReq_misses::cpu1.data 133803 # number of ReadReq misses -system.cpu1.dcache.ReadReq_misses::total 133803 # number of ReadReq misses -system.cpu1.dcache.WriteReq_misses::cpu1.data 112797 # number of WriteReq misses -system.cpu1.dcache.WriteReq_misses::total 112797 # number of WriteReq misses -system.cpu1.dcache.LoadLockedReq_misses::cpu1.data 9752 # number of LoadLockedReq misses -system.cpu1.dcache.LoadLockedReq_misses::total 9752 # number of LoadLockedReq misses -system.cpu1.dcache.StoreCondReq_misses::cpu1.data 9418 # number of StoreCondReq misses -system.cpu1.dcache.StoreCondReq_misses::total 9418 # number of StoreCondReq misses -system.cpu1.dcache.demand_misses::cpu1.data 246600 # number of demand (read+write) misses -system.cpu1.dcache.demand_misses::total 246600 # number of demand (read+write) misses -system.cpu1.dcache.overall_misses::cpu1.data 246600 # number of overall misses -system.cpu1.dcache.overall_misses::total 246600 # number of overall misses 
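The new tags.occ_* lines above are the cache-occupancy stats this update introduces. As a minimal sketch (not part of the diff, assuming the usual gem5 tag-stat definitions where occ_percent is the cycle-averaged occupied blocks divided by the total block count, i.e. cache size / block size), the system.cpu1.icache values reproduce as:

    # occupancy check for system.cpu1.icache, using values from this diff
    size, block_size = 32768, 64            # icache config (size, block_size)
    total_blocks = size // block_size       # 512 blocks
    occ_blocks = 478.567582                 # tags.occ_blocks::cpu1.inst (new value)
    print(occ_blocks / total_blocks)        # ~0.934702 -> tags.occ_percent::cpu1.inst
    print(512 / total_blocks)               # 1.0 -> tags.occ_task_id_percent::1024 (512 blocks held by task id 1024)
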
-system.cpu1.dcache.ReadReq_miss_latency::cpu1.data 1647983739 # number of ReadReq miss cycles -system.cpu1.dcache.ReadReq_miss_latency::total 1647983739 # number of ReadReq miss cycles -system.cpu1.dcache.WriteReq_miss_latency::cpu1.data 4345457226 # number of WriteReq miss cycles -system.cpu1.dcache.WriteReq_miss_latency::total 4345457226 # number of WriteReq miss cycles -system.cpu1.dcache.LoadLockedReq_miss_latency::cpu1.data 77502998 # number of LoadLockedReq miss cycles -system.cpu1.dcache.LoadLockedReq_miss_latency::total 77502998 # number of LoadLockedReq miss cycles -system.cpu1.dcache.StoreCondReq_miss_latency::cpu1.data 49351478 # number of StoreCondReq miss cycles -system.cpu1.dcache.StoreCondReq_miss_latency::total 49351478 # number of StoreCondReq miss cycles -system.cpu1.dcache.demand_miss_latency::cpu1.data 5993440965 # number of demand (read+write) miss cycles -system.cpu1.dcache.demand_miss_latency::total 5993440965 # number of demand (read+write) miss cycles -system.cpu1.dcache.overall_miss_latency::cpu1.data 5993440965 # number of overall miss cycles -system.cpu1.dcache.overall_miss_latency::total 5993440965 # number of overall miss cycles -system.cpu1.dcache.ReadReq_accesses::cpu1.data 4523154 # number of ReadReq accesses(hits+misses) -system.cpu1.dcache.ReadReq_accesses::total 4523154 # number of ReadReq accesses(hits+misses) -system.cpu1.dcache.WriteReq_accesses::cpu1.data 3786011 # number of WriteReq accesses(hits+misses) -system.cpu1.dcache.WriteReq_accesses::total 3786011 # number of WriteReq accesses(hits+misses) -system.cpu1.dcache.LoadLockedReq_accesses::cpu1.data 83208 # number of LoadLockedReq accesses(hits+misses) -system.cpu1.dcache.LoadLockedReq_accesses::total 83208 # number of LoadLockedReq accesses(hits+misses) -system.cpu1.dcache.StoreCondReq_accesses::cpu1.data 83132 # number of StoreCondReq accesses(hits+misses) -system.cpu1.dcache.StoreCondReq_accesses::total 83132 # number of StoreCondReq accesses(hits+misses) -system.cpu1.dcache.demand_accesses::cpu1.data 8309165 # number of demand (read+write) accesses -system.cpu1.dcache.demand_accesses::total 8309165 # number of demand (read+write) accesses -system.cpu1.dcache.overall_accesses::cpu1.data 8309165 # number of overall (read+write) accesses -system.cpu1.dcache.overall_accesses::total 8309165 # number of overall (read+write) accesses -system.cpu1.dcache.ReadReq_miss_rate::cpu1.data 0.029582 # miss rate for ReadReq accesses -system.cpu1.dcache.ReadReq_miss_rate::total 0.029582 # miss rate for ReadReq accesses -system.cpu1.dcache.WriteReq_miss_rate::cpu1.data 0.029793 # miss rate for WriteReq accesses -system.cpu1.dcache.WriteReq_miss_rate::total 0.029793 # miss rate for WriteReq accesses -system.cpu1.dcache.LoadLockedReq_miss_rate::cpu1.data 0.117200 # miss rate for LoadLockedReq accesses -system.cpu1.dcache.LoadLockedReq_miss_rate::total 0.117200 # miss rate for LoadLockedReq accesses -system.cpu1.dcache.StoreCondReq_miss_rate::cpu1.data 0.113290 # miss rate for StoreCondReq accesses -system.cpu1.dcache.StoreCondReq_miss_rate::total 0.113290 # miss rate for StoreCondReq accesses -system.cpu1.dcache.demand_miss_rate::cpu1.data 0.029678 # miss rate for demand accesses -system.cpu1.dcache.demand_miss_rate::total 0.029678 # miss rate for demand accesses -system.cpu1.dcache.overall_miss_rate::cpu1.data 0.029678 # miss rate for overall accesses -system.cpu1.dcache.overall_miss_rate::total 0.029678 # miss rate for overall accesses -system.cpu1.dcache.ReadReq_avg_miss_latency::cpu1.data 12316.493195 # 
average ReadReq miss latency -system.cpu1.dcache.ReadReq_avg_miss_latency::total 12316.493195 # average ReadReq miss latency -system.cpu1.dcache.WriteReq_avg_miss_latency::cpu1.data 38524.581558 # average WriteReq miss latency -system.cpu1.dcache.WriteReq_avg_miss_latency::total 38524.581558 # average WriteReq miss latency -system.cpu1.dcache.LoadLockedReq_avg_miss_latency::cpu1.data 7947.395201 # average LoadLockedReq miss latency -system.cpu1.dcache.LoadLockedReq_avg_miss_latency::total 7947.395201 # average LoadLockedReq miss latency -system.cpu1.dcache.StoreCondReq_avg_miss_latency::cpu1.data 5240.122956 # average StoreCondReq miss latency -system.cpu1.dcache.StoreCondReq_avg_miss_latency::total 5240.122956 # average StoreCondReq miss latency -system.cpu1.dcache.demand_avg_miss_latency::cpu1.data 24304.302372 # average overall miss latency -system.cpu1.dcache.demand_avg_miss_latency::total 24304.302372 # average overall miss latency -system.cpu1.dcache.overall_avg_miss_latency::cpu1.data 24304.302372 # average overall miss latency -system.cpu1.dcache.overall_avg_miss_latency::total 24304.302372 # average overall miss latency +system.cpu1.dcache.tags.replacements 292078 # number of replacements +system.cpu1.dcache.tags.tagsinuse 471.633961 # Cycle average of tags in use +system.cpu1.dcache.tags.total_refs 11962120 # Total number of references to valid blocks. +system.cpu1.dcache.tags.sampled_refs 292453 # Sample count of references to valid blocks. +system.cpu1.dcache.tags.avg_refs 40.902709 # Average number of references to valid blocks. +system.cpu1.dcache.tags.warmup_cycle 85275256250 # Cycle when the warmup percentage was hit. +system.cpu1.dcache.tags.occ_blocks::cpu1.data 471.633961 # Average occupied blocks per requestor +system.cpu1.dcache.tags.occ_percent::cpu1.data 0.921160 # Average percentage of cache occupancy +system.cpu1.dcache.tags.occ_percent::total 0.921160 # Average percentage of cache occupancy +system.cpu1.dcache.tags.occ_task_id_blocks::1024 375 # Occupied blocks per task id +system.cpu1.dcache.tags.age_task_id_blocks_1024::2 361 # Occupied blocks per task id +system.cpu1.dcache.tags.age_task_id_blocks_1024::3 14 # Occupied blocks per task id +system.cpu1.dcache.tags.occ_task_id_percent::1024 0.732422 # Percentage of cache occupancy per task id +system.cpu1.dcache.tags.tag_accesses 49437007 # Number of tag accesses +system.cpu1.dcache.tags.data_accesses 49437007 # Number of data accesses +system.cpu1.dcache.ReadReq_hits::cpu1.data 6946722 # number of ReadReq hits +system.cpu1.dcache.ReadReq_hits::total 6946722 # number of ReadReq hits +system.cpu1.dcache.WriteReq_hits::cpu1.data 4827432 # number of WriteReq hits +system.cpu1.dcache.WriteReq_hits::total 4827432 # number of WriteReq hits +system.cpu1.dcache.LoadLockedReq_hits::cpu1.data 81845 # number of LoadLockedReq hits +system.cpu1.dcache.LoadLockedReq_hits::total 81845 # number of LoadLockedReq hits +system.cpu1.dcache.StoreCondReq_hits::cpu1.data 82747 # number of StoreCondReq hits +system.cpu1.dcache.StoreCondReq_hits::total 82747 # number of StoreCondReq hits +system.cpu1.dcache.demand_hits::cpu1.data 11774154 # number of demand (read+write) hits +system.cpu1.dcache.demand_hits::total 11774154 # number of demand (read+write) hits +system.cpu1.dcache.overall_hits::cpu1.data 11774154 # number of overall hits +system.cpu1.dcache.overall_hits::total 11774154 # number of overall hits +system.cpu1.dcache.ReadReq_misses::cpu1.data 170562 # number of ReadReq misses +system.cpu1.dcache.ReadReq_misses::total 170562 # 
number of ReadReq misses +system.cpu1.dcache.WriteReq_misses::cpu1.data 149956 # number of WriteReq misses +system.cpu1.dcache.WriteReq_misses::total 149956 # number of WriteReq misses +system.cpu1.dcache.LoadLockedReq_misses::cpu1.data 11055 # number of LoadLockedReq misses +system.cpu1.dcache.LoadLockedReq_misses::total 11055 # number of LoadLockedReq misses +system.cpu1.dcache.StoreCondReq_misses::cpu1.data 10053 # number of StoreCondReq misses +system.cpu1.dcache.StoreCondReq_misses::total 10053 # number of StoreCondReq misses +system.cpu1.dcache.demand_misses::cpu1.data 320518 # number of demand (read+write) misses +system.cpu1.dcache.demand_misses::total 320518 # number of demand (read+write) misses +system.cpu1.dcache.overall_misses::cpu1.data 320518 # number of overall misses +system.cpu1.dcache.overall_misses::total 320518 # number of overall misses +system.cpu1.dcache.ReadReq_miss_latency::cpu1.data 2219519248 # number of ReadReq miss cycles +system.cpu1.dcache.ReadReq_miss_latency::total 2219519248 # number of ReadReq miss cycles +system.cpu1.dcache.WriteReq_miss_latency::cpu1.data 6569366202 # number of WriteReq miss cycles +system.cpu1.dcache.WriteReq_miss_latency::total 6569366202 # number of WriteReq miss cycles +system.cpu1.dcache.LoadLockedReq_miss_latency::cpu1.data 92844750 # number of LoadLockedReq miss cycles +system.cpu1.dcache.LoadLockedReq_miss_latency::total 92844750 # number of LoadLockedReq miss cycles +system.cpu1.dcache.StoreCondReq_miss_latency::cpu1.data 52203482 # number of StoreCondReq miss cycles +system.cpu1.dcache.StoreCondReq_miss_latency::total 52203482 # number of StoreCondReq miss cycles +system.cpu1.dcache.demand_miss_latency::cpu1.data 8788885450 # number of demand (read+write) miss cycles +system.cpu1.dcache.demand_miss_latency::total 8788885450 # number of demand (read+write) miss cycles +system.cpu1.dcache.overall_miss_latency::cpu1.data 8788885450 # number of overall miss cycles +system.cpu1.dcache.overall_miss_latency::total 8788885450 # number of overall miss cycles +system.cpu1.dcache.ReadReq_accesses::cpu1.data 7117284 # number of ReadReq accesses(hits+misses) +system.cpu1.dcache.ReadReq_accesses::total 7117284 # number of ReadReq accesses(hits+misses) +system.cpu1.dcache.WriteReq_accesses::cpu1.data 4977388 # number of WriteReq accesses(hits+misses) +system.cpu1.dcache.WriteReq_accesses::total 4977388 # number of WriteReq accesses(hits+misses) +system.cpu1.dcache.LoadLockedReq_accesses::cpu1.data 92900 # number of LoadLockedReq accesses(hits+misses) +system.cpu1.dcache.LoadLockedReq_accesses::total 92900 # number of LoadLockedReq accesses(hits+misses) +system.cpu1.dcache.StoreCondReq_accesses::cpu1.data 92800 # number of StoreCondReq accesses(hits+misses) +system.cpu1.dcache.StoreCondReq_accesses::total 92800 # number of StoreCondReq accesses(hits+misses) +system.cpu1.dcache.demand_accesses::cpu1.data 12094672 # number of demand (read+write) accesses +system.cpu1.dcache.demand_accesses::total 12094672 # number of demand (read+write) accesses +system.cpu1.dcache.overall_accesses::cpu1.data 12094672 # number of overall (read+write) accesses +system.cpu1.dcache.overall_accesses::total 12094672 # number of overall (read+write) accesses +system.cpu1.dcache.ReadReq_miss_rate::cpu1.data 0.023964 # miss rate for ReadReq accesses +system.cpu1.dcache.ReadReq_miss_rate::total 0.023964 # miss rate for ReadReq accesses +system.cpu1.dcache.WriteReq_miss_rate::cpu1.data 0.030127 # miss rate for WriteReq accesses 
+system.cpu1.dcache.WriteReq_miss_rate::total 0.030127 # miss rate for WriteReq accesses +system.cpu1.dcache.LoadLockedReq_miss_rate::cpu1.data 0.118999 # miss rate for LoadLockedReq accesses +system.cpu1.dcache.LoadLockedReq_miss_rate::total 0.118999 # miss rate for LoadLockedReq accesses +system.cpu1.dcache.StoreCondReq_miss_rate::cpu1.data 0.108330 # miss rate for StoreCondReq accesses +system.cpu1.dcache.StoreCondReq_miss_rate::total 0.108330 # miss rate for StoreCondReq accesses +system.cpu1.dcache.demand_miss_rate::cpu1.data 0.026501 # miss rate for demand accesses +system.cpu1.dcache.demand_miss_rate::total 0.026501 # miss rate for demand accesses +system.cpu1.dcache.overall_miss_rate::cpu1.data 0.026501 # miss rate for overall accesses +system.cpu1.dcache.overall_miss_rate::total 0.026501 # miss rate for overall accesses +system.cpu1.dcache.ReadReq_avg_miss_latency::cpu1.data 13012.976208 # average ReadReq miss latency +system.cpu1.dcache.ReadReq_avg_miss_latency::total 13012.976208 # average ReadReq miss latency +system.cpu1.dcache.WriteReq_avg_miss_latency::cpu1.data 43808.625210 # average WriteReq miss latency +system.cpu1.dcache.WriteReq_avg_miss_latency::total 43808.625210 # average WriteReq miss latency +system.cpu1.dcache.LoadLockedReq_avg_miss_latency::cpu1.data 8398.439620 # average LoadLockedReq miss latency +system.cpu1.dcache.LoadLockedReq_avg_miss_latency::total 8398.439620 # average LoadLockedReq miss latency +system.cpu1.dcache.StoreCondReq_avg_miss_latency::cpu1.data 5192.826221 # average StoreCondReq miss latency +system.cpu1.dcache.StoreCondReq_avg_miss_latency::total 5192.826221 # average StoreCondReq miss latency +system.cpu1.dcache.demand_avg_miss_latency::cpu1.data 27420.879483 # average overall miss latency +system.cpu1.dcache.demand_avg_miss_latency::total 27420.879483 # average overall miss latency +system.cpu1.dcache.overall_avg_miss_latency::cpu1.data 27420.879483 # average overall miss latency +system.cpu1.dcache.overall_avg_miss_latency::total 27420.879483 # average overall miss latency system.cpu1.dcache.blocked_cycles::no_mshrs 0 # number of cycles access was blocked system.cpu1.dcache.blocked_cycles::no_targets 0 # number of cycles access was blocked system.cpu1.dcache.blocked::no_mshrs 0 # number of cycles access was blocked @@ -1782,66 +1850,66 @@ system.cpu1.dcache.avg_blocked_cycles::no_mshrs nan system.cpu1.dcache.avg_blocked_cycles::no_targets nan # average number of cycles each access was blocked system.cpu1.dcache.fast_writes 0 # number of fast writes performed system.cpu1.dcache.cache_copies 0 # number of cache copies performed -system.cpu1.dcache.writebacks::writebacks 199592 # number of writebacks -system.cpu1.dcache.writebacks::total 199592 # number of writebacks -system.cpu1.dcache.ReadReq_mshr_misses::cpu1.data 133803 # number of ReadReq MSHR misses -system.cpu1.dcache.ReadReq_mshr_misses::total 133803 # number of ReadReq MSHR misses -system.cpu1.dcache.WriteReq_mshr_misses::cpu1.data 112797 # number of WriteReq MSHR misses -system.cpu1.dcache.WriteReq_mshr_misses::total 112797 # number of WriteReq MSHR misses -system.cpu1.dcache.LoadLockedReq_mshr_misses::cpu1.data 9752 # number of LoadLockedReq MSHR misses -system.cpu1.dcache.LoadLockedReq_mshr_misses::total 9752 # number of LoadLockedReq MSHR misses -system.cpu1.dcache.StoreCondReq_mshr_misses::cpu1.data 9415 # number of StoreCondReq MSHR misses -system.cpu1.dcache.StoreCondReq_mshr_misses::total 9415 # number of StoreCondReq MSHR misses 
-system.cpu1.dcache.demand_mshr_misses::cpu1.data 246600 # number of demand (read+write) MSHR misses -system.cpu1.dcache.demand_mshr_misses::total 246600 # number of demand (read+write) MSHR misses -system.cpu1.dcache.overall_mshr_misses::cpu1.data 246600 # number of overall MSHR misses -system.cpu1.dcache.overall_mshr_misses::total 246600 # number of overall MSHR misses -system.cpu1.dcache.ReadReq_mshr_miss_latency::cpu1.data 1380025261 # number of ReadReq MSHR miss cycles -system.cpu1.dcache.ReadReq_mshr_miss_latency::total 1380025261 # number of ReadReq MSHR miss cycles -system.cpu1.dcache.WriteReq_mshr_miss_latency::cpu1.data 4109897774 # number of WriteReq MSHR miss cycles -system.cpu1.dcache.WriteReq_mshr_miss_latency::total 4109897774 # number of WriteReq MSHR miss cycles -system.cpu1.dcache.LoadLockedReq_mshr_miss_latency::cpu1.data 57992002 # number of LoadLockedReq MSHR miss cycles -system.cpu1.dcache.LoadLockedReq_mshr_miss_latency::total 57992002 # number of LoadLockedReq MSHR miss cycles -system.cpu1.dcache.StoreCondReq_mshr_miss_latency::cpu1.data 30524522 # number of StoreCondReq MSHR miss cycles -system.cpu1.dcache.StoreCondReq_mshr_miss_latency::total 30524522 # number of StoreCondReq MSHR miss cycles -system.cpu1.dcache.StoreCondFailReq_mshr_miss_latency::cpu1.data 2000 # number of StoreCondFailReq MSHR miss cycles -system.cpu1.dcache.StoreCondFailReq_mshr_miss_latency::total 2000 # number of StoreCondFailReq MSHR miss cycles -system.cpu1.dcache.demand_mshr_miss_latency::cpu1.data 5489923035 # number of demand (read+write) MSHR miss cycles -system.cpu1.dcache.demand_mshr_miss_latency::total 5489923035 # number of demand (read+write) MSHR miss cycles -system.cpu1.dcache.overall_mshr_miss_latency::cpu1.data 5489923035 # number of overall MSHR miss cycles -system.cpu1.dcache.overall_mshr_miss_latency::total 5489923035 # number of overall MSHR miss cycles -system.cpu1.dcache.ReadReq_mshr_uncacheable_latency::cpu1.data 168387761500 # number of ReadReq MSHR uncacheable cycles -system.cpu1.dcache.ReadReq_mshr_uncacheable_latency::total 168387761500 # number of ReadReq MSHR uncacheable cycles -system.cpu1.dcache.WriteReq_mshr_uncacheable_latency::cpu1.data 531061000 # number of WriteReq MSHR uncacheable cycles -system.cpu1.dcache.WriteReq_mshr_uncacheable_latency::total 531061000 # number of WriteReq MSHR uncacheable cycles -system.cpu1.dcache.overall_mshr_uncacheable_latency::cpu1.data 168918822500 # number of overall MSHR uncacheable cycles -system.cpu1.dcache.overall_mshr_uncacheable_latency::total 168918822500 # number of overall MSHR uncacheable cycles -system.cpu1.dcache.ReadReq_mshr_miss_rate::cpu1.data 0.029582 # mshr miss rate for ReadReq accesses -system.cpu1.dcache.ReadReq_mshr_miss_rate::total 0.029582 # mshr miss rate for ReadReq accesses -system.cpu1.dcache.WriteReq_mshr_miss_rate::cpu1.data 0.029793 # mshr miss rate for WriteReq accesses -system.cpu1.dcache.WriteReq_mshr_miss_rate::total 0.029793 # mshr miss rate for WriteReq accesses -system.cpu1.dcache.LoadLockedReq_mshr_miss_rate::cpu1.data 0.117200 # mshr miss rate for LoadLockedReq accesses -system.cpu1.dcache.LoadLockedReq_mshr_miss_rate::total 0.117200 # mshr miss rate for LoadLockedReq accesses -system.cpu1.dcache.StoreCondReq_mshr_miss_rate::cpu1.data 0.113254 # mshr miss rate for StoreCondReq accesses -system.cpu1.dcache.StoreCondReq_mshr_miss_rate::total 0.113254 # mshr miss rate for StoreCondReq accesses -system.cpu1.dcache.demand_mshr_miss_rate::cpu1.data 0.029678 # mshr miss rate for demand accesses 
-system.cpu1.dcache.demand_mshr_miss_rate::total 0.029678 # mshr miss rate for demand accesses -system.cpu1.dcache.overall_mshr_miss_rate::cpu1.data 0.029678 # mshr miss rate for overall accesses -system.cpu1.dcache.overall_mshr_miss_rate::total 0.029678 # mshr miss rate for overall accesses -system.cpu1.dcache.ReadReq_avg_mshr_miss_latency::cpu1.data 10313.858890 # average ReadReq mshr miss latency -system.cpu1.dcache.ReadReq_avg_mshr_miss_latency::total 10313.858890 # average ReadReq mshr miss latency -system.cpu1.dcache.WriteReq_avg_mshr_miss_latency::cpu1.data 36436.233003 # average WriteReq mshr miss latency -system.cpu1.dcache.WriteReq_avg_mshr_miss_latency::total 36436.233003 # average WriteReq mshr miss latency -system.cpu1.dcache.LoadLockedReq_avg_mshr_miss_latency::cpu1.data 5946.677810 # average LoadLockedReq mshr miss latency -system.cpu1.dcache.LoadLockedReq_avg_mshr_miss_latency::total 5946.677810 # average LoadLockedReq mshr miss latency -system.cpu1.dcache.StoreCondReq_avg_mshr_miss_latency::cpu1.data 3242.115985 # average StoreCondReq mshr miss latency -system.cpu1.dcache.StoreCondReq_avg_mshr_miss_latency::total 3242.115985 # average StoreCondReq mshr miss latency +system.cpu1.dcache.writebacks::writebacks 265016 # number of writebacks +system.cpu1.dcache.writebacks::total 265016 # number of writebacks +system.cpu1.dcache.ReadReq_mshr_misses::cpu1.data 170562 # number of ReadReq MSHR misses +system.cpu1.dcache.ReadReq_mshr_misses::total 170562 # number of ReadReq MSHR misses +system.cpu1.dcache.WriteReq_mshr_misses::cpu1.data 149956 # number of WriteReq MSHR misses +system.cpu1.dcache.WriteReq_mshr_misses::total 149956 # number of WriteReq MSHR misses +system.cpu1.dcache.LoadLockedReq_mshr_misses::cpu1.data 11055 # number of LoadLockedReq MSHR misses +system.cpu1.dcache.LoadLockedReq_mshr_misses::total 11055 # number of LoadLockedReq MSHR misses +system.cpu1.dcache.StoreCondReq_mshr_misses::cpu1.data 10052 # number of StoreCondReq MSHR misses +system.cpu1.dcache.StoreCondReq_mshr_misses::total 10052 # number of StoreCondReq MSHR misses +system.cpu1.dcache.demand_mshr_misses::cpu1.data 320518 # number of demand (read+write) MSHR misses +system.cpu1.dcache.demand_mshr_misses::total 320518 # number of demand (read+write) MSHR misses +system.cpu1.dcache.overall_mshr_misses::cpu1.data 320518 # number of overall MSHR misses +system.cpu1.dcache.overall_mshr_misses::total 320518 # number of overall MSHR misses +system.cpu1.dcache.ReadReq_mshr_miss_latency::cpu1.data 1877722752 # number of ReadReq MSHR miss cycles +system.cpu1.dcache.ReadReq_mshr_miss_latency::total 1877722752 # number of ReadReq MSHR miss cycles +system.cpu1.dcache.WriteReq_mshr_miss_latency::cpu1.data 6246095798 # number of WriteReq MSHR miss cycles +system.cpu1.dcache.WriteReq_mshr_miss_latency::total 6246095798 # number of WriteReq MSHR miss cycles +system.cpu1.dcache.LoadLockedReq_mshr_miss_latency::cpu1.data 70722250 # number of LoadLockedReq MSHR miss cycles +system.cpu1.dcache.LoadLockedReq_mshr_miss_latency::total 70722250 # number of LoadLockedReq MSHR miss cycles +system.cpu1.dcache.StoreCondReq_mshr_miss_latency::cpu1.data 32100518 # number of StoreCondReq MSHR miss cycles +system.cpu1.dcache.StoreCondReq_mshr_miss_latency::total 32100518 # number of StoreCondReq MSHR miss cycles +system.cpu1.dcache.StoreCondFailReq_mshr_miss_latency::cpu1.data 1000 # number of StoreCondFailReq MSHR miss cycles +system.cpu1.dcache.StoreCondFailReq_mshr_miss_latency::total 1000 # number of StoreCondFailReq MSHR miss 
cycles +system.cpu1.dcache.demand_mshr_miss_latency::cpu1.data 8123818550 # number of demand (read+write) MSHR miss cycles +system.cpu1.dcache.demand_mshr_miss_latency::total 8123818550 # number of demand (read+write) MSHR miss cycles +system.cpu1.dcache.overall_mshr_miss_latency::cpu1.data 8123818550 # number of overall MSHR miss cycles +system.cpu1.dcache.overall_mshr_miss_latency::total 8123818550 # number of overall MSHR miss cycles +system.cpu1.dcache.ReadReq_mshr_uncacheable_latency::cpu1.data 168605274000 # number of ReadReq MSHR uncacheable cycles +system.cpu1.dcache.ReadReq_mshr_uncacheable_latency::total 168605274000 # number of ReadReq MSHR uncacheable cycles +system.cpu1.dcache.WriteReq_mshr_uncacheable_latency::cpu1.data 25182596842 # number of WriteReq MSHR uncacheable cycles +system.cpu1.dcache.WriteReq_mshr_uncacheable_latency::total 25182596842 # number of WriteReq MSHR uncacheable cycles +system.cpu1.dcache.overall_mshr_uncacheable_latency::cpu1.data 193787870842 # number of overall MSHR uncacheable cycles +system.cpu1.dcache.overall_mshr_uncacheable_latency::total 193787870842 # number of overall MSHR uncacheable cycles +system.cpu1.dcache.ReadReq_mshr_miss_rate::cpu1.data 0.023964 # mshr miss rate for ReadReq accesses +system.cpu1.dcache.ReadReq_mshr_miss_rate::total 0.023964 # mshr miss rate for ReadReq accesses +system.cpu1.dcache.WriteReq_mshr_miss_rate::cpu1.data 0.030127 # mshr miss rate for WriteReq accesses +system.cpu1.dcache.WriteReq_mshr_miss_rate::total 0.030127 # mshr miss rate for WriteReq accesses +system.cpu1.dcache.LoadLockedReq_mshr_miss_rate::cpu1.data 0.118999 # mshr miss rate for LoadLockedReq accesses +system.cpu1.dcache.LoadLockedReq_mshr_miss_rate::total 0.118999 # mshr miss rate for LoadLockedReq accesses +system.cpu1.dcache.StoreCondReq_mshr_miss_rate::cpu1.data 0.108319 # mshr miss rate for StoreCondReq accesses +system.cpu1.dcache.StoreCondReq_mshr_miss_rate::total 0.108319 # mshr miss rate for StoreCondReq accesses +system.cpu1.dcache.demand_mshr_miss_rate::cpu1.data 0.026501 # mshr miss rate for demand accesses +system.cpu1.dcache.demand_mshr_miss_rate::total 0.026501 # mshr miss rate for demand accesses +system.cpu1.dcache.overall_mshr_miss_rate::cpu1.data 0.026501 # mshr miss rate for overall accesses +system.cpu1.dcache.overall_mshr_miss_rate::total 0.026501 # mshr miss rate for overall accesses +system.cpu1.dcache.ReadReq_avg_mshr_miss_latency::cpu1.data 11009.033384 # average ReadReq mshr miss latency +system.cpu1.dcache.ReadReq_avg_mshr_miss_latency::total 11009.033384 # average ReadReq mshr miss latency +system.cpu1.dcache.WriteReq_avg_mshr_miss_latency::cpu1.data 41652.856825 # average WriteReq mshr miss latency +system.cpu1.dcache.WriteReq_avg_mshr_miss_latency::total 41652.856825 # average WriteReq mshr miss latency +system.cpu1.dcache.LoadLockedReq_avg_mshr_miss_latency::cpu1.data 6397.308910 # average LoadLockedReq mshr miss latency +system.cpu1.dcache.LoadLockedReq_avg_mshr_miss_latency::total 6397.308910 # average LoadLockedReq mshr miss latency +system.cpu1.dcache.StoreCondReq_avg_mshr_miss_latency::cpu1.data 3193.445881 # average StoreCondReq mshr miss latency +system.cpu1.dcache.StoreCondReq_avg_mshr_miss_latency::total 3193.445881 # average StoreCondReq mshr miss latency system.cpu1.dcache.StoreCondFailReq_avg_mshr_miss_latency::cpu1.data inf # average StoreCondFailReq mshr miss latency system.cpu1.dcache.StoreCondFailReq_avg_mshr_miss_latency::total inf # average StoreCondFailReq mshr miss latency 
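The demand MSHR figures above follow the usual gem5 formula-stat relationships; a minimal arithmetic check (assuming miss_rate = misses / accesses and avg_mshr_miss_latency = mshr_miss_latency / mshr_misses), using the system.cpu1.dcache values in this diff:

    # derived-stat check for system.cpu1.dcache, values taken from this diff
    demand_mshr_misses  = 320518            # demand_mshr_misses::total
    demand_accesses     = 12094672          # demand_accesses::total
    demand_mshr_latency = 8123818550        # demand_mshr_miss_latency::total (ticks)
    print(demand_mshr_misses / demand_accesses)      # ~0.026501 -> demand_mshr_miss_rate::total
    print(demand_mshr_latency / demand_mshr_misses)  # ~25345.904 -> demand_avg_mshr_miss_latency::total
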
-system.cpu1.dcache.demand_avg_mshr_miss_latency::cpu1.data 22262.461618 # average overall mshr miss latency -system.cpu1.dcache.demand_avg_mshr_miss_latency::total 22262.461618 # average overall mshr miss latency -system.cpu1.dcache.overall_avg_mshr_miss_latency::cpu1.data 22262.461618 # average overall mshr miss latency -system.cpu1.dcache.overall_avg_mshr_miss_latency::total 22262.461618 # average overall mshr miss latency +system.cpu1.dcache.demand_avg_mshr_miss_latency::cpu1.data 25345.904286 # average overall mshr miss latency +system.cpu1.dcache.demand_avg_mshr_miss_latency::total 25345.904286 # average overall mshr miss latency +system.cpu1.dcache.overall_avg_mshr_miss_latency::cpu1.data 25345.904286 # average overall mshr miss latency +system.cpu1.dcache.overall_avg_mshr_miss_latency::total 25345.904286 # average overall mshr miss latency system.cpu1.dcache.ReadReq_avg_mshr_uncacheable_latency::cpu1.data inf # average ReadReq mshr uncacheable latency system.cpu1.dcache.ReadReq_avg_mshr_uncacheable_latency::total inf # average ReadReq mshr uncacheable latency system.cpu1.dcache.WriteReq_avg_mshr_uncacheable_latency::cpu1.data inf # average WriteReq mshr uncacheable latency @@ -1855,6 +1923,8 @@ system.iocache.tags.total_refs 0 # To system.iocache.tags.sampled_refs 0 # Sample count of references to valid blocks. system.iocache.tags.avg_refs nan # Average number of references to valid blocks. system.iocache.tags.warmup_cycle 0 # Cycle when the warmup percentage was hit. +system.iocache.tags.tag_accesses 0 # Number of tag accesses +system.iocache.tags.data_accesses 0 # Number of data accesses system.iocache.blocked_cycles::no_mshrs 0 # number of cycles access was blocked system.iocache.blocked_cycles::no_targets 0 # number of cycles access was blocked system.iocache.blocked::no_mshrs 0 # number of cycles access was blocked @@ -1863,10 +1933,10 @@ system.iocache.avg_blocked_cycles::no_mshrs nan # system.iocache.avg_blocked_cycles::no_targets nan # average number of cycles each access was blocked system.iocache.fast_writes 0 # number of fast writes performed system.iocache.cache_copies 0 # number of cache copies performed -system.iocache.ReadReq_mshr_uncacheable_latency::realview.clcd 651879453001 # number of ReadReq MSHR uncacheable cycles -system.iocache.ReadReq_mshr_uncacheable_latency::total 651879453001 # number of ReadReq MSHR uncacheable cycles -system.iocache.overall_mshr_uncacheable_latency::realview.clcd 651879453001 # number of overall MSHR uncacheable cycles -system.iocache.overall_mshr_uncacheable_latency::total 651879453001 # number of overall MSHR uncacheable cycles +system.iocache.ReadReq_mshr_uncacheable_latency::realview.clcd 651789578751 # number of ReadReq MSHR uncacheable cycles +system.iocache.ReadReq_mshr_uncacheable_latency::total 651789578751 # number of ReadReq MSHR uncacheable cycles +system.iocache.overall_mshr_uncacheable_latency::realview.clcd 651789578751 # number of overall MSHR uncacheable cycles +system.iocache.overall_mshr_uncacheable_latency::total 651789578751 # number of overall MSHR uncacheable cycles system.iocache.ReadReq_avg_mshr_uncacheable_latency::realview.clcd inf # average ReadReq mshr uncacheable latency system.iocache.ReadReq_avg_mshr_uncacheable_latency::total inf # average ReadReq mshr uncacheable latency system.iocache.overall_avg_mshr_uncacheable_latency::realview.clcd inf # average overall mshr uncacheable latency diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing/config.ini 
b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing/config.ini index 925b86307..ea47afb6b 100644 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing/config.ini +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing/config.ini @@ -12,7 +12,7 @@ time_sync_spin_threshold=100000000 type=LinuxArmSystem children=bridge cf0 clk_domain cpu cpu_clk_domain intrctrl iobus iocache membus physmem realview terminal vncserver voltage_domain atags_addr=256 -boot_loader=/scratch/nilay/GEM5/system/binaries/boot.arm +boot_loader=/dist/binaries/boot.arm boot_osflags=earlyprintk console=ttyAMA0 lpj=19988480 norandmaps rw loglevel=8 mem=128MB root=/dev/sda1 cache_line_size=64 clk_domain=system.clk_domain @@ -23,7 +23,7 @@ eventq_index=0 flags_addr=268435504 gic_cpu_addr=520093952 init_param=0 -kernel=/scratch/nilay/GEM5/system/binaries/vmlinux.arm.smp.fb.2.6.38.8 +kernel=/dist/binaries/vmlinux.arm.smp.fb.2.6.38.8 load_addr_mask=268435455 machine_type=RealView_PBX mem_mode=timing @@ -75,7 +75,7 @@ table_size=65536 [system.cf0.image.child] type=RawDiskImage eventq_index=0 -image_file=/scratch/nilay/GEM5/system/disks/linux-arm-ael.img +image_file=/dist/disks/linux-arm-ael.img read_only=true [system.clk_domain] @@ -130,6 +130,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu.dcache.tags @@ -146,6 +147,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu.dtb] @@ -178,6 +180,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu.icache.tags @@ -194,6 +197,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu.interrupts] @@ -248,6 +252,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=4194304 system=system tags=system.cpu.l2cache.tags @@ -264,6 +269,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=20 +sequential_access=false size=4194304 [system.cpu.toL2Bus] @@ -317,6 +323,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=50 +sequential_access=false size=1024 system=system tags=system.iocache.tags @@ -333,6 +340,7 @@ block_size=64 clk_domain=system.clk_domain eventq_index=0 hit_latency=50 +sequential_access=false size=1024 [system.membus] diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing/simerr b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing/simerr index eda827fb8..41742298b 100755 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing/simerr +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing/simerr @@ -1,7 +1,6 @@ warn: Sockets disabled, not accepting vnc client connections warn: Sockets disabled, not accepting terminal connections warn: Sockets disabled, not accepting gdb connections -warn: DTB file specified, but no device tree support in kernel warn: The clidr register always reports 0 caches. warn: clidr LoUIS field of 0b001 to match current ARM implementations. warn: The csselr register isn't implemented. 
@@ -14,4 +13,3 @@ warn: instruction 'mcr icimvau' unimplemented warn: LCD dual screen mode not supported warn: instruction 'mcr icialluis' unimplemented warn: instruction 'mcr bpiallis' unimplemented -hack: be nice to actually delete the event here diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing/simout b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing/simout index b95a8c30f..866b5bc98 100755 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing/simout +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing/simout @@ -1,14 +1,12 @@ -Redirecting stdout to build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-simple-timing/simout -Redirecting stderr to build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-simple-timing/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 07:58:15 -gem5 started Sep 22 2013 08:14:19 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:24:06 +gem5 started Jan 22 2014 17:31:30 +gem5 executing on u200540-lin command line: build/ARM/gem5.opt -d build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-simple-timing -re tests/run.py build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-simple-timing Global frequency set at 1000000000000 ticks per second -info: kernel located at: /dist/m5/system/binaries/vmlinux.arm.smp.fb.2.6.38.8 +info: kernel located at: /dist/binaries/vmlinux.arm.smp.fb.2.6.38.8 info: Using bootloader at address 0x80000000 info: Entering event queue @ 0. Starting simulation... -Exiting @ tick 2615716222000 because m5_exit instruction encountered +Exiting @ tick 2616536483000 because m5_exit instruction encountered diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing/stats.txt b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing/stats.txt index df8a2beae..9c560044d 100644 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing/stats.txt +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-simple-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 2.616536 # Nu sim_ticks 2616536483000 # Number of ticks simulated final_tick 2616536483000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 343075 # Simulator instruction rate (inst/s) -host_op_rate 436578 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 14912044248 # Simulator tick rate (ticks/s) -host_mem_usage 444348 # Number of bytes of host memory used -host_seconds 175.46 # Real time elapsed on the host +host_inst_rate 577538 # Simulator instruction rate (inst/s) +host_op_rate 734941 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 25103147507 # Simulator tick rate (ticks/s) +host_mem_usage 400220 # Number of bytes of host memory used +host_seconds 104.23 # Real time elapsed on the host sim_insts 60197580 # Number of instructions simulated sim_ops 76603973 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::realview.clcd 122683392 # Number of bytes read from this memory system.physmem.bytes_read::cpu.dtb.walker 320 # Number of bytes read from this memory system.physmem.bytes_read::cpu.itb.walker 128 # Number of bytes read from this memory @@ -811,6 +813,7 @@ 
system.iobus.respLayer0.occupancy 2374820000 # La system.iobus.respLayer0.utilization 0.1 # Layer utilization (%) system.iobus.respLayer1.occupancy 42035380750 # Layer occupancy (ticks) system.iobus.respLayer1.utilization 1.6 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.dtb.inst_hits 0 # ITB inst hits system.cpu.dtb.inst_misses 0 # ITB inst misses system.cpu.dtb.read_hits 14995644 # DTB read hits @@ -886,6 +889,14 @@ system.cpu.icache.tags.warmup_cycle 19982971250 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 510.868538 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.997790 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.997790 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 44 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 195 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::2 267 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::3 6 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 62348185 # Number of tag accesses +system.cpu.icache.tags.data_accesses 62348185 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 60634641 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 60634641 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 60634641 # number of demand (read+write) hits @@ -980,6 +991,18 @@ system.cpu.l2cache.tags.occ_percent::cpu.itb.walker 0.000000 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.106711 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.092147 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.774455 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1023 4 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_blocks::1024 65378 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1023::4 4 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 28 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 22 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::2 2163 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::3 6898 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::4 56267 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1023 0.000061 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.997589 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 17137304 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 17137304 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.dtb.walker 8705 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::cpu.itb.walker 3532 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::cpu.inst 844551 # number of ReadReq hits @@ -1210,6 +1233,13 @@ system.cpu.dcache.tags.warmup_cycle 664004250 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 511.876746 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.999759 # 
Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.999759 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 74 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 329 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::2 109 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 97755015 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 97755015 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 13195741 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 13195741 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 9972594 # number of WriteReq hits @@ -1375,6 +1405,8 @@ system.iocache.tags.total_refs 0 # To system.iocache.tags.sampled_refs 0 # Sample count of references to valid blocks. system.iocache.tags.avg_refs nan # Average number of references to valid blocks. system.iocache.tags.warmup_cycle 0 # Cycle when the warmup percentage was hit. +system.iocache.tags.tag_accesses 0 # Number of tag accesses +system.iocache.tags.data_accesses 0 # Number of data accesses system.iocache.blocked_cycles::no_mshrs 0 # number of cycles access was blocked system.iocache.blocked_cycles::no_targets 0 # number of cycles access was blocked system.iocache.blocked::no_mshrs 0 # number of cycles access was blocked diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-switcheroo-atomic/config.ini b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-switcheroo-atomic/config.ini index 44d2483e8..4f02f4af8 100644 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-switcheroo-atomic/config.ini +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-switcheroo-atomic/config.ini @@ -12,7 +12,7 @@ time_sync_spin_threshold=100000000 type=LinuxArmSystem children=bridge cf0 clk_domain cpu0 cpu1 cpu_clk_domain intrctrl iobus iocache l2c membus physmem realview terminal toL2Bus vncserver voltage_domain atags_addr=256 -boot_loader=/scratch/nilay/GEM5/system/binaries/boot.arm +boot_loader=/dist/binaries/boot.arm boot_osflags=earlyprintk console=ttyAMA0 lpj=19988480 norandmaps rw loglevel=8 mem=128MB root=/dev/sda1 cache_line_size=64 clk_domain=system.clk_domain @@ -23,12 +23,12 @@ eventq_index=0 flags_addr=268435504 gic_cpu_addr=520093952 init_param=0 -kernel=/scratch/nilay/GEM5/system/binaries/vmlinux.arm.smp.fb.2.6.38.8 +kernel=/dist/binaries/vmlinux.arm.smp.fb.2.6.38.8 load_addr_mask=268435455 machine_type=RealView_PBX mem_mode=atomic mem_ranges=0:134217727 -memories=system.realview.nvmem system.physmem +memories=system.physmem system.realview.nvmem multi_proc=true num_work_ids=16 panic_on_oops=true @@ -75,7 +75,7 @@ table_size=65536 [system.cf0.image.child] type=RawDiskImage eventq_index=0 -image_file=/scratch/nilay/GEM5/system/disks/linux-arm-ael.img +image_file=/dist/disks/linux-arm-ael.img read_only=true [system.clk_domain] @@ -137,6 +137,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu0.dcache.tags @@ -153,6 +154,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu0.dtb] @@ -185,6 +187,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 
+sequential_access=false
size=32768
system=system
tags=system.cpu0.icache.tags
@@ -201,6 +204,7 @@ block_size=64
clk_domain=system.cpu_clk_domain
eventq_index=0
hit_latency=2
+sequential_access=false
size=32768
[system.cpu0.interrupts]
@@ -366,6 +370,7 @@ mshrs=20
prefetch_on_access=false
prefetcher=Null
response_latency=50
+sequential_access=false
size=1024
system=system
tags=system.iocache.tags
@@ -382,6 +387,7 @@ block_size=64
clk_domain=system.clk_domain
eventq_index=0
hit_latency=50
+sequential_access=false
size=1024
[system.l2c]
@@ -399,6 +405,7 @@ mshrs=20
prefetch_on_access=false
prefetcher=Null
response_latency=20
+sequential_access=false
size=4194304
system=system
tags=system.l2c.tags
@@ -415,6 +422,7 @@ block_size=64
clk_domain=system.cpu_clk_domain
eventq_index=0
hit_latency=20
+sequential_access=false
size=4194304
[system.membus]
diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-switcheroo-atomic/simerr b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-switcheroo-atomic/simerr
index 06edbeba7..38a425305 100755
--- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-switcheroo-atomic/simerr
+++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-switcheroo-atomic/simerr
@@ -1,7 +1,6 @@
warn: Sockets disabled, not accepting vnc client connections
warn: Sockets disabled, not accepting terminal connections
warn: Sockets disabled, not accepting gdb connections
-warn: DTB file specified, but no device tree support in kernel
warn: The clidr register always reports 0 caches.
warn: clidr LoUIS field of 0b001 to match current ARM implementations.
warn: The csselr register isn't implemented.
@@ -11,7 +10,6 @@ warn: instruction 'mcr icialluis' unimplemented
warn: instruction 'mcr dccimvac' unimplemented
warn: instruction 'mcr dccmvau' unimplemented
warn: instruction 'mcr icimvau' unimplemented
-hack: be nice to actually delete the event here
warn: LCD dual screen mode not supported
warn: instruction 'mcr icialluis' unimplemented
warn: instruction 'mcr bpiallis' unimplemented
diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-switcheroo-atomic/simout b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-switcheroo-atomic/simout
index 9b6e36065..312a2d840 100755
--- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-switcheroo-atomic/simout
+++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-switcheroo-atomic/simout
@@ -1,9349 +1,8 @@
-Redirecting stdout to build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-switcheroo-atomic/simout
-Redirecting stderr to build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-switcheroo-atomic/simerr
gem5 Simulator System. http://gem5.org
gem5 is copyrighted software; use the --copyright option for details.
-gem5 compiled Sep 22 2013 07:58:15
-gem5 started Sep 22 2013 07:58:36
-gem5 executing on zizzer
+gem5 compiled Jan 22 2014 17:24:06
+gem5 started Jan 22 2014 17:32:17
+gem5 executing on u200540-lin
command line: build/ARM/gem5.opt -d build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-switcheroo-atomic -re tests/run.py build/ARM/tests/opt/quick/fs/10.linux-boot/arm/linux/realview-switcheroo-atomic
Global frequency set at 1000000000000 ticks per second
-info: kernel located at: /dist/m5/system/binaries/vmlinux.arm.smp.fb.2.6.38.8
-info: Using bootloader at address 0x80000000
-info: Entering event queue @ 0. Starting simulation...
-Switching CPUs...
-Next CPU: AtomicSimpleCPU
-switching cpus
-info: Entering event queue @ 1000000000. Starting simulation...
-Switching CPUs...
-Next CPU: AtomicSimpleCPU
-switching cpus
-info: Entering event queue @ 2000000000. Starting simulation...
[... the remaining removed ("-") simout lines repeat this same sequence of "Switching CPUs...", "Next CPU: AtomicSimpleCPU", "switching cpus", "info: Entering event queue @ <tick>. Starting simulation..." roughly once per 1000000000 ticks, continuing beyond tick 487000013500 ...]
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 488000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 489000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 490000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 491000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 492000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 493000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 494000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 495000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 496000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 497000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 498000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 499000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 500000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 501000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 502000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 503000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 504000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 505000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 506000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 507000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 508000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 509000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 510000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 511000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 512000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 513000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 514000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 515000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 516000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 517000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 518000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 519000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 520000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 521000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 522000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 523000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 524000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 525000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 526000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 527000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 528000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 529000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 530000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 531000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 532000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 533000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 534000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 535000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 536000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 537000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 538000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 539000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 540000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 541000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 542000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 543000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 544000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 545000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 546000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 547000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 548000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 549000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 550000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 551000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 552000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 553000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 554000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 555000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 556000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 557000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 558000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 559000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 560000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 561000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 562000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 563000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 564000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 565000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 566000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 567000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 568000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 569000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 570000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 571000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 572000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 573000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 574000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 575000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 576000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 577000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 578000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 579000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 580000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 581000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 582000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 583000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 584000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 585000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 586000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 587000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 588000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 589000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 590000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 591000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 592000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 593000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 594000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 595000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 596000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 597000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 598000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 599000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 600000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 601000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 602000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 603000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 604000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 605000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 606000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 607000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 608000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 609000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 610000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 611000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 612000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 613000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 614000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 615000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 616000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 617000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 618000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 619000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 620000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 621000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 622000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 623000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 624000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 625000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 626000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 627000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 628000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 629000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 630000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 631000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 632000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 633000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 634000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 635000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 636000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 637000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 638000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 639000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 640000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 641000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 642000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 643000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 644000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 645000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 646000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 647000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 648000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 649000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 650000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 651000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 652000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 653000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 654000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 655000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 656000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 657000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 658000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 659000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 660000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 661000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 662000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 663000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 664000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 665000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 666000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 667000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 668000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 669000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 670000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 671000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 672000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 673000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 674000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 675000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 676000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 677000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 678000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 679000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 680000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 681000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 682000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 683000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 684000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 685000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 686000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 687000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 688000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 689000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 690000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 691000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 692000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 693000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 694000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 695000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 696000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 697000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 698000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 699000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 700000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 701000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 702000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 703000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 704000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 705000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 706000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 707000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 708000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 709000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 710000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 711000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 712000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 713000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 714000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 715000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 716000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 717000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 718000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 719000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 720000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 721000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 722000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 723000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 724000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 725000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 726000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 727000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 728000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 729000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 730000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 731000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 732000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 733000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 734000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 735000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 736000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 737000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 738000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 739000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 740000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 741000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 742000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 743000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 744000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 745000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 746000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 747000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 748000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 749000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 750000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 751000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 752000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 753000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 754000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 755000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 756000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 757000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 758000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 759000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 760000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 761000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 762000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 763000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 764000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 765000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 766000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 767000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 768000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 769000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 770000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 771000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 772000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 773000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 774000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 775000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 776000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 777000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 778000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 779000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 780000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 781000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 782000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 783000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 784000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 785000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 786000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 787000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 788000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 789000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 790000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 791000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 792000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 793000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 794000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 795000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 796000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 797000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 798000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 799000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 800000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 801000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 802000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 803000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 804000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 805000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 806000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 807000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 808000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 809000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 810000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 811000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 812000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 813000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 814000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 815000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 816000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 817000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 818000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 819000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 820000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 821000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 822000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 823000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 824000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 825000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 826000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 827000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 828000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 829000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 830000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 831000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 832000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 833000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 834000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 835000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 836000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 837000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 838000013500. Starting simulation... -Switching CPUs... 
-[elided: run of removed reference-output lines. The deleted text repeats the four-line block "Switching CPUs..." / "Next CPU: AtomicSimpleCPU" / "switching cpus" / "info: Entering event queue @ <tick>. Starting simulation..." once per interval, with <tick> advancing from 839000013500 to 1405000013500 in steps of 1000000000.]
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1406000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1407000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1408000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1409000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1410000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1411000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1412000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1413000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1414000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1415000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1416000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1417000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1418000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1419000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1420000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1421000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1422000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1423000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1424000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1425000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1426000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1427000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1428000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1429000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1430000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1431000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1432000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1433000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1434000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1435000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1436000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1437000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1438000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1439000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1440000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1441000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1442000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1443000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1444000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1445000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1446000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1447000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1448000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1449000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1450000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1451000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1452000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1453000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1454000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1455000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1456000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1457000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1458000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1459000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1460000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1461000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1462000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1463000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1464000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1465000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1466000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1467000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1468000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1469000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1470000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1471000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1472000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1473000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1474000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1475000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1476000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1477000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1478000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1479000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1480000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1481000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1482000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1483000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1484000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1485000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1486000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1487000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1488000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1489000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1490000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1491000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1492000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1493000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1494000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1495000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1496000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1497000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1498000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1499000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1500000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1501000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1502000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1503000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1504000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1505000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1506000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1507000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1508000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1509000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1510000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1511000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1512000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1513000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1514000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1515000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1516000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1517000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1518000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1519000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1520000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1521000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1522000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1523000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1524000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1525000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1526000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1527000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1528000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1529000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1530000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1531000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1532000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1533000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1534000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1535000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1536000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1537000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1538000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1539000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1540000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1541000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1542000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1543000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1544000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1545000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1546000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1547000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1548000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1549000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1550000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1551000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1552000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1553000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1554000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1555000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1556000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1557000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1558000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1559000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1560000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1561000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1562000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1563000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1564000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1565000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1566000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1567000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1568000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1569000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1570000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1571000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1572000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1573000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1574000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1575000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1576000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1577000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1578000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1579000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1580000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1581000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1582000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1583000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1584000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1585000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1586000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1587000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1588000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1589000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1590000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1591000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1592000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1593000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1594000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1595000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1596000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1597000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1598000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1599000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1600000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1601000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1602000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1603000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1604000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1605000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1606000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1607000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1608000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1609000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1610000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1611000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1612000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1613000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1614000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1615000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1616000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1617000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1618000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1619000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1620000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1621000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1622000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1623000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1624000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1625000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1626000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1627000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1628000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1629000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1630000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1631000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1632000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1633000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1634000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1635000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1636000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1637000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1638000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1639000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1640000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1641000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1642000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1643000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1644000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1645000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1646000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1647000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1648000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1649000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1650000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1651000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1652000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1653000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1654000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1655000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1656000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1657000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1658000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1659000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1660000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1661000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1662000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1663000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1664000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1665000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1666000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1667000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1668000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1669000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1670000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1671000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1672000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1673000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1674000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1675000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1676000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1677000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1678000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1679000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1680000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1681000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1682000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1683000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1684000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1685000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1686000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1687000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1688000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1689000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1690000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1691000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1692000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1693000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1694000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1695000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1696000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1697000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1698000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1699000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1700000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1701000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1702000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1703000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1704000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1705000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1706000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1707000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1708000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1709000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1710000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1711000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1712000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1713000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1714000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1715000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1716000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1717000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1718000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1719000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1720000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1721000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1722000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1723000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1724000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1725000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1726000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1727000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1728000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1729000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1730000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1731000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1732000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1733000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1734000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1735000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1736000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1737000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1738000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1739000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1740000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1741000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1742000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1743000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1744000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1745000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1746000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1747000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1748000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1749000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1750000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1751000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1752000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1753000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1754000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1755000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 1756000013500. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU
-switching cpus
-info: Entering event queue @ 1757000013500. Starting simulation...
-Switching CPUs...
[the same four-line CPU-switch block repeats, with the event-queue tick advancing by 1000000000000 each time, through tick 2296000013500]
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2297000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2298000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2299000013500. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -info: Entering event queue @ 2300000013500. Starting simulation... -switching cpus -info: Entering event queue @ 2300000016000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2301000016000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2302000016000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2303000016000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2304000016000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2305000016000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2306000016000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2307000016000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2308000016000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2309000016000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2310000016000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2311000016000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2312000016000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -info: Entering event queue @ 2313000016000. Starting simulation... -switching cpus -info: Entering event queue @ 2313000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2314000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2315000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2316000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2317000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2318000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2319000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2320000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2321000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2322000020000. Starting simulation... -Switching CPUs... 
-Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2323000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2324000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2325000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2326000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2327000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2328000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2329000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2330000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -switching cpus -info: Entering event queue @ 2331000020000. Starting simulation... -Switching CPUs... -Next CPU: AtomicSimpleCPU -info: Entering event queue @ 2332000020000. Starting simulation... -switching cpus -info: Entering event queue @ 2332000020500. Starting simulation... diff --git a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-switcheroo-atomic/stats.txt b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-switcheroo-atomic/stats.txt index 7eb912550..af2c3099c 100644 --- a/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-switcheroo-atomic/stats.txt +++ b/tests/quick/fs/10.linux-boot/ref/arm/linux/realview-switcheroo-atomic/stats.txt @@ -4,25 +4,15 @@ sim_seconds 2.332810 # Nu sim_ticks 2332810264000 # Number of ticks simulated final_tick 2332810264000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 840369 # Simulator instruction rate (inst/s) -host_op_rate 1080663 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 32452660609 # Simulator tick rate (ticks/s) -host_mem_usage 444352 # Number of bytes of host memory used -host_seconds 71.88 # Real time elapsed on the host +host_inst_rate 1583722 # Simulator instruction rate (inst/s) +host_op_rate 2036569 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 61158803315 # Simulator tick rate (ticks/s) +host_mem_usage 399324 # Number of bytes of host memory used +host_seconds 38.14 # Real time elapsed on the host sim_insts 60408639 # Number of instructions simulated sim_ops 77681819 # Number of ops (including micro ops) simulated -system.realview.nvmem.bytes_read::cpu0.inst 20 # Number of bytes read from this memory -system.realview.nvmem.bytes_read::total 20 # Number of bytes read from this memory -system.realview.nvmem.bytes_inst_read::cpu0.inst 20 # Number of instructions bytes read from this memory -system.realview.nvmem.bytes_inst_read::total 20 # Number of instructions bytes read from this memory -system.realview.nvmem.num_reads::cpu0.inst 5 # Number of read requests responded to by this memory -system.realview.nvmem.num_reads::total 5 # Number of read requests responded to by this memory -system.realview.nvmem.bw_read::cpu0.inst 9 # Total read bandwidth from this memory (bytes/s) -system.realview.nvmem.bw_read::total 9 # Total read bandwidth from this memory (bytes/s) -system.realview.nvmem.bw_inst_read::cpu0.inst 9 # Instruction read 
bandwidth from this memory (bytes/s)
-system.realview.nvmem.bw_inst_read::total 9 # Instruction read bandwidth from this memory (bytes/s)
-system.realview.nvmem.bw_total::cpu0.inst 9 # Total bandwidth to/from this memory (bytes/s)
-system.realview.nvmem.bw_total::total 9 # Total bandwidth to/from this memory (bytes/s)
+system.voltage_domain.voltage 1 # Voltage in Volts
+system.clk_domain.clock 1000 # Clock period in ticks
system.physmem.bytes_read::realview.clcd 111673344 # Number of bytes read from this memory
system.physmem.bytes_read::cpu0.dtb.walker 128 # Number of bytes read from this memory
system.physmem.bytes_read::cpu0.itb.walker 192 # Number of bytes read from this memory
@@ -74,9 +64,22 @@ system.physmem.bw_total::cpu0.data 3386724 # To
system.physmem.bw_total::cpu1.inst 91056 # Total bandwidth to/from this memory (bytes/s)
system.physmem.bw_total::cpu1.data 1794913 # Total bandwidth to/from this memory (bytes/s)
system.physmem.bw_total::total 54942145 # Total bandwidth to/from this memory (bytes/s)
+system.realview.nvmem.bytes_read::cpu0.inst 20 # Number of bytes read from this memory
+system.realview.nvmem.bytes_read::total 20 # Number of bytes read from this memory
+system.realview.nvmem.bytes_inst_read::cpu0.inst 20 # Number of instructions bytes read from this memory
+system.realview.nvmem.bytes_inst_read::total 20 # Number of instructions bytes read from this memory
+system.realview.nvmem.num_reads::cpu0.inst 5 # Number of read requests responded to by this memory
+system.realview.nvmem.num_reads::total 5 # Number of read requests responded to by this memory
+system.realview.nvmem.bw_read::cpu0.inst 9 # Total read bandwidth from this memory (bytes/s)
+system.realview.nvmem.bw_read::total 9 # Total read bandwidth from this memory (bytes/s)
+system.realview.nvmem.bw_inst_read::cpu0.inst 9 # Instruction read bandwidth from this memory (bytes/s)
+system.realview.nvmem.bw_inst_read::total 9 # Instruction read bandwidth from this memory (bytes/s)
+system.realview.nvmem.bw_total::cpu0.inst 9 # Total bandwidth to/from this memory (bytes/s)
+system.realview.nvmem.bw_total::total 9 # Total bandwidth to/from this memory (bytes/s)
system.membus.throughput 55969561 # Throughput (bytes/s)
system.membus.data_through_bus 130566366 # Total data (bytes)
system.membus.snoop_data_through_bus 0 # Total snoop data (bytes)
+system.cpu_clk_domain.clock 500 # Clock period in ticks
system.l2c.tags.replacements 62242 # number of replacements
system.l2c.tags.tagsinuse 50006.300222 # Cycle average of tags in use
system.l2c.tags.total_refs 1678485 # Total number of references to valid blocks.
@@ -98,6 +101,18 @@ system.l2c.tags.occ_percent::cpu0.data 0.048104 # Av
system.l2c.tags.occ_percent::cpu1.inst 0.032004 # Average percentage of cache occupancy
system.l2c.tags.occ_percent::cpu1.data 0.044807 # Average percentage of cache occupancy
system.l2c.tags.occ_percent::total 0.763036 # Average percentage of cache occupancy
+system.l2c.tags.occ_task_id_blocks::1023 2 # Occupied blocks per task id
+system.l2c.tags.occ_task_id_blocks::1024 65383 # Occupied blocks per task id
+system.l2c.tags.age_task_id_blocks_1023::4 2 # Occupied blocks per task id
+system.l2c.tags.age_task_id_blocks_1024::0 40 # Occupied blocks per task id
+system.l2c.tags.age_task_id_blocks_1024::1 176 # Occupied blocks per task id
+system.l2c.tags.age_task_id_blocks_1024::2 3589 # Occupied blocks per task id
+system.l2c.tags.age_task_id_blocks_1024::3 9187 # Occupied blocks per task id
+system.l2c.tags.age_task_id_blocks_1024::4 52391 # Occupied blocks per task id
+system.l2c.tags.occ_task_id_percent::1023 0.000031 # Percentage of cache occupancy per task id
+system.l2c.tags.occ_task_id_percent::1024 0.997665 # Percentage of cache occupancy per task id
+system.l2c.tags.tag_accesses 17104735 # Number of tag accesses
+system.l2c.tags.data_accesses 17104735 # Number of data accesses
system.l2c.ReadReq_hits::cpu0.dtb.walker 9005 # number of ReadReq hits
system.l2c.ReadReq_hits::cpu0.itb.walker 3277 # number of ReadReq hits
system.l2c.ReadReq_hits::cpu0.inst 473134 # number of ReadReq hits
@@ -321,6 +336,14 @@ system.cpu0.icache.tags.occ_blocks::cpu1.inst 67.168341
system.cpu0.icache.tags.occ_percent::cpu0.inst 0.868184 # Average percentage of cache occupancy
system.cpu0.icache.tags.occ_percent::cpu1.inst 0.131188 # Average percentage of cache occupancy
system.cpu0.icache.tags.occ_percent::total 0.999372 # Average percentage of cache occupancy
+system.cpu0.icache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id
+system.cpu0.icache.tags.age_task_id_blocks_1024::0 177 # Occupied blocks per task id
+system.cpu0.icache.tags.age_task_id_blocks_1024::1 78 # Occupied blocks per task id
+system.cpu0.icache.tags.age_task_id_blocks_1024::2 255 # Occupied blocks per task id
+system.cpu0.icache.tags.age_task_id_blocks_1024::3 2 # Occupied blocks per task id
+system.cpu0.icache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id
+system.cpu0.icache.tags.tag_accesses 62285702 # Number of tag accesses
+system.cpu0.icache.tags.data_accesses 62285702 # Number of data accesses
system.cpu0.icache.ReadReq_hits::cpu0.inst 32064735 # number of ReadReq hits
system.cpu0.icache.ReadReq_hits::cpu1.inst 28518763 # number of ReadReq hits
system.cpu0.icache.ReadReq_hits::total 60583498 # number of ReadReq hits
@@ -377,6 +400,13 @@ system.cpu0.dcache.tags.occ_blocks::cpu1.data 60.698093
system.cpu0.dcache.tags.occ_percent::cpu0.data 0.881443 # Average percentage of cache occupancy
system.cpu0.dcache.tags.occ_percent::cpu1.data 0.118551 # Average percentage of cache occupancy
system.cpu0.dcache.tags.occ_percent::total 0.999994 # Average percentage of cache occupancy
+system.cpu0.dcache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id
+system.cpu0.dcache.tags.age_task_id_blocks_1024::0 278 # Occupied blocks per task id
+system.cpu0.dcache.tags.age_task_id_blocks_1024::1 208 # Occupied blocks per task id
+system.cpu0.dcache.tags.age_task_id_blocks_1024::2 26 # Occupied blocks per task id
+system.cpu0.dcache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id
+system.cpu0.dcache.tags.tag_accesses 97632366 # Number of tag accesses +system.cpu0.dcache.tags.data_accesses 97632366 # Number of data accesses system.cpu0.dcache.ReadReq_hits::cpu0.data 6995590 # number of ReadReq hits system.cpu0.dcache.ReadReq_hits::cpu1.data 6184430 # number of ReadReq hits system.cpu0.dcache.ReadReq_hits::total 13180020 # number of ReadReq hits @@ -526,6 +556,8 @@ system.iocache.tags.total_refs 0 # To system.iocache.tags.sampled_refs 0 # Sample count of references to valid blocks. system.iocache.tags.avg_refs nan # Average number of references to valid blocks. system.iocache.tags.warmup_cycle 0 # Cycle when the warmup percentage was hit. +system.iocache.tags.tag_accesses 0 # Number of tag accesses +system.iocache.tags.data_accesses 0 # Number of data accesses system.iocache.blocked_cycles::no_mshrs 0 # number of cycles access was blocked system.iocache.blocked_cycles::no_targets 0 # number of cycles access was blocked system.iocache.blocked::no_mshrs 0 # number of cycles access was blocked diff --git a/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-atomic/config.ini b/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-atomic/config.ini index 299ddfd61..9b7646241 100644 --- a/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-atomic/config.ini +++ b/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-atomic/config.ini @@ -20,7 +20,7 @@ eventq_index=0 init_param=0 intel_mp_pointer=system.intel_mp_pointer intel_mp_table=system.intel_mp_table -kernel=/scratch/nilay/GEM5/system/binaries/x86_64-vmlinux-2.6.22.9 +kernel=/dist/binaries/x86_64-vmlinux-2.6.22.9 load_addr_mask=18446744073709551615 mem_mode=atomic mem_ranges=0:134217727 @@ -144,6 +144,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu.dcache.tags @@ -160,6 +161,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu.dtb] @@ -192,6 +194,7 @@ mshrs=10 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=1024 system=system tags=system.cpu.dtb_walker_cache.tags @@ -208,6 +211,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=1024 [system.cpu.icache] @@ -225,6 +229,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu.icache.tags @@ -241,6 +246,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu.interrupts] @@ -289,6 +295,7 @@ mshrs=10 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=1024 system=system tags=system.cpu.itb_walker_cache.tags @@ -305,6 +312,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=1024 [system.cpu.l2cache] @@ -322,6 +330,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=4194304 system=system tags=system.cpu.l2cache.tags @@ -338,6 +347,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=20 +sequential_access=false size=4194304 [system.cpu.toL2Bus] @@ -804,6 +814,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=50 +sequential_access=false size=1024 system=system tags=system.iocache.tags @@ -820,6 +831,7 @@ block_size=64 clk_domain=system.clk_domain eventq_index=0 
hit_latency=50 +sequential_access=false size=1024 [system.membus] @@ -1149,7 +1161,7 @@ table_size=65536 [system.pc.south_bridge.ide.disks0.image.child] type=RawDiskImage eventq_index=0 -image_file=/scratch/nilay/GEM5/system/disks/linux-x86.img +image_file=/dist/disks/linux-x86.img read_only=true [system.pc.south_bridge.ide.disks1] @@ -1172,7 +1184,7 @@ table_size=65536 [system.pc.south_bridge.ide.disks1.image.child] type=RawDiskImage eventq_index=0 -image_file=/scratch/nilay/GEM5/system/disks/linux-bigswap2.img +image_file=/dist/disks/linux-bigswap2.img read_only=true [system.pc.south_bridge.int_lines0] diff --git a/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-atomic/simerr b/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-atomic/simerr index 347fa32d8..bb1874a4f 100755 --- a/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-atomic/simerr +++ b/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-atomic/simerr @@ -1,4 +1,3 @@ -warn: add_child('terminal'): child 'terminal' already has parent warn: Sockets disabled, not accepting terminal connections warn: Reading current count from inactive timer. warn: Sockets disabled, not accepting gdb connections @@ -7,4 +6,3 @@ warn: x86 cpuid: unknown family 0x8086 warn: Tried to clear PCI interrupt 14 warn: Unknown mouse command 0xe1. warn: instruction 'wbinvd' unimplemented -hack: be nice to actually delete the event here diff --git a/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-atomic/simout b/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-atomic/simout index bbf756dc6..04f5d2889 100755 --- a/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-atomic/simout +++ b/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-atomic/simout @@ -1,12 +1,12 @@ gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Oct 16 2013 01:35:57 -gem5 started Oct 16 2013 01:42:07 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:10:34 +gem5 started Jan 22 2014 17:30:13 +gem5 executing on u200540-lin command line: build/X86/gem5.opt -d build/X86/tests/opt/quick/fs/10.linux-boot/x86/linux/pc-simple-atomic -re tests/run.py build/X86/tests/opt/quick/fs/10.linux-boot/x86/linux/pc-simple-atomic Global frequency set at 1000000000000 ticks per second -info: kernel located at: /dist/m5/system/binaries/x86_64-vmlinux-2.6.22.9 +info: kernel located at: /dist/binaries/x86_64-vmlinux-2.6.22.9 0: rtc: Real-time clock set to Sun Jan 1 00:00:00 2012 info: Entering event queue @ 0. Starting simulation... 
-Exiting @ tick 5112126311000 because m5_exit instruction encountered
+Exiting @ tick 5112126264500 because m5_exit instruction encountered
diff --git a/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-atomic/stats.txt b/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-atomic/stats.txt
index 8eed6a1f4..168ad24c1 100644
--- a/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-atomic/stats.txt
+++ b/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-atomic/stats.txt
@@ -4,13 +4,15 @@ sim_seconds 5.112126 # Nu
sim_ticks 5112126264500 # Number of ticks simulated
final_tick 5112126264500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset)
sim_freq 1000000000000 # Frequency of simulated ticks
-host_inst_rate 1049292 # Simulator instruction rate (inst/s)
-host_op_rate 2148359 # Simulator op (including micro ops) rate (op/s)
-host_tick_rate 26829969216 # Simulator tick rate (ticks/s)
-host_mem_usage 634884 # Number of bytes of host memory used
-host_seconds 190.54 # Real time elapsed on the host
+host_inst_rate 1777208 # Simulator instruction rate (inst/s)
+host_op_rate 3638722 # Simulator op (including micro ops) rate (op/s)
+host_tick_rate 45442487875 # Simulator tick rate (ticks/s)
+host_mem_usage 590176 # Number of bytes of host memory used
+host_seconds 112.50 # Real time elapsed on the host
sim_insts 199929810 # Number of instructions simulated
sim_ops 409343850 # Number of ops (including micro ops) simulated
+system.voltage_domain.voltage 1 # Voltage in Volts
+system.clk_domain.clock 1000 # Clock period in ticks
system.physmem.bytes_read::pc.south_bridge.ide 2421184 # Number of bytes read from this memory
system.physmem.bytes_read::cpu.dtb.walker 64 # Number of bytes read from this memory
system.physmem.bytes_read::cpu.itb.walker 320 # Number of bytes read from this memory
@@ -58,6 +60,11 @@ system.iocache.tags.warmup_cycle 4994846763009 # C
system.iocache.tags.occ_blocks::pc.south_bridge.ide 0.042448 # Average occupied blocks per requestor
system.iocache.tags.occ_percent::pc.south_bridge.ide 0.002653 # Average percentage of cache occupancy
system.iocache.tags.occ_percent::total 0.002653 # Average percentage of cache occupancy
+system.iocache.tags.occ_task_id_blocks::1023 16 # Occupied blocks per task id
+system.iocache.tags.age_task_id_blocks_1023::2 16 # Occupied blocks per task id
+system.iocache.tags.occ_task_id_percent::1023 1 # Percentage of cache occupancy per task id
+system.iocache.tags.tag_accesses 428616 # Number of tag accesses
+system.iocache.tags.data_accesses 428616 # Number of data accesses
system.iocache.ReadReq_misses::pc.south_bridge.ide 904 # number of ReadReq misses
system.iocache.ReadReq_misses::total 904 # number of ReadReq misses
system.iocache.WriteReq_misses::pc.south_bridge.ide 46720 # number of WriteReq misses
@@ -107,6 +114,8 @@ system.pc.south_bridge.ide.disks1.dma_write_bytes 4096
system.pc.south_bridge.ide.disks1.dma_write_txs 1 # Number of DMA write transactions.
system.iobus.throughput 2555207 # Throughput (bytes/s) system.iobus.data_through_bus 13062542 # Total data (bytes) +system.cpu_clk_domain.clock 500 # Clock period in ticks +system.cpu.apic_clk_domain.clock 8000 # Clock period in ticks system.cpu.numCycles 10224253904 # number of cpu cycles simulated system.cpu.numWorkItemsStarted 0 # number of work items this cpu started system.cpu.numWorkItemsCompleted 0 # number of work items this cpu completed @@ -142,6 +151,13 @@ system.cpu.icache.tags.warmup_cycle 148848615500 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 510.665021 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.997393 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.997393 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 87 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 134 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::2 291 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 245107932 # Number of tag accesses +system.cpu.icache.tags.data_accesses 245107932 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 243525778 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 243525778 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 243525778 # number of demand (read+write) hits @@ -184,6 +200,13 @@ system.cpu.itb_walker_cache.tags.warmup_cycle 5102116468000 system.cpu.itb_walker_cache.tags.occ_blocks::cpu.itb.walker 3.026303 # Average occupied blocks per requestor system.cpu.itb_walker_cache.tags.occ_percent::cpu.itb.walker 0.189144 # Average percentage of cache occupancy system.cpu.itb_walker_cache.tags.occ_percent::total 0.189144 # Average percentage of cache occupancy +system.cpu.itb_walker_cache.tags.occ_task_id_blocks::1024 12 # Occupied blocks per task id +system.cpu.itb_walker_cache.tags.age_task_id_blocks_1024::0 5 # Occupied blocks per task id +system.cpu.itb_walker_cache.tags.age_task_id_blocks_1024::1 2 # Occupied blocks per task id +system.cpu.itb_walker_cache.tags.age_task_id_blocks_1024::2 5 # Occupied blocks per task id +system.cpu.itb_walker_cache.tags.occ_task_id_percent::1024 0.750000 # Percentage of cache occupancy per task id +system.cpu.itb_walker_cache.tags.tag_accesses 28774 # Number of tag accesses +system.cpu.itb_walker_cache.tags.data_accesses 28774 # Number of data accesses system.cpu.itb_walker_cache.ReadReq_hits::cpu.itb.walker 7887 # number of ReadReq hits system.cpu.itb_walker_cache.ReadReq_hits::total 7887 # number of ReadReq hits system.cpu.itb_walker_cache.WriteReq_hits::cpu.itb.walker 2 # number of WriteReq hits @@ -232,6 +255,13 @@ system.cpu.dtb_walker_cache.tags.warmup_cycle 5100462243000 system.cpu.dtb_walker_cache.tags.occ_blocks::cpu.dtb.walker 5.014181 # Average occupied blocks per requestor system.cpu.dtb_walker_cache.tags.occ_percent::cpu.dtb.walker 0.313386 # Average percentage of cache occupancy system.cpu.dtb_walker_cache.tags.occ_percent::total 0.313386 # Average percentage of cache occupancy +system.cpu.dtb_walker_cache.tags.occ_task_id_blocks::1024 12 # Occupied blocks per task id +system.cpu.dtb_walker_cache.tags.age_task_id_blocks_1024::0 5 # Occupied blocks per task id 
+system.cpu.dtb_walker_cache.tags.age_task_id_blocks_1024::1 2 # Occupied blocks per task id +system.cpu.dtb_walker_cache.tags.age_task_id_blocks_1024::2 5 # Occupied blocks per task id +system.cpu.dtb_walker_cache.tags.occ_task_id_percent::1024 0.750000 # Percentage of cache occupancy per task id +system.cpu.dtb_walker_cache.tags.tag_accesses 52398 # Number of tag accesses +system.cpu.dtb_walker_cache.tags.data_accesses 52398 # Number of data accesses system.cpu.dtb_walker_cache.ReadReq_hits::cpu.dtb.walker 12963 # number of ReadReq hits system.cpu.dtb_walker_cache.ReadReq_hits::total 12963 # number of ReadReq hits system.cpu.dtb_walker_cache.demand_hits::cpu.dtb.walker 12963 # number of demand (read+write) hits @@ -276,6 +306,13 @@ system.cpu.dcache.tags.warmup_cycle 7549500 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 511.999424 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.999999 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.999999 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 226 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 259 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::2 27 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 88813841 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 88813841 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 12077531 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 12077531 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 8095378 # number of WriteReq hits @@ -339,6 +376,15 @@ system.cpu.l2cache.tags.occ_percent::cpu.itb.walker 0.000002 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.038003 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.159035 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.989106 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 64128 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 46 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 282 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::2 3455 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::3 20892 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::4 39453 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.978516 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 32198887 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 32198887 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.dtb.walker 6504 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::cpu.itb.walker 2802 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::cpu.inst 777739 # number of ReadReq hits diff --git a/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-timing/config.ini b/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-timing/config.ini index 4c888767a..d9f47005d 100644 --- a/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-timing/config.ini +++ 
b/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-timing/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=true +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -14,10 +16,11 @@ boot_osflags=earlyprintk=ttyS0 console=ttyS0 lpj=7999923 root=/dev/hda1 cache_line_size=64 clk_domain=system.clk_domain e820_table=system.e820_table +eventq_index=0 init_param=0 intel_mp_pointer=system.intel_mp_pointer intel_mp_table=system.intel_mp_table -kernel=/dist/m5/system/binaries/x86_64-vmlinux-2.6.22.9 +kernel=/dist/binaries/x86_64-vmlinux-2.6.22.9 load_addr_mask=18446744073709551615 mem_mode=timing mem_ranges=0:134217727 @@ -38,6 +41,7 @@ system_port=system.membus.slave[1] [system.acpi_description_table_pointer] type=X86ACPIRSDP children=xsdt +eventq_index=0 oem_id= revision=2 rsdt=Null @@ -48,6 +52,7 @@ type=X86ACPIXSDT creator_id= creator_revision=0 entries= +eventq_index=0 oem_id= oem_revision=0 oem_table_id= @@ -56,6 +61,7 @@ oem_table_id= type=Bridge clk_domain=system.clk_domain delay=50000 +eventq_index=0 ranges=11529215046068469760:11529215046068473855 req_size=16 resp_size=16 @@ -66,6 +72,7 @@ slave=system.iobus.master[0] type=Bridge clk_domain=system.clk_domain delay=50000 +eventq_index=0 ranges=4273995776:4273999871 9223372036854775808:11529215046068469759 13835058055282163712:18446744073709551615 req_size=16 resp_size=16 @@ -75,6 +82,7 @@ slave=system.membus.master[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -87,6 +95,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -111,6 +120,7 @@ icache_port=system.cpu.icache.cpu_side type=DerivedClockDomain clk_divider=16 clk_domain=system.cpu_clk_domain +eventq_index=0 [system.cpu.dcache] type=BaseCache @@ -118,6 +128,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=4 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -126,6 +137,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu.dcache.tags @@ -140,18 +152,22 @@ type=LRU assoc=4 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu.dtb] type=X86TLB children=walker +eventq_index=0 size=64 walker=system.cpu.dtb.walker [system.cpu.dtb.walker] type=X86PagetableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=4 system=system port=system.cpu.dtb_walker_cache.cpu_side @@ -162,6 +178,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -170,6 +187,7 @@ mshrs=10 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=1024 system=system tags=system.cpu.dtb_walker_cache.tags @@ -184,7 +202,9 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=1024 [system.cpu.icache] @@ -193,6 +213,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=1 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -201,6 +222,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 
+sequential_access=false size=32768 system=system tags=system.cpu.icache.tags @@ -215,12 +237,15 @@ type=LRU assoc=1 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu.interrupts] type=X86LocalApic clk_domain=system.cpu.apic_clk_domain +eventq_index=0 int_latency=1000 pio_addr=2305843009213693952 pio_latency=100000 @@ -231,16 +256,19 @@ pio=system.membus.master[1] [system.cpu.isa] type=X86ISA +eventq_index=0 [system.cpu.itb] type=X86TLB children=walker +eventq_index=0 size=64 walker=system.cpu.itb.walker [system.cpu.itb.walker] type=X86PagetableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=4 system=system port=system.cpu.itb_walker_cache.cpu_side @@ -251,6 +279,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -259,6 +288,7 @@ mshrs=10 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=1024 system=system tags=system.cpu.itb_walker_cache.tags @@ -273,7 +303,9 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=1024 [system.cpu.l2cache] @@ -282,6 +314,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -290,6 +323,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=4194304 system=system tags=system.cpu.l2cache.tags @@ -304,12 +338,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=4194304 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -319,44 +356,52 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side system.cpu.itb_walke [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.e820_table] type=X86E820Table children=entries0 entries1 entries2 entries3 entries=system.e820_table.entries0 system.e820_table.entries1 system.e820_table.entries2 system.e820_table.entries3 +eventq_index=0 [system.e820_table.entries0] type=X86E820Entry addr=0 +eventq_index=0 range_type=1 size=654336 [system.e820_table.entries1] type=X86E820Entry addr=654336 +eventq_index=0 range_type=2 size=394240 [system.e820_table.entries2] type=X86E820Entry addr=1048576 +eventq_index=0 range_type=1 size=133169152 [system.e820_table.entries3] type=X86E820Entry addr=4294901760 +eventq_index=0 range_type=2 size=65536 [system.intel_mp_pointer] type=X86IntelMPFloatingPointer default_config=0 +eventq_index=0 imcr_present=true spec_rev=4 @@ -364,6 +409,7 @@ spec_rev=4 type=X86IntelMPConfigTable children=base_entries00 base_entries01 base_entries02 base_entries03 base_entries04 base_entries05 base_entries06 base_entries07 base_entries08 base_entries09 base_entries10 base_entries11 base_entries12 base_entries13 base_entries14 base_entries15 base_entries16 base_entries17 base_entries18 base_entries19 base_entries20 base_entries21 base_entries22 base_entries23 base_entries24 base_entries25 base_entries26 base_entries27 base_entries28 base_entries29 base_entries30 base_entries31 base_entries32 ext_entries base_entries=system.intel_mp_table.base_entries00 
system.intel_mp_table.base_entries01 system.intel_mp_table.base_entries02 system.intel_mp_table.base_entries03 system.intel_mp_table.base_entries04 system.intel_mp_table.base_entries05 system.intel_mp_table.base_entries06 system.intel_mp_table.base_entries07 system.intel_mp_table.base_entries08 system.intel_mp_table.base_entries09 system.intel_mp_table.base_entries10 system.intel_mp_table.base_entries11 system.intel_mp_table.base_entries12 system.intel_mp_table.base_entries13 system.intel_mp_table.base_entries14 system.intel_mp_table.base_entries15 system.intel_mp_table.base_entries16 system.intel_mp_table.base_entries17 system.intel_mp_table.base_entries18 system.intel_mp_table.base_entries19 system.intel_mp_table.base_entries20 system.intel_mp_table.base_entries21 system.intel_mp_table.base_entries22 system.intel_mp_table.base_entries23 system.intel_mp_table.base_entries24 system.intel_mp_table.base_entries25 system.intel_mp_table.base_entries26 system.intel_mp_table.base_entries27 system.intel_mp_table.base_entries28 system.intel_mp_table.base_entries29 system.intel_mp_table.base_entries30 system.intel_mp_table.base_entries31 system.intel_mp_table.base_entries32 +eventq_index=0 ext_entries=system.intel_mp_table.ext_entries local_apic=4276092928 oem_id= @@ -376,6 +422,7 @@ spec_rev=4 type=X86IntelMPProcessor bootstrap=true enable=true +eventq_index=0 family=0 feature_flags=0 local_apic_id=0 @@ -387,6 +434,7 @@ stepping=0 type=X86IntelMPIOAPIC address=4273995776 enable=true +eventq_index=0 id=1 version=17 @@ -394,16 +442,19 @@ version=17 type=X86IntelMPBus bus_id=0 bus_type=ISA +eventq_index=0 [system.intel_mp_table.base_entries03] type=X86IntelMPBus bus_id=1 bus_type=PCI +eventq_index=0 [system.intel_mp_table.base_entries04] type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=16 +eventq_index=0 interrupt_type=INT polarity=ConformPolarity source_bus_id=1 @@ -414,6 +465,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=0 +eventq_index=0 interrupt_type=ExtInt polarity=ConformPolarity source_bus_id=0 @@ -424,6 +476,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=2 +eventq_index=0 interrupt_type=INT polarity=ConformPolarity source_bus_id=0 @@ -434,6 +487,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=0 +eventq_index=0 interrupt_type=ExtInt polarity=ConformPolarity source_bus_id=0 @@ -444,6 +498,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=1 +eventq_index=0 interrupt_type=INT polarity=ConformPolarity source_bus_id=0 @@ -454,6 +509,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=0 +eventq_index=0 interrupt_type=ExtInt polarity=ConformPolarity source_bus_id=0 @@ -464,6 +520,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=3 +eventq_index=0 interrupt_type=INT polarity=ConformPolarity source_bus_id=0 @@ -474,6 +531,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=0 +eventq_index=0 interrupt_type=ExtInt polarity=ConformPolarity source_bus_id=0 @@ -484,6 +542,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=4 +eventq_index=0 interrupt_type=INT polarity=ConformPolarity source_bus_id=0 @@ -494,6 +553,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=0 
+eventq_index=0 interrupt_type=ExtInt polarity=ConformPolarity source_bus_id=0 @@ -504,6 +564,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=5 +eventq_index=0 interrupt_type=INT polarity=ConformPolarity source_bus_id=0 @@ -514,6 +575,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=0 +eventq_index=0 interrupt_type=ExtInt polarity=ConformPolarity source_bus_id=0 @@ -524,6 +586,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=6 +eventq_index=0 interrupt_type=INT polarity=ConformPolarity source_bus_id=0 @@ -534,6 +597,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=0 +eventq_index=0 interrupt_type=ExtInt polarity=ConformPolarity source_bus_id=0 @@ -544,6 +608,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=7 +eventq_index=0 interrupt_type=INT polarity=ConformPolarity source_bus_id=0 @@ -554,6 +619,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=0 +eventq_index=0 interrupt_type=ExtInt polarity=ConformPolarity source_bus_id=0 @@ -564,6 +630,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=8 +eventq_index=0 interrupt_type=INT polarity=ConformPolarity source_bus_id=0 @@ -574,6 +641,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=0 +eventq_index=0 interrupt_type=ExtInt polarity=ConformPolarity source_bus_id=0 @@ -584,6 +652,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=9 +eventq_index=0 interrupt_type=INT polarity=ConformPolarity source_bus_id=0 @@ -594,6 +663,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=0 +eventq_index=0 interrupt_type=ExtInt polarity=ConformPolarity source_bus_id=0 @@ -604,6 +674,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=10 +eventq_index=0 interrupt_type=INT polarity=ConformPolarity source_bus_id=0 @@ -614,6 +685,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=0 +eventq_index=0 interrupt_type=ExtInt polarity=ConformPolarity source_bus_id=0 @@ -624,6 +696,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=11 +eventq_index=0 interrupt_type=INT polarity=ConformPolarity source_bus_id=0 @@ -634,6 +707,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=0 +eventq_index=0 interrupt_type=ExtInt polarity=ConformPolarity source_bus_id=0 @@ -644,6 +718,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=12 +eventq_index=0 interrupt_type=INT polarity=ConformPolarity source_bus_id=0 @@ -654,6 +729,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=0 +eventq_index=0 interrupt_type=ExtInt polarity=ConformPolarity source_bus_id=0 @@ -664,6 +740,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=13 +eventq_index=0 interrupt_type=INT polarity=ConformPolarity source_bus_id=0 @@ -674,6 +751,7 @@ trigger=ConformTrigger type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=0 +eventq_index=0 interrupt_type=ExtInt polarity=ConformPolarity source_bus_id=0 @@ -684,6 +762,7 @@ trigger=ConformTrigger 
type=X86IntelMPIOIntAssignment dest_io_apic_id=1 dest_io_apic_intin=14 +eventq_index=0 interrupt_type=INT polarity=ConformPolarity source_bus_id=0 @@ -693,16 +772,19 @@ trigger=ConformTrigger [system.intel_mp_table.ext_entries] type=X86IntelMPBusHierarchy bus_id=0 +eventq_index=0 parent_bus=1 subtractive_decode=true [system.intrctrl] type=IntrControl +eventq_index=0 sys=system [system.iobus] type=NoncoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 use_default_range=true width=8 @@ -716,6 +798,7 @@ children=tags addr_ranges=0:134217727 assoc=8 clk_domain=system.clk_domain +eventq_index=0 forward_snoops=false hit_latency=50 is_top_level=true @@ -724,6 +807,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=50 +sequential_access=false size=1024 system=system tags=system.iocache.tags @@ -738,13 +822,16 @@ type=LRU assoc=8 block_size=64 clk_domain=system.clk_domain +eventq_index=0 hit_latency=50 +sequential_access=false size=1024 [system.membus] type=CoherentBus children=badaddr_responder clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -756,6 +843,7 @@ slave=system.apicbridge.master system.system_port system.cpu.l2cache.mem_side sy [system.membus.badaddr_responder] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=0 pio_latency=100000 @@ -772,13 +860,15 @@ pio=system.membus.default [system.pc] type=Pc -children=behind_pci com_1 fake_com_2 fake_com_3 fake_com_4 fake_floppy i_dont_exist pciconfig south_bridge terminal +children=behind_pci com_1 fake_com_2 fake_com_3 fake_com_4 fake_floppy i_dont_exist pciconfig south_bridge +eventq_index=0 intrctrl=system.intrctrl system=system [system.pc.behind_pci] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=9223372036854779128 pio_latency=100000 @@ -797,6 +887,7 @@ pio=system.iobus.master[12] type=Uart8250 children=terminal clk_domain=system.clk_domain +eventq_index=0 pio_addr=9223372036854776824 pio_latency=100000 platform=system.pc @@ -806,13 +897,7 @@ pio=system.iobus.master[13] [system.pc.com_1.terminal] type=Terminal -intr_control=system.intrctrl -number=0 -output=true -port=3456 - -[system.pc.com_1.terminal] -type=Terminal +eventq_index=0 intr_control=system.intrctrl number=0 output=true @@ -821,6 +906,7 @@ port=3456 [system.pc.fake_com_2] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=9223372036854776568 pio_latency=100000 @@ -838,6 +924,7 @@ pio=system.iobus.master[14] [system.pc.fake_com_3] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=9223372036854776808 pio_latency=100000 @@ -855,6 +942,7 @@ pio=system.iobus.master[15] [system.pc.fake_com_4] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=9223372036854776552 pio_latency=100000 @@ -872,6 +960,7 @@ pio=system.iobus.master[16] [system.pc.fake_floppy] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=9223372036854776818 pio_latency=100000 @@ -889,6 +978,7 @@ pio=system.iobus.master[17] [system.pc.i_dont_exist] type=IsaFake clk_domain=system.clk_domain +eventq_index=0 fake_mem=false pio_addr=9223372036854775936 pio_latency=100000 @@ -907,6 +997,7 @@ pio=system.iobus.master[11] type=PciConfigAll bus=0 clk_domain=system.clk_domain +eventq_index=0 pio_addr=0 pio_latency=30000 platform=system.pc @@ -919,6 +1010,7 @@ type=SouthBridge children=cmos dma1 ide int_lines0 int_lines1 int_lines2 int_lines3 
int_lines4 int_lines5 int_lines6 io_apic keyboard pic1 pic2 pit speaker cmos=system.pc.south_bridge.cmos dma1=system.pc.south_bridge.dma1 +eventq_index=0 io_apic=system.pc.south_bridge.io_apic keyboard=system.pc.south_bridge.keyboard pic1=system.pc.south_bridge.pic1 @@ -931,6 +1023,7 @@ speaker=system.pc.south_bridge.speaker type=Cmos children=int_pin clk_domain=system.clk_domain +eventq_index=0 int_pin=system.pc.south_bridge.cmos.int_pin pio_addr=9223372036854775920 pio_latency=100000 @@ -940,10 +1033,12 @@ pio=system.iobus.master[1] [system.pc.south_bridge.cmos.int_pin] type=X86IntSourcePin +eventq_index=0 [system.pc.south_bridge.dma1] type=I8237 clk_domain=system.clk_domain +eventq_index=0 pio_addr=9223372036854775808 pio_latency=100000 system=system @@ -972,6 +1067,7 @@ BAR5LegacyIO=false BAR5Size=0 BIST=0 CacheLineSize=0 +CapabilityPtr=0 CardbusCIS=0 ClassCode=1 Command=0 @@ -981,8 +1077,40 @@ HeaderType=0 InterruptLine=14 InterruptPin=1 LatencyTimer=0 +MSICAPBaseOffset=0 +MSICAPCapId=0 +MSICAPMaskBits=0 +MSICAPMsgAddr=0 +MSICAPMsgCtrl=0 +MSICAPMsgData=0 +MSICAPMsgUpperAddr=0 +MSICAPNextCapability=0 +MSICAPPendingBits=0 +MSIXCAPBaseOffset=0 +MSIXCAPCapId=0 +MSIXCAPNextCapability=0 +MSIXMsgCtrl=0 +MSIXPbaOffset=0 +MSIXTableOffset=0 MaximumLatency=0 MinimumGrant=0 +PMCAPBaseOffset=0 +PMCAPCapId=0 +PMCAPCapabilities=0 +PMCAPCtrlStatus=0 +PMCAPNextCapability=0 +PXCAPBaseOffset=0 +PXCAPCapId=0 +PXCAPCapabilities=0 +PXCAPDevCap2=0 +PXCAPDevCapabilities=0 +PXCAPDevCtrl=0 +PXCAPDevCtrl2=0 +PXCAPDevStatus=0 +PXCAPLinkCap=0 +PXCAPLinkCtrl=0 +PXCAPLinkStatus=0 +PXCAPNextCapability=0 ProgIF=128 Revision=0 Status=640 @@ -994,6 +1122,7 @@ clk_domain=system.clk_domain config_latency=20000 ctrl_offset=0 disks=system.pc.south_bridge.ide.disks0 system.pc.south_bridge.ide.disks1 +eventq_index=0 io_shift=0 pci_bus=0 pci_dev=4 @@ -1010,19 +1139,22 @@ type=IdeDisk children=image delay=1000000 driveID=master +eventq_index=0 image=system.pc.south_bridge.ide.disks0.image [system.pc.south_bridge.ide.disks0.image] type=CowDiskImage children=child child=system.pc.south_bridge.ide.disks0.image.child +eventq_index=0 image_file= read_only=false table_size=65536 [system.pc.south_bridge.ide.disks0.image.child] type=RawDiskImage -image_file=/dist/m5/system/disks/linux-x86.img +eventq_index=0 +image_file=/dist/disks/linux-x86.img read_only=true [system.pc.south_bridge.ide.disks1] @@ -1030,102 +1162,120 @@ type=IdeDisk children=image delay=1000000 driveID=master +eventq_index=0 image=system.pc.south_bridge.ide.disks1.image [system.pc.south_bridge.ide.disks1.image] type=CowDiskImage children=child child=system.pc.south_bridge.ide.disks1.image.child +eventq_index=0 image_file= read_only=false table_size=65536 [system.pc.south_bridge.ide.disks1.image.child] type=RawDiskImage -image_file=/dist/m5/system/disks/linux-bigswap2.img +eventq_index=0 +image_file=/dist/disks/linux-bigswap2.img read_only=true [system.pc.south_bridge.int_lines0] type=X86IntLine children=sink +eventq_index=0 sink=system.pc.south_bridge.int_lines0.sink source=system.pc.south_bridge.pic1.output [system.pc.south_bridge.int_lines0.sink] type=X86IntSinkPin device=system.pc.south_bridge.io_apic +eventq_index=0 number=0 [system.pc.south_bridge.int_lines1] type=X86IntLine children=sink +eventq_index=0 sink=system.pc.south_bridge.int_lines1.sink source=system.pc.south_bridge.pic2.output [system.pc.south_bridge.int_lines1.sink] type=X86IntSinkPin device=system.pc.south_bridge.pic1 +eventq_index=0 number=2 [system.pc.south_bridge.int_lines2] type=X86IntLine 
children=sink +eventq_index=0 sink=system.pc.south_bridge.int_lines2.sink source=system.pc.south_bridge.cmos.int_pin [system.pc.south_bridge.int_lines2.sink] type=X86IntSinkPin device=system.pc.south_bridge.pic2 +eventq_index=0 number=0 [system.pc.south_bridge.int_lines3] type=X86IntLine children=sink +eventq_index=0 sink=system.pc.south_bridge.int_lines3.sink source=system.pc.south_bridge.pit.int_pin [system.pc.south_bridge.int_lines3.sink] type=X86IntSinkPin device=system.pc.south_bridge.pic1 +eventq_index=0 number=0 [system.pc.south_bridge.int_lines4] type=X86IntLine children=sink +eventq_index=0 sink=system.pc.south_bridge.int_lines4.sink source=system.pc.south_bridge.pit.int_pin [system.pc.south_bridge.int_lines4.sink] type=X86IntSinkPin device=system.pc.south_bridge.io_apic +eventq_index=0 number=2 [system.pc.south_bridge.int_lines5] type=X86IntLine children=sink +eventq_index=0 sink=system.pc.south_bridge.int_lines5.sink source=system.pc.south_bridge.keyboard.keyboard_int_pin [system.pc.south_bridge.int_lines5.sink] type=X86IntSinkPin device=system.pc.south_bridge.io_apic +eventq_index=0 number=1 [system.pc.south_bridge.int_lines6] type=X86IntLine children=sink +eventq_index=0 sink=system.pc.south_bridge.int_lines6.sink source=system.pc.south_bridge.keyboard.mouse_int_pin [system.pc.south_bridge.int_lines6.sink] type=X86IntSinkPin device=system.pc.south_bridge.io_apic +eventq_index=0 number=12 [system.pc.south_bridge.io_apic] type=I82094AA apic_id=1 clk_domain=system.clk_domain +eventq_index=0 external_int_pic=system.pc.south_bridge.pic1 int_latency=1000 pio_addr=4273995776 @@ -1140,6 +1290,7 @@ children=keyboard_int_pin mouse_int_pin clk_domain=system.clk_domain command_port=9223372036854775908 data_port=9223372036854775904 +eventq_index=0 keyboard_int_pin=system.pc.south_bridge.keyboard.keyboard_int_pin mouse_int_pin=system.pc.south_bridge.keyboard.mouse_int_pin pio_addr=0 @@ -1149,14 +1300,17 @@ pio=system.iobus.master[5] [system.pc.south_bridge.keyboard.keyboard_int_pin] type=X86IntSourcePin +eventq_index=0 [system.pc.south_bridge.keyboard.mouse_int_pin] type=X86IntSourcePin +eventq_index=0 [system.pc.south_bridge.pic1] type=I8259 children=output clk_domain=system.clk_domain +eventq_index=0 mode=I8259Master output=system.pc.south_bridge.pic1.output pio_addr=9223372036854775840 @@ -1167,11 +1321,13 @@ pio=system.iobus.master[6] [system.pc.south_bridge.pic1.output] type=X86IntSourcePin +eventq_index=0 [system.pc.south_bridge.pic2] type=I8259 children=output clk_domain=system.clk_domain +eventq_index=0 mode=I8259Slave output=system.pc.south_bridge.pic2.output pio_addr=9223372036854775968 @@ -1182,11 +1338,13 @@ pio=system.iobus.master[7] [system.pc.south_bridge.pic2.output] type=X86IntSourcePin +eventq_index=0 [system.pc.south_bridge.pit] type=I8254 children=int_pin clk_domain=system.clk_domain +eventq_index=0 int_pin=system.pc.south_bridge.pit.int_pin pio_addr=9223372036854775872 pio_latency=100000 @@ -1195,10 +1353,12 @@ pio=system.iobus.master[8] [system.pc.south_bridge.pit.int_pin] type=X86IntSourcePin +eventq_index=0 [system.pc.south_bridge.speaker] type=PcSpeaker clk_domain=system.clk_domain +eventq_index=0 i8254=system.pc.south_bridge.pit pio_addr=9223372036854775905 pio_latency=100000 @@ -1217,6 +1377,7 @@ conf_table_reported=true device_bus_width=8 device_rowbuffer_size=1024 devices_per_rank=8 +eventq_index=0 in_addr_map=true mem_sched_policy=frfcfs null=false @@ -1228,19 +1389,23 @@ static_backend_latency=10000 static_frontend_latency=10000 tBURST=5000 tCL=13750 
+tRAS=35000 tRCD=13750 tREFI=7800000 tRFC=300000 tRP=13750 +tRRD=6250 tWTR=7500 tXAW=40000 write_buffer_size=32 -write_thresh_perc=70 +write_high_thresh_perc=70 +write_low_thresh_perc=0 port=system.membus.master[3] [system.smbios_table] type=X86SMBiosSMBiosTable children=structures +eventq_index=0 major_version=2 minor_version=5 structures=system.smbios_table.structures @@ -1251,6 +1416,7 @@ characteristic_ext_bytes= characteristics= emb_cont_firmware_major=0 emb_cont_firmware_minor=0 +eventq_index=0 major=0 minor=0 release_date=06/08/2008 @@ -1261,5 +1427,6 @@ version= [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-timing/simerr b/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-timing/simerr index 347fa32d8..bb1874a4f 100755 --- a/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-timing/simerr +++ b/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-timing/simerr @@ -1,4 +1,3 @@ -warn: add_child('terminal'): child 'terminal' already has parent warn: Sockets disabled, not accepting terminal connections warn: Reading current count from inactive timer. warn: Sockets disabled, not accepting gdb connections @@ -7,4 +6,3 @@ warn: x86 cpuid: unknown family 0x8086 warn: Tried to clear PCI interrupt 14 warn: Unknown mouse command 0xe1. warn: instruction 'wbinvd' unimplemented -hack: be nice to actually delete the event here diff --git a/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-timing/simout b/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-timing/simout index 3c12ad36c..f1feb2eac 100755 --- a/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-timing/simout +++ b/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-timing/simout @@ -1,12 +1,12 @@ gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Oct 16 2013 01:35:57 -gem5 started Oct 16 2013 01:55:08 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:10:34 +gem5 started Jan 22 2014 17:30:19 +gem5 executing on u200540-lin command line: build/X86/gem5.opt -d build/X86/tests/opt/quick/fs/10.linux-boot/x86/linux/pc-simple-timing -re tests/run.py build/X86/tests/opt/quick/fs/10.linux-boot/x86/linux/pc-simple-timing Global frequency set at 1000000000000 ticks per second -info: kernel located at: /dist/m5/system/binaries/x86_64-vmlinux-2.6.22.9 +info: kernel located at: /dist/binaries/x86_64-vmlinux-2.6.22.9 0: rtc: Real-time clock set to Sun Jan 1 00:00:00 2012 info: Entering event queue @ 0. Starting simulation... 
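The physmem hunks above reflect the reworked DRAM controller parameter set: tRAS and tRRD are now dumped explicitly, and the old single write_thresh_perc has been split into a high and a low write-buffer drain threshold. A minimal sketch of setting those knobs from a gem5 Python config, assuming the SimpleDRAM-based controller these reference configs use and remembering that 1 tick = 1 ps in these dumps (so 35000 ticks is 35 ns); this is illustrative only, not part of the patch:

    from m5.objects import SimpleDRAM, AddrRange

    dram = SimpleDRAM(range=AddrRange('128MB'))
    dram.tRAS = '35ns'                # 35000 ticks in the reference dump
    dram.tRRD = '6.25ns'              # 6250 ticks in the reference dump
    dram.write_high_thresh_perc = 70  # start draining writes above 70% full
    dram.write_low_thresh_perc = 0    # replaces the old single write_thresh_perc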
-Exiting @ tick 5192277855000 because m5_exit instruction encountered +Exiting @ tick 5196390180000 because m5_exit instruction encountered diff --git a/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-timing/stats.txt b/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-timing/stats.txt index 79d47dc5b..b371db56a 100644 --- a/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-timing/stats.txt +++ b/tests/quick/fs/10.linux-boot/ref/x86/linux/pc-simple-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 5.196390 # Nu sim_ticks 5196390180000 # Number of ticks simulated final_tick 5196390180000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 893068 # Simulator instruction rate (inst/s) -host_op_rate 1721530 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 36161085452 # Simulator tick rate (ticks/s) -host_mem_usage 586592 # Number of bytes of host memory used -host_seconds 143.70 # Real time elapsed on the host +host_inst_rate 991078 # Simulator instruction rate (inst/s) +host_op_rate 1910460 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 40129605273 # Simulator tick rate (ticks/s) +host_mem_usage 591204 # Number of bytes of host memory used +host_seconds 129.49 # Real time elapsed on the host sim_insts 128334813 # Number of instructions simulated sim_ops 247385808 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::pc.south_bridge.ide 2883712 # Number of bytes read from this memory system.physmem.bytes_read::cpu.dtb.walker 128 # Number of bytes read from this memory system.physmem.bytes_read::cpu.itb.walker 320 # Number of bytes read from this memory @@ -419,6 +421,11 @@ system.iocache.tags.warmup_cycle 5049776837000 # C system.iocache.tags.occ_blocks::pc.south_bridge.ide 0.113099 # Average occupied blocks per requestor system.iocache.tags.occ_percent::pc.south_bridge.ide 0.007069 # Average percentage of cache occupancy system.iocache.tags.occ_percent::total 0.007069 # Average percentage of cache occupancy +system.iocache.tags.occ_task_id_blocks::1023 16 # Occupied blocks per task id +system.iocache.tags.age_task_id_blocks_1023::2 16 # Occupied blocks per task id +system.iocache.tags.occ_task_id_percent::1023 1 # Percentage of cache occupancy per task id +system.iocache.tags.tag_accesses 428004 # Number of tag accesses +system.iocache.tags.data_accesses 428004 # Number of data accesses system.iocache.ReadReq_misses::pc.south_bridge.ide 836 # number of ReadReq misses system.iocache.ReadReq_misses::total 836 # number of ReadReq misses system.iocache.WriteReq_misses::pc.south_bridge.ide 46720 # number of WriteReq misses @@ -616,6 +623,8 @@ system.iobus.respLayer1.occupancy 52989250 # La system.iobus.respLayer1.utilization 0.0 # Layer utilization (%) system.iobus.respLayer2.occupancy 1655000 # Layer occupancy (ticks) system.iobus.respLayer2.utilization 0.0 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks +system.cpu.apic_clk_domain.clock 8000 # Clock period in ticks system.cpu.numCycles 10392780360 # number of cpu cycles simulated system.cpu.numWorkItemsStarted 0 # number of work items this cpu started system.cpu.numWorkItemsCompleted 0 # number of work items this cpu completed @@ -651,6 +660,14 @@ system.cpu.icache.tags.warmup_cycle 161436066250 # Cy 
system.cpu.icache.tags.occ_blocks::cpu.inst 510.351939 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.996781 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.996781 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 45 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 161 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::2 299 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::3 7 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 146161971 # Number of tag accesses +system.cpu.icache.tags.data_accesses 146161971 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 144584753 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 144584753 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 144584753 # number of demand (read+write) hits @@ -729,6 +746,14 @@ system.cpu.itb_walker_cache.tags.warmup_cycle 5169682535000 system.cpu.itb_walker_cache.tags.occ_blocks::cpu.itb.walker 3.069761 # Average occupied blocks per requestor system.cpu.itb_walker_cache.tags.occ_percent::cpu.itb.walker 0.191860 # Average percentage of cache occupancy system.cpu.itb_walker_cache.tags.occ_percent::total 0.191860 # Average percentage of cache occupancy +system.cpu.itb_walker_cache.tags.occ_task_id_blocks::1024 11 # Occupied blocks per task id +system.cpu.itb_walker_cache.tags.age_task_id_blocks_1024::0 3 # Occupied blocks per task id +system.cpu.itb_walker_cache.tags.age_task_id_blocks_1024::1 2 # Occupied blocks per task id +system.cpu.itb_walker_cache.tags.age_task_id_blocks_1024::2 4 # Occupied blocks per task id +system.cpu.itb_walker_cache.tags.age_task_id_blocks_1024::3 2 # Occupied blocks per task id +system.cpu.itb_walker_cache.tags.occ_task_id_percent::1024 0.687500 # Percentage of cache occupancy per task id +system.cpu.itb_walker_cache.tags.tag_accesses 29050 # Number of tag accesses +system.cpu.itb_walker_cache.tags.data_accesses 29050 # Number of data accesses system.cpu.itb_walker_cache.ReadReq_hits::cpu.itb.walker 7617 # number of ReadReq hits system.cpu.itb_walker_cache.ReadReq_hits::total 7617 # number of ReadReq hits system.cpu.itb_walker_cache.WriteReq_hits::cpu.itb.walker 2 # number of WriteReq hits @@ -813,6 +838,14 @@ system.cpu.dtb_walker_cache.tags.warmup_cycle 5168018375000 system.cpu.dtb_walker_cache.tags.occ_blocks::cpu.dtb.walker 5.052475 # Average occupied blocks per requestor system.cpu.dtb_walker_cache.tags.occ_percent::cpu.dtb.walker 0.315780 # Average percentage of cache occupancy system.cpu.dtb_walker_cache.tags.occ_percent::total 0.315780 # Average percentage of cache occupancy +system.cpu.dtb_walker_cache.tags.occ_task_id_blocks::1024 13 # Occupied blocks per task id +system.cpu.dtb_walker_cache.tags.age_task_id_blocks_1024::0 2 # Occupied blocks per task id +system.cpu.dtb_walker_cache.tags.age_task_id_blocks_1024::1 5 # Occupied blocks per task id +system.cpu.dtb_walker_cache.tags.age_task_id_blocks_1024::2 4 # Occupied blocks per task id +system.cpu.dtb_walker_cache.tags.age_task_id_blocks_1024::3 2 # Occupied blocks per task id +system.cpu.dtb_walker_cache.tags.occ_task_id_percent::1024 0.812500 # Percentage of cache occupancy per task id 
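The stats hunks above and below are the core of the "cache occupancy" part of this change: every tag store now reports per-task-id occupancy (occ_task_id_blocks, age_task_id_blocks_<id>::<bucket>, occ_task_id_percent) plus raw tag_accesses and data_accesses counts. A minimal sketch of pulling those new counters out of a stats.txt file; the stat names come from the lines in this diff, while the helper itself is just an illustration, not gem5 code:

    import re

    OCC_RE = re.compile(
        r'^(\S+\.tags\.(?:occ_task_id_blocks|age_task_id_blocks_\d+|'
        r'occ_task_id_percent|tag_accesses|data_accesses)\S*)\s+(\S+)')

    def read_occupancy_stats(path):
        """Return the new per-task-id occupancy and tag/data access stats."""
        stats = {}
        with open(path) as f:
            for line in f:
                m = OCC_RE.match(line)
                if m:
                    stats[m.group(1)] = float(m.group(2))
        return stats

    # e.g. read_occupancy_stats('stats.txt')['system.cpu.dcache.tags.tag_accesses']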
+system.cpu.dtb_walker_cache.tags.tag_accesses 53026 # Number of tag accesses +system.cpu.dtb_walker_cache.tags.data_accesses 53026 # Number of data accesses system.cpu.dtb_walker_cache.ReadReq_hits::cpu.dtb.walker 12806 # number of ReadReq hits system.cpu.dtb_walker_cache.ReadReq_hits::total 12806 # number of ReadReq hits system.cpu.dtb_walker_cache.demand_hits::cpu.dtb.walker 12806 # number of demand (read+write) hits @@ -893,6 +926,14 @@ system.cpu.dcache.tags.warmup_cycle 50992250 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 511.997026 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.999994 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.999994 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 512 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 100 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 333 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::2 78 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::3 1 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 1 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 88253354 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 88253354 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 11993197 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 11993197 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 8040328 # number of WriteReq hits @@ -1047,6 +1088,15 @@ system.cpu.l2cache.tags.occ_percent::cpu.itb.walker 0.000002 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.052573 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.170366 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.987720 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 64716 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 23 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 89 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::2 2864 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::3 4951 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::4 56789 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.987488 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 32180081 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 32180081 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.dtb.walker 6740 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::cpu.itb.walker 2903 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::cpu.inst 775712 # number of ReadReq hits diff --git a/tests/quick/fs/80.netperf-stream/ref/alpha/linux/twosys-tsunami-simple-atomic/config.ini b/tests/quick/fs/80.netperf-stream/ref/alpha/linux/twosys-tsunami-simple-atomic/config.ini index 0062dcbb2..71b238fb8 100644 --- a/tests/quick/fs/80.netperf-stream/ref/alpha/linux/twosys-tsunami-simple-atomic/config.ini +++ b/tests/quick/fs/80.netperf-stream/ref/alpha/linux/twosys-tsunami-simple-atomic/config.ini @@ -5,16 +5,17 @@ boot_cpu_frequency=250 boot_osflags=root=/dev/hda1 console=ttyS0 cache_line_size=64 
clk_domain=drivesys.clk_domain -console=/dist/m5/system/binaries/console +console=/dist/binaries/console +eventq_index=0 init_param=0 -kernel=/dist/m5/system/binaries/vmlinux +kernel=/dist/binaries/vmlinux load_addr_mask=1099511627775 mem_mode=atomic mem_ranges=0:134217727 memories=drivesys.physmem num_work_ids=16 -pal=/dist/m5/system/binaries/ts_osfpal -readfile=/z/m5/regression/zizzer/gem5/configs/boot/netperf-server.rcS +pal=/dist/binaries/ts_osfpal +readfile=/work/gem5.ext/configs/boot/netperf-server.rcS symbolfile= system_rev=1024 system_type=34 @@ -31,6 +32,7 @@ system_port=drivesys.membus.slave[0] type=Bridge clk_domain=drivesys.clk_domain delay=50000 +eventq_index=0 ranges=8796093022208:18446744073709551615 req_size=16 resp_size=16 @@ -40,6 +42,7 @@ slave=drivesys.membus.master[0] [drivesys.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=drivesys.voltage_domain [drivesys.cpu] @@ -52,6 +55,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=drivesys.cpu.dtb +eventq_index=0 fastmem=false function_trace=false function_trace_start=0 @@ -82,43 +86,53 @@ icache_port=drivesys.membus.slave[1] [drivesys.cpu.clk_domain] type=SrcClockDomain clock=250 +eventq_index=0 voltage_domain=drivesys.voltage_domain [drivesys.cpu.dtb] type=AlphaTLB +eventq_index=0 size=64 [drivesys.cpu.interrupts] type=AlphaInterrupts +eventq_index=0 [drivesys.cpu.isa] type=AlphaISA +eventq_index=0 +system=drivesys [drivesys.cpu.itb] type=AlphaTLB +eventq_index=0 size=48 [drivesys.cpu.tracer] type=ExeTracer +eventq_index=0 [drivesys.disk0] type=IdeDisk children=image delay=1000000 driveID=master +eventq_index=0 image=drivesys.disk0.image [drivesys.disk0.image] type=CowDiskImage children=child child=drivesys.disk0.image.child +eventq_index=0 image_file= read_only=false table_size=65536 [drivesys.disk0.image.child] type=RawDiskImage -image_file=/dist/m5/system/disks/linux-latest.img +eventq_index=0 +image_file=/dist/disks/linux-latest.img read_only=true [drivesys.disk2] @@ -126,29 +140,34 @@ type=IdeDisk children=image delay=1000000 driveID=master +eventq_index=0 image=drivesys.disk2.image [drivesys.disk2.image] type=CowDiskImage children=child child=drivesys.disk2.image.child +eventq_index=0 image_file= read_only=false table_size=65536 [drivesys.disk2.image.child] type=RawDiskImage -image_file=/dist/m5/system/disks/linux-bigswap2.img +eventq_index=0 +image_file=/dist/disks/linux-bigswap2.img read_only=true [drivesys.intrctrl] type=IntrControl +eventq_index=0 sys=drivesys [drivesys.iobridge] type=Bridge clk_domain=drivesys.clk_domain delay=50000 +eventq_index=0 ranges=0:134217727 req_size=16 resp_size=16 @@ -158,6 +177,7 @@ slave=drivesys.iobus.master[29] [drivesys.iobus] type=NoncoherentBus clk_domain=drivesys.clk_domain +eventq_index=0 header_cycles=1 use_default_range=true width=8 @@ -169,6 +189,7 @@ slave=drivesys.bridge.master drivesys.tsunami.ide.dma drivesys.tsunami.ethernet. 
type=CoherentBus children=badaddr_responder clk_domain=drivesys.clk_domain +eventq_index=0 header_cycles=1 system=drivesys use_default_range=false @@ -180,6 +201,7 @@ slave=drivesys.system_port drivesys.cpu.icache_port drivesys.cpu.dcache_port dri [drivesys.membus.badaddr_responder] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=0 pio_latency=100000 @@ -199,6 +221,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=drivesys.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -210,15 +233,18 @@ port=drivesys.membus.master[1] type=SimpleDisk children=disk disk=drivesys.simple_disk.disk +eventq_index=0 system=drivesys [drivesys.simple_disk.disk] type=RawDiskImage -image_file=/dist/m5/system/disks/linux-latest.img +eventq_index=0 +image_file=/dist/disks/linux-latest.img read_only=true [drivesys.terminal] type=Terminal +eventq_index=0 intr_control=drivesys.intrctrl number=0 output=true @@ -227,6 +253,7 @@ port=3456 [drivesys.tsunami] type=Tsunami children=backdoor cchip ethernet fake_OROM fake_ata0 fake_ata1 fake_pnp_addr fake_pnp_read0 fake_pnp_read1 fake_pnp_read2 fake_pnp_read3 fake_pnp_read4 fake_pnp_read5 fake_pnp_read6 fake_pnp_read7 fake_pnp_write fake_ppc fake_sm_chip fake_uart1 fake_uart2 fake_uart3 fake_uart4 fb ide io pchip pciconfig uart +eventq_index=0 intrctrl=drivesys.intrctrl system=drivesys @@ -235,6 +262,7 @@ type=AlphaBackdoor clk_domain=drivesys.clk_domain cpu=drivesys.cpu disk=drivesys.simple_disk +eventq_index=0 pio_addr=8804682956800 pio_latency=100000 platform=drivesys.tsunami @@ -245,6 +273,7 @@ pio=drivesys.iobus.master[24] [drivesys.tsunami.cchip] type=TsunamiCChip clk_domain=drivesys.clk_domain +eventq_index=0 pio_addr=8803072344064 pio_latency=100000 system=drivesys @@ -274,6 +303,7 @@ BAR5LegacyIO=false BAR5Size=0 BIST=0 CacheLineSize=0 +CapabilityPtr=0 CardbusCIS=0 ClassCode=2 Command=0 @@ -283,8 +313,40 @@ HeaderType=0 InterruptLine=30 InterruptPin=1 LatencyTimer=0 +MSICAPBaseOffset=0 +MSICAPCapId=0 +MSICAPMaskBits=0 +MSICAPMsgAddr=0 +MSICAPMsgCtrl=0 +MSICAPMsgData=0 +MSICAPMsgUpperAddr=0 +MSICAPNextCapability=0 +MSICAPPendingBits=0 +MSIXCAPBaseOffset=0 +MSIXCAPCapId=0 +MSIXCAPNextCapability=0 +MSIXMsgCtrl=0 +MSIXPbaOffset=0 +MSIXTableOffset=0 MaximumLatency=52 MinimumGrant=176 +PMCAPBaseOffset=0 +PMCAPCapId=0 +PMCAPCapabilities=0 +PMCAPCtrlStatus=0 +PMCAPNextCapability=0 +PXCAPBaseOffset=0 +PXCAPCapId=0 +PXCAPCapabilities=0 +PXCAPDevCap2=0 +PXCAPDevCapabilities=0 +PXCAPDevCtrl=0 +PXCAPDevCtrl2=0 +PXCAPDevStatus=0 +PXCAPLinkCap=0 +PXCAPLinkCtrl=0 +PXCAPLinkStatus=0 +PXCAPNextCapability=0 ProgIF=0 Revision=0 Status=656 @@ -301,6 +363,7 @@ dma_read_delay=0 dma_read_factor=0 dma_write_delay=0 dma_write_factor=0 +eventq_index=0 hardware_address=00:90:00:00:00:02 intr_delay=10000000 pci_bus=0 @@ -325,11 +388,13 @@ pio=drivesys.iobus.master[27] [drivesys.tsunami.ethernet.clk_domain] type=SrcClockDomain clock=2000 +eventq_index=0 voltage_domain=drivesys.voltage_domain [drivesys.tsunami.fake_OROM] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8796093677568 pio_latency=100000 @@ -347,6 +412,7 @@ pio=drivesys.iobus.master[8] [drivesys.tsunami.fake_ata0] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848432 pio_latency=100000 @@ -364,6 +430,7 @@ pio=drivesys.iobus.master[19] [drivesys.tsunami.fake_ata1] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848304 
pio_latency=100000 @@ -381,6 +448,7 @@ pio=drivesys.iobus.master[20] [drivesys.tsunami.fake_pnp_addr] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848569 pio_latency=100000 @@ -398,6 +466,7 @@ pio=drivesys.iobus.master[9] [drivesys.tsunami.fake_pnp_read0] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848451 pio_latency=100000 @@ -415,6 +484,7 @@ pio=drivesys.iobus.master[11] [drivesys.tsunami.fake_pnp_read1] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848515 pio_latency=100000 @@ -432,6 +502,7 @@ pio=drivesys.iobus.master[12] [drivesys.tsunami.fake_pnp_read2] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848579 pio_latency=100000 @@ -449,6 +520,7 @@ pio=drivesys.iobus.master[13] [drivesys.tsunami.fake_pnp_read3] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848643 pio_latency=100000 @@ -466,6 +538,7 @@ pio=drivesys.iobus.master[14] [drivesys.tsunami.fake_pnp_read4] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848707 pio_latency=100000 @@ -483,6 +556,7 @@ pio=drivesys.iobus.master[15] [drivesys.tsunami.fake_pnp_read5] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848771 pio_latency=100000 @@ -500,6 +574,7 @@ pio=drivesys.iobus.master[16] [drivesys.tsunami.fake_pnp_read6] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848835 pio_latency=100000 @@ -517,6 +592,7 @@ pio=drivesys.iobus.master[17] [drivesys.tsunami.fake_pnp_read7] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848899 pio_latency=100000 @@ -534,6 +610,7 @@ pio=drivesys.iobus.master[18] [drivesys.tsunami.fake_pnp_write] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615850617 pio_latency=100000 @@ -551,6 +628,7 @@ pio=drivesys.iobus.master[10] [drivesys.tsunami.fake_ppc] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848891 pio_latency=100000 @@ -568,6 +646,7 @@ pio=drivesys.iobus.master[7] [drivesys.tsunami.fake_sm_chip] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848816 pio_latency=100000 @@ -585,6 +664,7 @@ pio=drivesys.iobus.master[2] [drivesys.tsunami.fake_uart1] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848696 pio_latency=100000 @@ -602,6 +682,7 @@ pio=drivesys.iobus.master[3] [drivesys.tsunami.fake_uart2] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848936 pio_latency=100000 @@ -619,6 +700,7 @@ pio=drivesys.iobus.master[4] [drivesys.tsunami.fake_uart3] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848680 pio_latency=100000 @@ -636,6 +718,7 @@ pio=drivesys.iobus.master[5] [drivesys.tsunami.fake_uart4] type=IsaFake clk_domain=drivesys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848944 pio_latency=100000 @@ -654,6 +737,7 @@ pio=drivesys.iobus.master[6] type=BadDevice clk_domain=drivesys.clk_domain devicename=FrameBuffer +eventq_index=0 pio_addr=8804615848912 pio_latency=100000 system=drivesys @@ -681,6 +765,7 @@ BAR5LegacyIO=false BAR5Size=0 BIST=0 CacheLineSize=0 +CapabilityPtr=0 CardbusCIS=0 ClassCode=1 Command=0 @@ -690,8 +775,40 @@ HeaderType=0 
InterruptLine=31 InterruptPin=1 LatencyTimer=0 +MSICAPBaseOffset=0 +MSICAPCapId=0 +MSICAPMaskBits=0 +MSICAPMsgAddr=0 +MSICAPMsgCtrl=0 +MSICAPMsgData=0 +MSICAPMsgUpperAddr=0 +MSICAPNextCapability=0 +MSICAPPendingBits=0 +MSIXCAPBaseOffset=0 +MSIXCAPCapId=0 +MSIXCAPNextCapability=0 +MSIXMsgCtrl=0 +MSIXPbaOffset=0 +MSIXTableOffset=0 MaximumLatency=0 MinimumGrant=0 +PMCAPBaseOffset=0 +PMCAPCapId=0 +PMCAPCapabilities=0 +PMCAPCtrlStatus=0 +PMCAPNextCapability=0 +PXCAPBaseOffset=0 +PXCAPCapId=0 +PXCAPCapabilities=0 +PXCAPDevCap2=0 +PXCAPDevCapabilities=0 +PXCAPDevCtrl=0 +PXCAPDevCtrl2=0 +PXCAPDevStatus=0 +PXCAPLinkCap=0 +PXCAPLinkCtrl=0 +PXCAPLinkStatus=0 +PXCAPNextCapability=0 ProgIF=133 Revision=0 Status=640 @@ -703,6 +820,7 @@ clk_domain=drivesys.clk_domain config_latency=20000 ctrl_offset=0 disks=drivesys.disk0 drivesys.disk2 +eventq_index=0 io_shift=0 pci_bus=0 pci_dev=0 @@ -717,6 +835,7 @@ pio=drivesys.iobus.master[25] [drivesys.tsunami.io] type=TsunamiIO clk_domain=drivesys.clk_domain +eventq_index=0 frequency=976562500 pio_addr=8804615847936 pio_latency=100000 @@ -729,6 +848,7 @@ pio=drivesys.iobus.master[22] [drivesys.tsunami.pchip] type=TsunamiPChip clk_domain=drivesys.clk_domain +eventq_index=0 pio_addr=8802535473152 pio_latency=100000 system=drivesys @@ -739,6 +859,7 @@ pio=drivesys.iobus.master[1] type=PciConfigAll bus=0 clk_domain=drivesys.clk_domain +eventq_index=0 pio_addr=0 pio_latency=30000 platform=drivesys.tsunami @@ -749,6 +870,7 @@ pio=drivesys.iobus.default [drivesys.tsunami.uart] type=Uart8250 clk_domain=drivesys.clk_domain +eventq_index=0 pio_addr=8804615848952 pio_latency=100000 platform=drivesys.tsunami @@ -758,10 +880,12 @@ pio=drivesys.iobus.master[23] [drivesys.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 [etherdump] type=EtherDump +eventq_index=0 file=ethertrace maxlen=96 @@ -770,6 +894,7 @@ type=EtherLink delay=0 delay_var=0 dump=etherdump +eventq_index=0 speed=8000.000000 int0=testsys.tsunami.ethernet.interface int1=drivesys.tsunami.ethernet.interface @@ -777,7 +902,9 @@ int1=drivesys.tsunami.ethernet.interface [root] type=Root children=drivesys etherdump etherlink testsys +eventq_index=0 full_system=true +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -789,16 +916,17 @@ boot_cpu_frequency=500 boot_osflags=root=/dev/hda1 console=ttyS0 cache_line_size=64 clk_domain=testsys.clk_domain -console=/dist/m5/system/binaries/console +console=/dist/binaries/console +eventq_index=0 init_param=0 -kernel=/dist/m5/system/binaries/vmlinux +kernel=/dist/binaries/vmlinux load_addr_mask=1099511627775 mem_mode=atomic mem_ranges=0:134217727 memories=testsys.physmem num_work_ids=16 -pal=/dist/m5/system/binaries/ts_osfpal -readfile=/z/m5/regression/zizzer/gem5/configs/boot/netperf-stream-client.rcS +pal=/dist/binaries/ts_osfpal +readfile=/work/gem5.ext/configs/boot/netperf-stream-client.rcS symbolfile= system_rev=1024 system_type=34 @@ -815,6 +943,7 @@ system_port=testsys.membus.slave[0] type=Bridge clk_domain=testsys.clk_domain delay=50000 +eventq_index=0 ranges=8796093022208:18446744073709551615 req_size=16 resp_size=16 @@ -824,6 +953,7 @@ slave=testsys.membus.master[0] [testsys.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=testsys.voltage_domain [testsys.cpu] @@ -836,6 +966,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=testsys.cpu.dtb +eventq_index=0 fastmem=false function_trace=false function_trace_start=0 @@ -866,43 +997,53 @@ 
icache_port=testsys.membus.slave[1] [testsys.cpu.clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=testsys.voltage_domain [testsys.cpu.dtb] type=AlphaTLB +eventq_index=0 size=64 [testsys.cpu.interrupts] type=AlphaInterrupts +eventq_index=0 [testsys.cpu.isa] type=AlphaISA +eventq_index=0 +system=testsys [testsys.cpu.itb] type=AlphaTLB +eventq_index=0 size=48 [testsys.cpu.tracer] type=ExeTracer +eventq_index=0 [testsys.disk0] type=IdeDisk children=image delay=1000000 driveID=master +eventq_index=0 image=testsys.disk0.image [testsys.disk0.image] type=CowDiskImage children=child child=testsys.disk0.image.child +eventq_index=0 image_file= read_only=false table_size=65536 [testsys.disk0.image.child] type=RawDiskImage -image_file=/dist/m5/system/disks/linux-latest.img +eventq_index=0 +image_file=/dist/disks/linux-latest.img read_only=true [testsys.disk2] @@ -910,29 +1051,34 @@ type=IdeDisk children=image delay=1000000 driveID=master +eventq_index=0 image=testsys.disk2.image [testsys.disk2.image] type=CowDiskImage children=child child=testsys.disk2.image.child +eventq_index=0 image_file= read_only=false table_size=65536 [testsys.disk2.image.child] type=RawDiskImage -image_file=/dist/m5/system/disks/linux-bigswap2.img +eventq_index=0 +image_file=/dist/disks/linux-bigswap2.img read_only=true [testsys.intrctrl] type=IntrControl +eventq_index=0 sys=testsys [testsys.iobridge] type=Bridge clk_domain=testsys.clk_domain delay=50000 +eventq_index=0 ranges=0:134217727 req_size=16 resp_size=16 @@ -942,6 +1088,7 @@ slave=testsys.iobus.master[29] [testsys.iobus] type=NoncoherentBus clk_domain=testsys.clk_domain +eventq_index=0 header_cycles=1 use_default_range=true width=8 @@ -953,6 +1100,7 @@ slave=testsys.bridge.master testsys.tsunami.ide.dma testsys.tsunami.ethernet.dma type=CoherentBus children=badaddr_responder clk_domain=testsys.clk_domain +eventq_index=0 header_cycles=1 system=testsys use_default_range=false @@ -964,6 +1112,7 @@ slave=testsys.system_port testsys.cpu.icache_port testsys.cpu.dcache_port testsy [testsys.membus.badaddr_responder] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=0 pio_latency=100000 @@ -983,6 +1132,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=testsys.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -994,15 +1144,18 @@ port=testsys.membus.master[1] type=SimpleDisk children=disk disk=testsys.simple_disk.disk +eventq_index=0 system=testsys [testsys.simple_disk.disk] type=RawDiskImage -image_file=/dist/m5/system/disks/linux-latest.img +eventq_index=0 +image_file=/dist/disks/linux-latest.img read_only=true [testsys.terminal] type=Terminal +eventq_index=0 intr_control=testsys.intrctrl number=0 output=true @@ -1011,6 +1164,7 @@ port=3456 [testsys.tsunami] type=Tsunami children=backdoor cchip ethernet fake_OROM fake_ata0 fake_ata1 fake_pnp_addr fake_pnp_read0 fake_pnp_read1 fake_pnp_read2 fake_pnp_read3 fake_pnp_read4 fake_pnp_read5 fake_pnp_read6 fake_pnp_read7 fake_pnp_write fake_ppc fake_sm_chip fake_uart1 fake_uart2 fake_uart3 fake_uart4 fb ide io pchip pciconfig uart +eventq_index=0 intrctrl=testsys.intrctrl system=testsys @@ -1019,6 +1173,7 @@ type=AlphaBackdoor clk_domain=testsys.clk_domain cpu=testsys.cpu disk=testsys.simple_disk +eventq_index=0 pio_addr=8804682956800 pio_latency=100000 platform=testsys.tsunami @@ -1029,6 +1184,7 @@ pio=testsys.iobus.master[24] [testsys.tsunami.cchip] type=TsunamiCChip clk_domain=testsys.clk_domain +eventq_index=0 
pio_addr=8803072344064 pio_latency=100000 system=testsys @@ -1058,6 +1214,7 @@ BAR5LegacyIO=false BAR5Size=0 BIST=0 CacheLineSize=0 +CapabilityPtr=0 CardbusCIS=0 ClassCode=2 Command=0 @@ -1067,8 +1224,40 @@ HeaderType=0 InterruptLine=30 InterruptPin=1 LatencyTimer=0 +MSICAPBaseOffset=0 +MSICAPCapId=0 +MSICAPMaskBits=0 +MSICAPMsgAddr=0 +MSICAPMsgCtrl=0 +MSICAPMsgData=0 +MSICAPMsgUpperAddr=0 +MSICAPNextCapability=0 +MSICAPPendingBits=0 +MSIXCAPBaseOffset=0 +MSIXCAPCapId=0 +MSIXCAPNextCapability=0 +MSIXMsgCtrl=0 +MSIXPbaOffset=0 +MSIXTableOffset=0 MaximumLatency=52 MinimumGrant=176 +PMCAPBaseOffset=0 +PMCAPCapId=0 +PMCAPCapabilities=0 +PMCAPCtrlStatus=0 +PMCAPNextCapability=0 +PXCAPBaseOffset=0 +PXCAPCapId=0 +PXCAPCapabilities=0 +PXCAPDevCap2=0 +PXCAPDevCapabilities=0 +PXCAPDevCtrl=0 +PXCAPDevCtrl2=0 +PXCAPDevStatus=0 +PXCAPLinkCap=0 +PXCAPLinkCtrl=0 +PXCAPLinkStatus=0 +PXCAPNextCapability=0 ProgIF=0 Revision=0 Status=656 @@ -1085,6 +1274,7 @@ dma_read_delay=0 dma_read_factor=0 dma_write_delay=0 dma_write_factor=0 +eventq_index=0 hardware_address=00:90:00:00:00:01 intr_delay=10000000 pci_bus=0 @@ -1109,11 +1299,13 @@ pio=testsys.iobus.master[27] [testsys.tsunami.ethernet.clk_domain] type=SrcClockDomain clock=2000 +eventq_index=0 voltage_domain=testsys.voltage_domain [testsys.tsunami.fake_OROM] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8796093677568 pio_latency=100000 @@ -1131,6 +1323,7 @@ pio=testsys.iobus.master[8] [testsys.tsunami.fake_ata0] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848432 pio_latency=100000 @@ -1148,6 +1341,7 @@ pio=testsys.iobus.master[19] [testsys.tsunami.fake_ata1] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848304 pio_latency=100000 @@ -1165,6 +1359,7 @@ pio=testsys.iobus.master[20] [testsys.tsunami.fake_pnp_addr] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848569 pio_latency=100000 @@ -1182,6 +1377,7 @@ pio=testsys.iobus.master[9] [testsys.tsunami.fake_pnp_read0] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848451 pio_latency=100000 @@ -1199,6 +1395,7 @@ pio=testsys.iobus.master[11] [testsys.tsunami.fake_pnp_read1] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848515 pio_latency=100000 @@ -1216,6 +1413,7 @@ pio=testsys.iobus.master[12] [testsys.tsunami.fake_pnp_read2] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848579 pio_latency=100000 @@ -1233,6 +1431,7 @@ pio=testsys.iobus.master[13] [testsys.tsunami.fake_pnp_read3] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848643 pio_latency=100000 @@ -1250,6 +1449,7 @@ pio=testsys.iobus.master[14] [testsys.tsunami.fake_pnp_read4] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848707 pio_latency=100000 @@ -1267,6 +1467,7 @@ pio=testsys.iobus.master[15] [testsys.tsunami.fake_pnp_read5] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848771 pio_latency=100000 @@ -1284,6 +1485,7 @@ pio=testsys.iobus.master[16] [testsys.tsunami.fake_pnp_read6] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848835 pio_latency=100000 @@ -1301,6 +1503,7 @@ pio=testsys.iobus.master[17] [testsys.tsunami.fake_pnp_read7] type=IsaFake clk_domain=testsys.clk_domain 
+eventq_index=0 fake_mem=false pio_addr=8804615848899 pio_latency=100000 @@ -1318,6 +1521,7 @@ pio=testsys.iobus.master[18] [testsys.tsunami.fake_pnp_write] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615850617 pio_latency=100000 @@ -1335,6 +1539,7 @@ pio=testsys.iobus.master[10] [testsys.tsunami.fake_ppc] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848891 pio_latency=100000 @@ -1352,6 +1557,7 @@ pio=testsys.iobus.master[7] [testsys.tsunami.fake_sm_chip] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848816 pio_latency=100000 @@ -1369,6 +1575,7 @@ pio=testsys.iobus.master[2] [testsys.tsunami.fake_uart1] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848696 pio_latency=100000 @@ -1386,6 +1593,7 @@ pio=testsys.iobus.master[3] [testsys.tsunami.fake_uart2] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848936 pio_latency=100000 @@ -1403,6 +1611,7 @@ pio=testsys.iobus.master[4] [testsys.tsunami.fake_uart3] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848680 pio_latency=100000 @@ -1420,6 +1629,7 @@ pio=testsys.iobus.master[5] [testsys.tsunami.fake_uart4] type=IsaFake clk_domain=testsys.clk_domain +eventq_index=0 fake_mem=false pio_addr=8804615848944 pio_latency=100000 @@ -1438,6 +1648,7 @@ pio=testsys.iobus.master[6] type=BadDevice clk_domain=testsys.clk_domain devicename=FrameBuffer +eventq_index=0 pio_addr=8804615848912 pio_latency=100000 system=testsys @@ -1465,6 +1676,7 @@ BAR5LegacyIO=false BAR5Size=0 BIST=0 CacheLineSize=0 +CapabilityPtr=0 CardbusCIS=0 ClassCode=1 Command=0 @@ -1474,8 +1686,40 @@ HeaderType=0 InterruptLine=31 InterruptPin=1 LatencyTimer=0 +MSICAPBaseOffset=0 +MSICAPCapId=0 +MSICAPMaskBits=0 +MSICAPMsgAddr=0 +MSICAPMsgCtrl=0 +MSICAPMsgData=0 +MSICAPMsgUpperAddr=0 +MSICAPNextCapability=0 +MSICAPPendingBits=0 +MSIXCAPBaseOffset=0 +MSIXCAPCapId=0 +MSIXCAPNextCapability=0 +MSIXMsgCtrl=0 +MSIXPbaOffset=0 +MSIXTableOffset=0 MaximumLatency=0 MinimumGrant=0 +PMCAPBaseOffset=0 +PMCAPCapId=0 +PMCAPCapabilities=0 +PMCAPCtrlStatus=0 +PMCAPNextCapability=0 +PXCAPBaseOffset=0 +PXCAPCapId=0 +PXCAPCapabilities=0 +PXCAPDevCap2=0 +PXCAPDevCapabilities=0 +PXCAPDevCtrl=0 +PXCAPDevCtrl2=0 +PXCAPDevStatus=0 +PXCAPLinkCap=0 +PXCAPLinkCtrl=0 +PXCAPLinkStatus=0 +PXCAPNextCapability=0 ProgIF=133 Revision=0 Status=640 @@ -1487,6 +1731,7 @@ clk_domain=testsys.clk_domain config_latency=20000 ctrl_offset=0 disks=testsys.disk0 testsys.disk2 +eventq_index=0 io_shift=0 pci_bus=0 pci_dev=0 @@ -1501,6 +1746,7 @@ pio=testsys.iobus.master[25] [testsys.tsunami.io] type=TsunamiIO clk_domain=testsys.clk_domain +eventq_index=0 frequency=976562500 pio_addr=8804615847936 pio_latency=100000 @@ -1513,6 +1759,7 @@ pio=testsys.iobus.master[22] [testsys.tsunami.pchip] type=TsunamiPChip clk_domain=testsys.clk_domain +eventq_index=0 pio_addr=8802535473152 pio_latency=100000 system=testsys @@ -1523,6 +1770,7 @@ pio=testsys.iobus.master[1] type=PciConfigAll bus=0 clk_domain=testsys.clk_domain +eventq_index=0 pio_addr=0 pio_latency=30000 platform=testsys.tsunami @@ -1533,6 +1781,7 @@ pio=testsys.iobus.default [testsys.tsunami.uart] type=Uart8250 clk_domain=testsys.clk_domain +eventq_index=0 pio_addr=8804615848952 pio_latency=100000 platform=testsys.tsunami @@ -1542,5 +1791,6 @@ pio=testsys.iobus.master[23] [testsys.voltage_domain] type=VoltageDomain +eventq_index=0 
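The PCI device sections above (the NSGigE ethernet and the IdeController on both systems) now dump an explicit capability list: CapabilityPtr plus the MSI (MSICAP*), MSI-X (MSIXCAP*), power-management (PMCAP*) and PCI Express (PXCAP*) capability registers, all defaulting to zero. Purely to illustrate what those parameters are, a hypothetical config script could advertise an MSI capability as below; the offsets and the choice of IdeController are invented for the example and are not something this patch configures:

    from m5.objects import IdeController

    ide = IdeController(disks=[])
    ide.CapabilityPtr = 0x50       # config-space offset of the first capability
    ide.MSICAPBaseOffset = 0x50    # MSI structure lives at that offset
    ide.MSICAPCapId = 0x05         # 0x05 is the architected MSI capability ID
    ide.MSICAPNextCapability = 0   # end of the capability list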
voltage=1.000000 diff --git a/tests/quick/fs/80.netperf-stream/ref/alpha/linux/twosys-tsunami-simple-atomic/simerr b/tests/quick/fs/80.netperf-stream/ref/alpha/linux/twosys-tsunami-simple-atomic/simerr index 22a941a4b..c0d08bdf9 100755 --- a/tests/quick/fs/80.netperf-stream/ref/alpha/linux/twosys-tsunami-simple-atomic/simerr +++ b/tests/quick/fs/80.netperf-stream/ref/alpha/linux/twosys-tsunami-simple-atomic/simerr @@ -1,8 +1,5 @@ warn: Sockets disabled, not accepting terminal connections -warn: CoherentBus testsys.membus has no snooping ports attached! warn: Sockets disabled, not accepting gdb connections -warn: CoherentBus drivesys.membus has no snooping ports attached! warn: Prefetch instructions in Alpha do not do anything warn: Prefetch instructions in Alpha do not do anything warn: Obsolete M5 ivlb instruction encountered. -hack: be nice to actually delete the event here diff --git a/tests/quick/fs/80.netperf-stream/ref/alpha/linux/twosys-tsunami-simple-atomic/simout b/tests/quick/fs/80.netperf-stream/ref/alpha/linux/twosys-tsunami-simple-atomic/simout index af627b8fa..75ac3ebe6 100755 --- a/tests/quick/fs/80.netperf-stream/ref/alpha/linux/twosys-tsunami-simple-atomic/simout +++ b/tests/quick/fs/80.netperf-stream/ref/alpha/linux/twosys-tsunami-simple-atomic/simout @@ -1,16 +1,14 @@ -Redirecting stdout to build/ALPHA/tests/opt/quick/fs/80.netperf-stream/alpha/linux/twosys-tsunami-simple-atomic/simout -Redirecting stderr to build/ALPHA/tests/opt/quick/fs/80.netperf-stream/alpha/linux/twosys-tsunami-simple-atomic/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 24 2013 03:08:53 -gem5 started Sep 28 2013 03:05:52 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:27:55 +gem5 started Jan 22 2014 17:25:12 +gem5 executing on u200540-lin command line: build/ALPHA/gem5.opt -d build/ALPHA/tests/opt/quick/fs/80.netperf-stream/alpha/linux/twosys-tsunami-simple-atomic -re tests/run.py build/ALPHA/tests/opt/quick/fs/80.netperf-stream/alpha/linux/twosys-tsunami-simple-atomic Global frequency set at 1000000000000 ticks per second -info: kernel located at: /dist/m5/system/binaries/vmlinux +info: kernel located at: /dist/binaries/vmlinux 0: testsys.tsunami.io.rtc: Real-time clock set to Thu Jan 1 00:00:00 2009 -info: kernel located at: /dist/m5/system/binaries/vmlinux +info: kernel located at: /dist/binaries/vmlinux 0: drivesys.tsunami.io.rtc: Real-time clock set to Thu Jan 1 00:00:00 2009 info: Entering event queue @ 0. Starting simulation... 
Exiting @ tick 4321621592000 because checkpoint diff --git a/tests/quick/fs/80.netperf-stream/ref/alpha/linux/twosys-tsunami-simple-atomic/stats.txt b/tests/quick/fs/80.netperf-stream/ref/alpha/linux/twosys-tsunami-simple-atomic/stats.txt index 4ccc9d7bc..cf63db341 100644 --- a/tests/quick/fs/80.netperf-stream/ref/alpha/linux/twosys-tsunami-simple-atomic/stats.txt +++ b/tests/quick/fs/80.netperf-stream/ref/alpha/linux/twosys-tsunami-simple-atomic/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.200409 # Nu sim_ticks 200409284500 # Number of ticks simulated final_tick 4321214250500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 21337245 # Simulator instruction rate (inst/s) -host_op_rate 21337231 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 8163912733 # Simulator tick rate (ticks/s) -host_mem_usage 473328 # Number of bytes of host memory used -host_seconds 24.55 # Real time elapsed on the host +host_inst_rate 22333008 # Simulator instruction rate (inst/s) +host_op_rate 22332995 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 8544906534 # Simulator tick rate (ticks/s) +host_mem_usage 473604 # Number of bytes of host memory used +host_seconds 23.45 # Real time elapsed on the host sim_insts 523790075 # Number of instructions simulated sim_ops 523790075 # Number of ops (including micro ops) simulated +testsys.voltage_domain.voltage 1 # Voltage in Volts +testsys.clk_domain.clock 1000 # Clock period in ticks testsys.physmem.bytes_read::cpu.inst 81046720 # Number of bytes read from this memory testsys.physmem.bytes_read::cpu.data 27826276 # Number of bytes read from this memory testsys.physmem.bytes_read::tsunami.ethernet 57260496 # Number of bytes read from this memory @@ -55,6 +57,7 @@ testsys.disk2.dma_read_txs 0 # Nu testsys.disk2.dma_write_full_pages 0 # Number of full page size DMA writes. testsys.disk2.dma_write_bytes 0 # Number of bytes transfered via DMA writes. testsys.disk2.dma_write_txs 0 # Number of DMA write transactions. 
+testsys.cpu.clk_domain.clock 500 # Clock period in ticks testsys.cpu.dtb.fetch_hits 0 # ITB hits testsys.cpu.dtb.fetch_misses 0 # ITB misses testsys.cpu.dtb.fetch_acv 0 # ITB acv @@ -178,6 +181,7 @@ testsys.cpu.kern.mode_ticks::kernel 994603000 60.01% 60.01% # nu testsys.cpu.kern.mode_ticks::user 533068000 32.16% 92.17% # number of ticks spent at the given mode testsys.cpu.kern.mode_ticks::idle 129740500 7.83% 100.00% # number of ticks spent at the given mode testsys.cpu.kern.swap_context 438 # number of times the context was actually changed +testsys.tsunami.ethernet.clk_domain.clock 2000 # Clock period in ticks testsys.tsunami.ethernet.txBytes 960 # Bytes Transmitted testsys.tsunami.ethernet.rxBytes 798 # Bytes Received testsys.tsunami.ethernet.txPackets 8 # Number of Packets Transmitted @@ -229,6 +233,8 @@ testsys.tsunami.ethernet.postedInterrupts 2385819 # n testsys.tsunami.ethernet.droppedPackets 0 # number of packets dropped testsys.iobus.throughput 290423421 # Throughput (bytes/s) testsys.iobus.data_through_bus 58203550 # Total data (bytes) +drivesys.voltage_domain.voltage 1 # Voltage in Volts +drivesys.clk_domain.clock 1000 # Clock period in ticks drivesys.physmem.bytes_read::cpu.inst 76205572 # Number of bytes read from this memory drivesys.physmem.bytes_read::cpu.data 26284292 # Number of bytes read from this memory drivesys.physmem.bytes_read::tsunami.ethernet 57260526 # Number of bytes read from this memory @@ -273,6 +279,7 @@ drivesys.disk2.dma_read_txs 0 # Nu drivesys.disk2.dma_write_full_pages 0 # Number of full page size DMA writes. drivesys.disk2.dma_write_bytes 0 # Number of bytes transfered via DMA writes. drivesys.disk2.dma_write_txs 0 # Number of DMA write transactions. +drivesys.cpu.clk_domain.clock 250 # Clock period in ticks drivesys.cpu.dtb.fetch_hits 0 # ITB hits drivesys.cpu.dtb.fetch_misses 0 # ITB misses drivesys.cpu.dtb.fetch_acv 0 # ITB acv @@ -386,6 +393,7 @@ drivesys.cpu.kern.mode_ticks::kernel 78134250 2.63% 2.63% # nu drivesys.cpu.kern.mode_ticks::user 319668250 10.78% 13.41% # number of ticks spent at the given mode drivesys.cpu.kern.mode_ticks::idle 2567942000 86.59% 100.00% # number of ticks spent at the given mode drivesys.cpu.kern.swap_context 72 # number of times the context was actually changed +drivesys.tsunami.ethernet.clk_domain.clock 2000 # Clock period in ticks drivesys.tsunami.ethernet.txBytes 798 # Bytes Transmitted drivesys.tsunami.ethernet.rxBytes 960 # Bytes Received drivesys.tsunami.ethernet.txPackets 5 # Number of Packets Transmitted @@ -445,13 +453,15 @@ sim_seconds 0.000407 # Nu sim_ticks 407341500 # Number of ticks simulated final_tick 4321621592000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 10977168844 # Simulator instruction rate (inst/s) -host_op_rate 10973505866 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 8529941400 # Simulator tick rate (ticks/s) -host_mem_usage 473328 # Number of bytes of host memory used +host_inst_rate 11306223920 # Simulator instruction rate (inst/s) +host_op_rate 11302970418 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 8786485437 # Simulator tick rate (ticks/s) +host_mem_usage 473604 # Number of bytes of host memory used host_seconds 0.05 # Real time elapsed on the host sim_insts 523862353 # Number of instructions simulated sim_ops 523862353 # Number of ops (including micro ops) simulated +testsys.voltage_domain.voltage 1 # Voltage in Volts 
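The other half of the change shows up here: each clock domain now reports its period in simulator ticks and each voltage domain its voltage, instead of leaving the clocks implicit. With sim_freq from the same file (1e12 ticks per second in these runs) the period converts directly to a frequency; a small sketch of that arithmetic, not part of the patch:

    def domain_freq_ghz(period_ticks, sim_freq=1e12):
        """Convert a *.clk_domain.clock stat (period in ticks) to GHz."""
        return sim_freq / period_ticks / 1e9

    # testsys.clk_domain.clock = 1000      -> 1.0 GHz bus clock
    # testsys.cpu.clk_domain.clock = 500   -> 2.0 GHz CPU clock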
+testsys.clk_domain.clock 1000 # Clock period in ticks testsys.physmem.bytes_read::cpu.inst 144504 # Number of bytes read from this memory testsys.physmem.bytes_read::cpu.data 49936 # Number of bytes read from this memory testsys.physmem.bytes_read::tsunami.ethernet 116376 # Number of bytes read from this memory @@ -493,6 +503,7 @@ testsys.disk2.dma_read_txs 0 # Nu testsys.disk2.dma_write_full_pages 0 # Number of full page size DMA writes. testsys.disk2.dma_write_bytes 0 # Number of bytes transfered via DMA writes. testsys.disk2.dma_write_txs 0 # Number of DMA write transactions. +testsys.cpu.clk_domain.clock 500 # Clock period in ticks testsys.cpu.dtb.fetch_hits 0 # ITB hits testsys.cpu.dtb.fetch_misses 0 # ITB misses testsys.cpu.dtb.fetch_acv 0 # ITB acv @@ -588,6 +599,7 @@ testsys.cpu.kern.mode_ticks::kernel 0 # nu testsys.cpu.kern.mode_ticks::user 0 # number of ticks spent at the given mode testsys.cpu.kern.mode_ticks::idle 0 # number of ticks spent at the given mode testsys.cpu.kern.swap_context 0 # number of times the context was actually changed +testsys.tsunami.ethernet.clk_domain.clock 2000 # Clock period in ticks testsys.tsunami.ethernet.descDMAReads 4849 # Number of descriptors the device read w/ DMA testsys.tsunami.ethernet.descDMAWrites 0 # Number of descriptors the device wrote w/ DMA testsys.tsunami.ethernet.descDmaReadBytes 116376 # number of descriptor bytes read w/ DMA @@ -621,6 +633,8 @@ testsys.tsunami.ethernet.postedInterrupts 4849 # n testsys.tsunami.ethernet.droppedPackets 0 # number of packets dropped testsys.iobus.throughput 290429529 # Throughput (bytes/s) testsys.iobus.data_through_bus 118304 # Total data (bytes) +drivesys.voltage_domain.voltage 1 # Voltage in Volts +drivesys.clk_domain.clock 1000 # Clock period in ticks drivesys.physmem.bytes_read::cpu.inst 144608 # Number of bytes read from this memory drivesys.physmem.bytes_read::cpu.data 49952 # Number of bytes read from this memory drivesys.physmem.bytes_read::tsunami.ethernet 116400 # Number of bytes read from this memory @@ -662,6 +676,7 @@ drivesys.disk2.dma_read_txs 0 # Nu drivesys.disk2.dma_write_full_pages 0 # Number of full page size DMA writes. drivesys.disk2.dma_write_bytes 0 # Number of bytes transfered via DMA writes. drivesys.disk2.dma_write_txs 0 # Number of DMA write transactions. 
+drivesys.cpu.clk_domain.clock 250 # Clock period in ticks drivesys.cpu.dtb.fetch_hits 0 # ITB hits drivesys.cpu.dtb.fetch_misses 0 # ITB misses drivesys.cpu.dtb.fetch_acv 0 # ITB acv @@ -757,6 +772,7 @@ drivesys.cpu.kern.mode_ticks::kernel 0 # nu drivesys.cpu.kern.mode_ticks::user 0 # number of ticks spent at the given mode drivesys.cpu.kern.mode_ticks::idle 0 # number of ticks spent at the given mode drivesys.cpu.kern.swap_context 0 # number of times the context was actually changed +drivesys.tsunami.ethernet.clk_domain.clock 2000 # Clock period in ticks drivesys.tsunami.ethernet.descDMAReads 4850 # Number of descriptors the device read w/ DMA drivesys.tsunami.ethernet.descDMAWrites 0 # Number of descriptors the device wrote w/ DMA drivesys.tsunami.ethernet.descDmaReadBytes 116400 # number of descriptor bytes read w/ DMA diff --git a/tests/quick/se/00.hello/ref/alpha/linux/inorder-timing/config.ini b/tests/quick/se/00.hello/ref/alpha/linux/inorder-timing/config.ini index 8be59c81c..38d5b70ef 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/inorder-timing/config.ini +++ b/tests/quick/se/00.hello/ref/alpha/linux/inorder-timing/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -56,6 +60,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 fetchBuffSize=4 function_trace=false function_trace_start=0 @@ -90,6 +95,7 @@ BTBTagSize=16 RASSize=16 choiceCtrBits=2 choicePredictorSize=8192 +eventq_index=0 globalCtrBits=2 globalPredictorSize=8192 instShiftAmt=2 @@ -105,6 +111,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -113,6 +120,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -127,11 +135,14 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] type=AlphaTLB +eventq_index=0 size=64 [system.cpu.icache] @@ -140,6 +151,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -148,6 +160,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -162,17 +175,23 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] type=AlphaInterrupts +eventq_index=0 [system.cpu.isa] type=AlphaISA +eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB +eventq_index=0 size=48 [system.cpu.l2cache] @@ -181,6 +200,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -189,6 +209,7 @@ mshrs=20 
prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -203,12 +224,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -218,6 +242,7 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -227,7 +252,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/alpha/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/alpha/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -241,11 +267,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -265,6 +293,7 @@ conf_table_reported=true device_bus_width=8 device_rowbuffer_size=1024 devices_per_rank=8 +eventq_index=0 in_addr_map=true mem_sched_policy=frfcfs null=false @@ -276,17 +305,21 @@ static_backend_latency=10000 static_frontend_latency=10000 tBURST=5000 tCL=13750 +tRAS=35000 tRCD=13750 tREFI=7800000 tRFC=300000 tRP=13750 +tRRD=6250 tWTR=7500 tXAW=40000 write_buffer_size=32 -write_thresh_perc=70 +write_high_thresh_perc=70 +write_low_thresh_perc=0 port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/alpha/linux/inorder-timing/simerr b/tests/quick/se/00.hello/ref/alpha/linux/inorder-timing/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/alpha/linux/inorder-timing/simerr +++ b/tests/quick/se/00.hello/ref/alpha/linux/inorder-timing/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/alpha/linux/inorder-timing/simout b/tests/quick/se/00.hello/ref/alpha/linux/inorder-timing/simout index b50e34b75..b1e32f7df 100755 --- a/tests/quick/se/00.hello/ref/alpha/linux/inorder-timing/simout +++ b/tests/quick/se/00.hello/ref/alpha/linux/inorder-timing/simout @@ -1,14 +1,12 @@ -Redirecting stdout to build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/inorder-timing/simout -Redirecting stderr to build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/inorder-timing/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 24 2013 03:08:53 -gem5 started Sep 28 2013 03:05:26 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:27:55 +gem5 started Jan 22 2014 17:24:08 +gem5 executing on u200540-lin command line: build/ALPHA/gem5.opt -d build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/inorder-timing -re tests/run.py build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/inorder-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... info: Increasing stack size by one page. Hello world! 
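The se/00.hello config above shows the remaining recurring change: every cache and LRU tags section now dumps a sequential_access flag (false by default), i.e. whether the tag and data arrays are looked up one after the other instead of in parallel. A minimal sketch of flipping it from a Python config, using the BaseCache parameter names as they appear in these dumps (the specific sizes and tgts_per_mshr value are only example inputs):

    from m5.objects import BaseCache

    l2 = BaseCache(size='2MB', assoc=8,
                   hit_latency=20, response_latency=20,
                   mshrs=20, tgts_per_mshr=12)
    l2.sequential_access = True   # serialise tag and data lookups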
-Exiting @ tick 25046000 because target called exit() +Exiting @ tick 25485000 because target called exit() diff --git a/tests/quick/se/00.hello/ref/alpha/linux/inorder-timing/stats.txt b/tests/quick/se/00.hello/ref/alpha/linux/inorder-timing/stats.txt index 3b67933ac..116ba4c72 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/inorder-timing/stats.txt +++ b/tests/quick/se/00.hello/ref/alpha/linux/inorder-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000025 # Nu sim_ticks 25485000 # Number of ticks simulated final_tick 25485000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 27492 # Simulator instruction rate (inst/s) -host_op_rate 27490 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 109632626 # Simulator tick rate (ticks/s) -host_mem_usage 225100 # Number of bytes of host memory used -host_seconds 0.23 # Real time elapsed on the host +host_inst_rate 24806 # Simulator instruction rate (inst/s) +host_op_rate 24805 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 98922905 # Simulator tick rate (ticks/s) +host_mem_usage 229760 # Number of bytes of host memory used +host_seconds 0.26 # Real time elapsed on the host sim_insts 6390 # Number of instructions simulated sim_ops 6390 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 19200 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 10752 # Number of bytes read from this memory system.physmem.bytes_read::total 29952 # Number of bytes read from this memory @@ -214,6 +216,7 @@ system.membus.reqLayer0.occupancy 560000 # La system.membus.reqLayer0.utilization 2.2 # Layer utilization (%) system.membus.respLayer1.occupancy 4374750 # Layer occupancy (ticks) system.membus.respLayer1.utilization 17.2 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.branchPred.lookups 1632 # Number of BP lookups system.cpu.branchPred.condPredicted 1160 # Number of conditional branches predicted system.cpu.branchPred.condIncorrect 706 # Number of conditional branches incorrect @@ -325,6 +328,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 142.311081 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.069488 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.069488 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 301 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 126 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 175 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.146973 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 2131 # Number of tag accesses +system.cpu.icache.tags.data_accesses 2131 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 560 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 560 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 560 # number of demand (read+write) hits @@ -430,6 +439,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 56.745411 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.004344 # Average percentage of cache occupancy 
system.cpu.l2cache.tags.occ_percent::cpu.data 0.001732 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.006076 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 395 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 155 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 240 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.012054 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 4228 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 4228 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 1 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 1 # number of ReadReq hits system.cpu.l2cache.demand_hits::cpu.inst 1 # number of demand (read+write) hits @@ -553,6 +568,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 103.493430 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.025267 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.025267 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 168 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 40 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 128 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.041016 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 4264 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 4264 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 1086 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 1086 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 515 # number of WriteReq hits diff --git a/tests/quick/se/00.hello/ref/alpha/linux/o3-timing/config.ini b/tests/quick/se/00.hello/ref/alpha/linux/o3-timing/config.ini index 07eaff0f1..6e7555e80 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/o3-timing/config.ini +++ b/tests/quick/se/00.hello/ref/alpha/linux/o3-timing/config.ini @@ -159,6 +159,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -175,6 +176,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] @@ -504,6 +506,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -520,6 +523,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] @@ -529,6 +533,7 @@ eventq_index=0 [system.cpu.isa] type=AlphaISA eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB @@ -550,6 +555,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -566,6 +572,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] @@ -592,7 +599,7 @@ env= errout=cerr euid=100 eventq_index=0 -executable=tests/test-progs/hello/bin/alpha/linux/hello +executable=/dist/test-progs/hello/bin/alpha/linux/hello gid=100 input=cin 
max_stack_size=67108864 diff --git a/tests/quick/se/00.hello/ref/alpha/linux/o3-timing/simerr b/tests/quick/se/00.hello/ref/alpha/linux/o3-timing/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/alpha/linux/o3-timing/simerr +++ b/tests/quick/se/00.hello/ref/alpha/linux/o3-timing/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/alpha/linux/o3-timing/simout b/tests/quick/se/00.hello/ref/alpha/linux/o3-timing/simout index 589b57e2d..5b34c9429 100755 --- a/tests/quick/se/00.hello/ref/alpha/linux/o3-timing/simout +++ b/tests/quick/se/00.hello/ref/alpha/linux/o3-timing/simout @@ -1,12 +1,12 @@ gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Oct 15 2013 18:24:51 -gem5 started Oct 16 2013 01:34:33 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:27:55 +gem5 started Jan 22 2014 17:24:08 +gem5 executing on u200540-lin command line: build/ALPHA/gem5.opt -d build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/o3-timing -re tests/run.py build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/o3-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... info: Increasing stack size by one page. Hello world! -Exiting @ tick 20671000 because target called exit() +Exiting @ tick 21065000 because target called exit() diff --git a/tests/quick/se/00.hello/ref/alpha/linux/o3-timing/stats.txt b/tests/quick/se/00.hello/ref/alpha/linux/o3-timing/stats.txt index cfed15046..7833baea6 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/o3-timing/stats.txt +++ b/tests/quick/se/00.hello/ref/alpha/linux/o3-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000021 # Nu sim_ticks 21065000 # Number of ticks simulated final_tick 21065000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 36663 # Simulator instruction rate (inst/s) -host_op_rate 36659 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 121177991 # Simulator tick rate (ticks/s) -host_mem_usage 273132 # Number of bytes of host memory used -host_seconds 0.17 # Real time elapsed on the host +host_inst_rate 40027 # Simulator instruction rate (inst/s) +host_op_rate 40023 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 132300521 # Simulator tick rate (ticks/s) +host_mem_usage 230780 # Number of bytes of host memory used +host_seconds 0.16 # Real time elapsed on the host sim_insts 6372 # Number of instructions simulated sim_ops 6372 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 20032 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 11136 # Number of bytes read from this memory system.physmem.bytes_read::total 31168 # Number of bytes read from this memory @@ -214,6 +216,7 @@ system.membus.reqLayer0.occupancy 619000 # La system.membus.reqLayer0.utilization 2.9 # Layer utilization (%) system.membus.respLayer1.occupancy 4556000 # Layer occupancy (ticks) system.membus.respLayer1.utilization 21.6 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.branchPred.lookups 2883 # Number of BP lookups system.cpu.branchPred.condPredicted 1697 # Number of 
conditional branches predicted system.cpu.branchPred.condIncorrect 511 # Number of conditional branches incorrect @@ -544,6 +547,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 159.548856 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.077905 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.077905 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 314 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 143 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 171 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.153320 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 5078 # Number of tag accesses +system.cpu.icache.tags.data_accesses 5078 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 1893 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 1893 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 1893 # number of demand (read+write) hits @@ -630,6 +639,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 59.787647 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.004872 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.001825 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.006696 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 414 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 182 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 232 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.012634 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 4399 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 4399 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 1 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 1 # number of ReadReq hits system.cpu.l2cache.demand_hits::cpu.inst 1 # number of demand (read+write) hits @@ -753,6 +768,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 107.351368 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.026209 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.026209 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 174 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 50 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 124 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.042480 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 5692 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 5692 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 1724 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 1724 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 506 # number of WriteReq hits diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-atomic/config.ini b/tests/quick/se/00.hello/ref/alpha/linux/simple-atomic/config.ini index 3d9687a29..06ea19107 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-atomic/config.ini +++ 
b/tests/quick/se/00.hello/ref/alpha/linux/simple-atomic/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 fastmem=false function_trace=false function_trace_start=0 @@ -74,20 +79,26 @@ icache_port=system.membus.slave[1] [system.cpu.dtb] type=AlphaTLB +eventq_index=0 size=64 [system.cpu.interrupts] type=AlphaInterrupts +eventq_index=0 [system.cpu.isa] type=AlphaISA +eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB +eventq_index=0 size=48 [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -97,7 +108,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/alpha/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/alpha/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -111,11 +123,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -128,6 +142,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -137,5 +152,6 @@ port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-atomic/simerr b/tests/quick/se/00.hello/ref/alpha/linux/simple-atomic/simerr index 7edd901b2..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-atomic/simerr +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-atomic/simerr @@ -1,3 +1 @@ -warn: CoherentBus system.membus has no snooping ports attached! warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-atomic/simout b/tests/quick/se/00.hello/ref/alpha/linux/simple-atomic/simout index 1fb01db1e..1ccb73543 100755 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-atomic/simout +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-atomic/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/simple-atomic/simout -Redirecting stderr to build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/simple-atomic/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. 
-gem5 compiled Sep 24 2013 03:08:53 -gem5 started Sep 28 2013 03:05:26 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:27:55 +gem5 started Jan 22 2014 17:24:08 +gem5 executing on u200540-lin command line: build/ALPHA/gem5.opt -d build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/simple-atomic -re tests/run.py build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/simple-atomic Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-atomic/stats.txt b/tests/quick/se/00.hello/ref/alpha/linux/simple-atomic/stats.txt index 469297f21..26873a78e 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-atomic/stats.txt +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-atomic/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000003 # Nu sim_ticks 3208000 # Number of ticks simulated final_tick 3208000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 2502 # Simulator instruction rate (inst/s) -host_op_rate 2502 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 1255935 # Simulator tick rate (ticks/s) -host_mem_usage 215792 # Number of bytes of host memory used -host_seconds 2.55 # Real time elapsed on the host +host_inst_rate 44230 # Simulator instruction rate (inst/s) +host_op_rate 44225 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 22200446 # Simulator tick rate (ticks/s) +host_mem_usage 220024 # Number of bytes of host memory used +host_seconds 0.14 # Real time elapsed on the host sim_insts 6390 # Number of instructions simulated sim_ops 6390 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 25600 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 8788 # Number of bytes read from this memory system.physmem.bytes_read::total 34388 # Number of bytes read from this memory @@ -36,6 +38,7 @@ system.physmem.bw_total::total 12806733167 # To system.membus.throughput 12806733167 # Throughput (bytes/s) system.membus.data_through_bus 41084 # Total data (bytes) system.membus.snoop_data_through_bus 0 # Total snoop data (bytes) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.dtb.fetch_hits 0 # ITB hits system.cpu.dtb.fetch_misses 0 # ITB misses system.cpu.dtb.fetch_acv 0 # ITB acv diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MESI_Two_Level/config.ini b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MESI_Two_Level/config.ini index 0a3882bba..1d40a69d9 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MESI_Two_Level/config.ini +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MESI_Two_Level/config.ini @@ -88,6 +88,7 @@ eventq_index=0 [system.cpu.isa] type=AlphaISA eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB @@ -107,7 +108,7 @@ env= errout=cerr euid=100 eventq_index=0 -executable=tests/test-progs/hello/bin/alpha/linux/hello +executable=/dist/test-progs/hello/bin/alpha/linux/hello gid=100 input=cin max_stack_size=67108864 diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MESI_Two_Level/simerr b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MESI_Two_Level/simerr index bbc0c797e..86244d4bf 100755 --- 
a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MESI_Two_Level/simerr +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MESI_Two_Level/simerr @@ -3,4 +3,3 @@ warn: rounding error > tolerance warn: rounding error > tolerance 0.072760 rounded to 0 warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MESI_Two_Level/simout b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MESI_Two_Level/simout index 5fac9bcf7..703a818a3 100755 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MESI_Two_Level/simout +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MESI_Two_Level/simout @@ -1,12 +1,10 @@ -Redirecting stdout to build/ALPHA_MESI_CMP_directory/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MESI_CMP_directory/simout -Redirecting stderr to build/ALPHA_MESI_CMP_directory/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MESI_CMP_directory/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:27:02 -gem5 started Sep 22 2013 05:27:12 -gem5 executing on zizzer -command line: build/ALPHA_MESI_CMP_directory/gem5.opt -d build/ALPHA_MESI_CMP_directory/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MESI_CMP_directory -re tests/run.py build/ALPHA_MESI_CMP_directory/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MESI_CMP_directory +gem5 compiled Jan 22 2014 16:37:52 +gem5 started Jan 22 2014 17:25:49 +gem5 executing on u200540-lin +command line: build/ALPHA_MESI_Two_Level/gem5.opt -d build/ALPHA_MESI_Two_Level/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MESI_Two_Level -re tests/run.py build/ALPHA_MESI_Two_Level/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MESI_Two_Level Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... info: Increasing stack size by one page. 
diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MESI_Two_Level/stats.txt b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MESI_Two_Level/stats.txt index dd7fe91b8..9dc55b67c 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MESI_Two_Level/stats.txt +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MESI_Two_Level/stats.txt @@ -4,13 +4,16 @@ sim_seconds 0.000139 # Nu sim_ticks 138616 # Number of ticks simulated final_tick 138616 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_inst_rate 20823 # Simulator instruction rate (inst/s) -host_op_rate 20821 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 451640 # Simulator tick rate (ticks/s) -host_mem_usage 170972 # Number of bytes of host memory used -host_seconds 0.31 # Real time elapsed on the host +host_inst_rate 26295 # Simulator instruction rate (inst/s) +host_op_rate 26294 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 570348 # Simulator tick rate (ticks/s) +host_mem_usage 126360 # Number of bytes of host memory used +host_seconds 0.24 # Real time elapsed on the host sim_insts 6390 # Number of instructions simulated sim_ops 6390 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.delayHist::bucket_size 1 # delay histogram for all message system.ruby.delayHist::max_bucket 9 # delay histogram for all message system.ruby.delayHist::samples 9645 # delay histogram for all message @@ -100,6 +103,7 @@ system.ruby.network.routers1.msg_bytes.Response_Control::2 6392 system.ruby.network.routers1.msg_bytes.Writeback_Data::0 10440 system.ruby.network.routers1.msg_bytes.Writeback_Data::1 10152 system.ruby.network.routers1.msg_bytes.Writeback_Control::0 2328 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.dir_cntrl0.memBuffer.memReq 1737 # Total number of memory requests system.ruby.dir_cntrl0.memBuffer.memRead 1460 # Number of memory reads system.ruby.dir_cntrl0.memBuffer.memWrite 277 # Number of memory writes @@ -149,6 +153,7 @@ system.ruby.network.msg_byte.Response_Data 697032 system.ruby.network.msg_byte.Response_Control 114288 system.ruby.network.msg_byte.Writeback_Data 61776 system.ruby.network.msg_byte.Writeback_Control 6984 +system.cpu.clk_domain.clock 1 # Clock period in ticks system.cpu.dtb.fetch_hits 0 # ITB hits system.cpu.dtb.fetch_misses 0 # ITB misses system.cpu.dtb.fetch_acv 0 # ITB acv diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/config.ini b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/config.ini index 454f386da..055a078bd 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/config.ini +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,6 +14,7 @@ children=clk_domain cpu physmem ruby sys_port_proxy voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.sys_port_proxy.slave[0] 
[system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -68,24 +73,31 @@ icache_port=system.ruby.l1_cntrl0.sequencer.slave[0] [system.cpu.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu.dtb] type=AlphaTLB +eventq_index=0 size=64 [system.cpu.interrupts] type=AlphaInterrupts +eventq_index=0 [system.cpu.isa] type=AlphaISA +eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB +eventq_index=0 size=48 [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -95,7 +107,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/alpha/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/alpha/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -111,6 +124,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -119,18 +133,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 l2_cntrl0 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 l2_cntrl0 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=1 random_seed=1234 randomization=false -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -138,9 +156,10 @@ type=Directory_Controller children=directory memBuffer buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=2 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory directory_latency=6 +eventq_index=0 memBuffer=system.ruby.dir_cntrl0.memBuffer number_of_TBEs=256 peer=Null @@ -151,6 +170,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 map_levels=4 numa_high_bit=5 size=268435456 @@ -167,6 +187,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -186,7 +207,8 @@ L1Dcache=system.ruby.l1_cntrl0.L1Dcache L1Icache=system.ruby.l1_cntrl0.L1Icache buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 +eventq_index=0 l2_select_num_bits=0 number_of_TBEs=256 peer=Null @@ -204,6 +226,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -218,6 +241,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=3 replacement_policy=PSEUDO_LRU @@ -233,6 +257,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.L1Dcache deadlock_threshold=500000 +eventq_index=0 icache=system.ruby.l1_cntrl0.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -250,7 +275,8 @@ children=L2cache L2cache=system.ruby.l2_cntrl0.L2cache buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 +eventq_index=0 number_of_TBEs=256 peer=Null recycle_latency=10 @@ -265,6 +291,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 
+eventq_index=0 is_icache=false latency=15 replacement_policy=PSEUDO_LRU @@ -278,6 +305,7 @@ tagArrayBanks=1 type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -287,6 +315,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 system.ruby.network.ext_links2 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 system.ruby.network.int_links2 number_of_virtual_networks=10 @@ -297,6 +326,7 @@ topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 int_node=system.ruby.network.routers0 latency=1 @@ -306,6 +336,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l2_cntrl0 int_node=system.ruby.network.routers1 latency=1 @@ -315,6 +346,7 @@ weight=1 [system.ruby.network.ext_links2] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers2 latency=1 @@ -324,6 +356,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=3 node_a=system.ruby.network.routers0 @@ -333,6 +366,7 @@ weight=1 [system.ruby.network.int_links1] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=4 node_a=system.ruby.network.routers1 @@ -342,6 +376,7 @@ weight=1 [system.ruby.network.int_links2] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=5 node_a=system.ruby.network.routers2 @@ -351,38 +386,36 @@ weight=1 [system.ruby.network.routers0] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers1] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers2] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 [system.ruby.network.routers3] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=3 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false -num_of_sequencers=1 -ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -394,5 +427,6 @@ slave=system.system_port [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/simerr b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/simerr index bbc0c797e..86244d4bf 100755 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/simerr +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/simerr @@ -3,4 +3,3 @@ warn: rounding error > tolerance warn: rounding error > tolerance 0.072760 rounded to 0 warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/simout b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/simout index 7aebf91e4..e44640397 100755 --- 
a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/simout +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA_MOESI_CMP_directory/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/simout -Redirecting stderr to build/ALPHA_MOESI_CMP_directory/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:36:12 -gem5 started Sep 22 2013 05:36:34 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:42:56 +gem5 started Jan 22 2014 17:26:22 +gem5 executing on u200540-lin command line: build/ALPHA_MOESI_CMP_directory/gem5.opt -d build/ALPHA_MOESI_CMP_directory/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MOESI_CMP_directory -re tests/run.py build/ALPHA_MOESI_CMP_directory/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MOESI_CMP_directory Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/stats.txt b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/stats.txt index 6769cc2eb..97b9e8b98 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/stats.txt +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_directory/stats.txt @@ -4,13 +4,16 @@ sim_seconds 0.000118 # Nu sim_ticks 117611 # Number of ticks simulated final_tick 117611 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_inst_rate 18637 # Simulator instruction rate (inst/s) -host_op_rate 18636 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 342978 # Simulator tick rate (ticks/s) -host_mem_usage 174220 # Number of bytes of host memory used -host_seconds 0.34 # Real time elapsed on the host +host_inst_rate 23182 # Simulator instruction rate (inst/s) +host_op_rate 23181 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 426626 # Simulator tick rate (ticks/s) +host_mem_usage 130676 # Number of bytes of host memory used +host_seconds 0.28 # Real time elapsed on the host sim_insts 6390 # Number of instructions simulated sim_ops 6390 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.outstanding_req_hist::bucket_size 1 system.ruby.outstanding_req_hist::max_bucket 9 system.ruby.outstanding_req_hist::samples 8449 @@ -82,6 +85,7 @@ system.ruby.network.routers1.msg_bytes.Writeback_Control::0 21664 system.ruby.network.routers1.msg_bytes.Writeback_Control::1 17488 system.ruby.network.routers1.msg_bytes.Writeback_Control::2 7192 system.ruby.network.routers1.msg_bytes.Unblock_Control::2 19768 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.dir_cntrl0.memBuffer.memReq 1303 # Total number of memory requests system.ruby.dir_cntrl0.memBuffer.memRead 1109 # Number of memory reads system.ruby.dir_cntrl0.memBuffer.memWrite 194 # Number of memory writes @@ -139,6 +143,7 @@ system.ruby.network.msg_byte.ResponseL2hit_Data 54648 system.ruby.network.msg_byte.Writeback_Data 334368 
system.ruby.network.msg_byte.Writeback_Control 139032 system.ruby.network.msg_byte.Unblock_Control 59304 +system.cpu.clk_domain.clock 1 # Clock period in ticks system.cpu.dtb.fetch_hits 0 # ITB hits system.cpu.dtb.fetch_misses 0 # ITB misses system.cpu.dtb.fetch_acv 0 # ITB acv diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_token/config.ini b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_token/config.ini index 98cbeddd9..c83923549 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_token/config.ini +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_token/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,6 +14,7 @@ children=clk_domain cpu physmem ruby sys_port_proxy voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -68,24 +73,31 @@ icache_port=system.ruby.l1_cntrl0.sequencer.slave[0] [system.cpu.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu.dtb] type=AlphaTLB +eventq_index=0 size=64 [system.cpu.interrupts] type=AlphaInterrupts +eventq_index=0 [system.cpu.isa] type=AlphaISA +eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB +eventq_index=0 size=48 [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -95,7 +107,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/alpha/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/alpha/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -111,6 +124,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -119,18 +133,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 l2_cntrl0 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 l2_cntrl0 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=1 random_seed=1234 randomization=false -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -138,10 +156,11 @@ type=Directory_Controller children=directory memBuffer buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=2 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory directory_latency=5 distributed_persistent=true +eventq_index=0 fixed_timeout_latency=100 l2_select_num_bits=0 memBuffer=system.ruby.dir_cntrl0.memBuffer @@ -155,6 +174,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 map_levels=4 numa_high_bit=5 size=268435456 @@ -171,6 +191,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 
dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -191,8 +212,9 @@ L1Icache=system.ruby.l1_cntrl0.L1Icache N_tokens=2 buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 dynamic_timeout_enabled=true +eventq_index=0 fixed_timeout_latency=300 l1_request_latency=2 l1_response_latency=2 @@ -215,6 +237,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -229,6 +252,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -244,6 +268,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.L1Dcache deadlock_threshold=500000 +eventq_index=0 icache=system.ruby.l1_cntrl0.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -262,7 +287,8 @@ L2cache=system.ruby.l2_cntrl0.L2cache N_tokens=2 buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 +eventq_index=0 filtering_enabled=true l2_request_latency=5 l2_response_latency=5 @@ -278,6 +304,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=10 replacement_policy=PSEUDO_LRU @@ -291,6 +318,7 @@ tagArrayBanks=1 type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -300,6 +328,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 system.ruby.network.ext_links2 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 system.ruby.network.int_links2 number_of_virtual_networks=10 @@ -310,6 +339,7 @@ topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 int_node=system.ruby.network.routers0 latency=1 @@ -319,6 +349,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l2_cntrl0 int_node=system.ruby.network.routers1 latency=1 @@ -328,6 +359,7 @@ weight=1 [system.ruby.network.ext_links2] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers2 latency=1 @@ -337,6 +369,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=3 node_a=system.ruby.network.routers0 @@ -346,6 +379,7 @@ weight=1 [system.ruby.network.int_links1] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=4 node_a=system.ruby.network.routers1 @@ -355,6 +389,7 @@ weight=1 [system.ruby.network.int_links2] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=5 node_a=system.ruby.network.routers2 @@ -364,38 +399,36 @@ weight=1 [system.ruby.network.routers0] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers1] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers2] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 [system.ruby.network.routers3] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=3 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false -num_of_sequencers=1 
-ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -407,5 +440,6 @@ slave=system.system_port [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_token/simerr b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_token/simerr index bbc0c797e..86244d4bf 100755 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_token/simerr +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_token/simerr @@ -3,4 +3,3 @@ warn: rounding error > tolerance warn: rounding error > tolerance 0.072760 rounded to 0 warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_token/simout b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_token/simout index 972ce6ed2..05cd140ea 100755 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_token/simout +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_token/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA_MOESI_CMP_token/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MOESI_CMP_token/simout -Redirecting stderr to build/ALPHA_MOESI_CMP_token/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MOESI_CMP_token/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:44:48 -gem5 started Sep 22 2013 05:44:59 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:47:59 +gem5 started Jan 22 2014 17:27:26 +gem5 executing on u200540-lin command line: build/ALPHA_MOESI_CMP_token/gem5.opt -d build/ALPHA_MOESI_CMP_token/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MOESI_CMP_token -re tests/run.py build/ALPHA_MOESI_CMP_token/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MOESI_CMP_token Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_token/stats.txt b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_token/stats.txt index 5443611c7..47e7c5bb6 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_token/stats.txt +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_CMP_token/stats.txt @@ -4,13 +4,16 @@ sim_seconds 0.000114 # Nu sim_ticks 113627 # Number of ticks simulated final_tick 113627 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_inst_rate 15419 # Simulator instruction rate (inst/s) -host_op_rate 15419 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 274163 # Simulator tick rate (ticks/s) -host_mem_usage 171088 # Number of bytes of host memory used -host_seconds 0.41 # Real time elapsed on the host +host_inst_rate 25426 # Simulator instruction rate (inst/s) +host_op_rate 25424 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 452072 # Simulator tick rate (ticks/s) +host_mem_usage 127540 # Number of bytes of host memory used +host_seconds 0.25 # Real time elapsed on the host sim_insts 6390 # Number of instructions simulated sim_ops 6390 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.outstanding_req_hist::bucket_size 1 system.ruby.outstanding_req_hist::max_bucket 9 system.ruby.outstanding_req_hist::samples 8449 @@ -76,6 +79,7 @@ system.ruby.network.routers1.msg_bytes.ResponseL2hit_Data::4 14688 system.ruby.network.routers1.msg_bytes.Response_Control::4 8 system.ruby.network.routers1.msg_bytes.Writeback_Data::4 113976 system.ruby.network.routers1.msg_bytes.Writeback_Control::4 7736 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.dir_cntrl0.memBuffer.memReq 1407 # Total number of memory requests system.ruby.dir_cntrl0.memBuffer.memRead 1178 # Number of memory reads system.ruby.dir_cntrl0.memBuffer.memWrite 229 # Number of memory writes @@ -125,6 +129,7 @@ system.ruby.network.msg_byte.ResponseL2hit_Data 44064 system.ruby.network.msg_byte.Response_Control 24 system.ruby.network.msg_byte.Writeback_Data 341928 system.ruby.network.msg_byte.Writeback_Control 23208 +system.cpu.clk_domain.clock 1 # Clock period in ticks system.cpu.dtb.fetch_hits 0 # ITB hits system.cpu.dtb.fetch_misses 0 # ITB misses system.cpu.dtb.fetch_acv 0 # ITB acv diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_hammer/config.ini b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_hammer/config.ini index 5efa528b0..bbaaafb7c 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_hammer/config.ini +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_hammer/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,6 +14,7 @@ children=clk_domain cpu physmem ruby sys_port_proxy voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 
voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -68,24 +73,31 @@ icache_port=system.ruby.l1_cntrl0.sequencer.slave[0] [system.cpu.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu.dtb] type=AlphaTLB +eventq_index=0 size=64 [system.cpu.interrupts] type=AlphaInterrupts +eventq_index=0 [system.cpu.isa] type=AlphaISA +eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB +eventq_index=0 size=48 [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -95,7 +107,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/alpha/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/alpha/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -111,6 +124,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -119,18 +133,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=1 random_seed=1234 randomization=false -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -138,8 +156,9 @@ type=Directory_Controller children=directory memBuffer probeFilter buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory +eventq_index=0 full_bit_dir_enabled=false memBuffer=system.ruby.dir_cntrl0.memBuffer memory_controller_latency=2 @@ -154,6 +173,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 map_levels=4 numa_high_bit=5 size=268435456 @@ -170,6 +190,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -187,6 +208,7 @@ type=RubyCache assoc=4 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=1 replacement_policy=PSEUDO_LRU @@ -205,7 +227,8 @@ L2cache=system.ruby.l1_cntrl0.L2cache buffer_size=0 cache_response_latency=10 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 +eventq_index=0 issue_latency=2 l2_cache_hit_latency=10 no_mig_atomic=true @@ -223,6 +246,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -237,6 +261,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=2 replacement_policy=PSEUDO_LRU @@ -251,6 +276,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=10 replacement_policy=PSEUDO_LRU @@ -266,6 +292,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.L1Dcache deadlock_threshold=500000 +eventq_index=0 icache=system.ruby.l1_cntrl0.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -281,6 +308,7 @@ slave=system.cpu.icache_port 
system.cpu.dcache_port type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -290,6 +318,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 number_of_virtual_networks=10 @@ -300,6 +329,7 @@ topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 int_node=system.ruby.network.routers0 latency=1 @@ -309,6 +339,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers1 latency=1 @@ -318,6 +349,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=2 node_a=system.ruby.network.routers0 @@ -327,6 +359,7 @@ weight=1 [system.ruby.network.int_links1] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=3 node_a=system.ruby.network.routers1 @@ -336,32 +369,29 @@ weight=1 [system.ruby.network.routers0] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers1] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers2] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false -num_of_sequencers=1 -ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -373,5 +403,6 @@ slave=system.system_port [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_hammer/simerr b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_hammer/simerr index bbc0c797e..86244d4bf 100755 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_hammer/simerr +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_hammer/simerr @@ -3,4 +3,3 @@ warn: rounding error > tolerance warn: rounding error > tolerance 0.072760 rounded to 0 warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_hammer/simout b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_hammer/simout index 2f946fb64..74d9e5871 100755 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_hammer/simout +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_hammer/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA_MOESI_hammer/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MOESI_hammer/simout -Redirecting stderr to build/ALPHA_MOESI_hammer/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MOESI_hammer/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. 
-gem5 compiled Sep 22 2013 05:17:28 -gem5 started Sep 22 2013 05:18:00 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:32:54 +gem5 started Jan 22 2014 17:25:16 +gem5 executing on u200540-lin command line: build/ALPHA_MOESI_hammer/gem5.opt -d build/ALPHA_MOESI_hammer/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MOESI_hammer -re tests/run.py build/ALPHA_MOESI_hammer/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby-MOESI_hammer Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_hammer/stats.txt b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_hammer/stats.txt index da745542b..afdd49aff 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_hammer/stats.txt +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby-MOESI_hammer/stats.txt @@ -4,13 +4,16 @@ sim_seconds 0.000093 # Nu sim_ticks 93341 # Number of ticks simulated final_tick 93341 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_inst_rate 30230 # Simulator instruction rate (inst/s) -host_op_rate 30227 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 441501 # Simulator tick rate (ticks/s) -host_mem_usage 171020 # Number of bytes of host memory used -host_seconds 0.21 # Real time elapsed on the host +host_inst_rate 34391 # Simulator instruction rate (inst/s) +host_op_rate 34389 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 502293 # Simulator tick rate (ticks/s) +host_mem_usage 127476 # Number of bytes of host memory used +host_seconds 0.19 # Real time elapsed on the host sim_insts 6390 # Number of instructions simulated sim_ops 6390 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.outstanding_req_hist::bucket_size 1 system.ruby.outstanding_req_hist::max_bucket 9 system.ruby.outstanding_req_hist::samples 8449 @@ -43,6 +46,7 @@ system.ruby.miss_latency_hist::stdev 10.823033 system.ruby.miss_latency_hist | 0 0.00% 0.00% | 0 0.00% 0.00% | 0 0.00% 0.00% | 1098 94.74% 94.74% | 9 0.78% 95.51% | 24 2.07% 97.58% | 0 0.00% 97.58% | 27 2.33% 99.91% | 1 0.09% 100.00% | 0 0.00% 100.00% system.ruby.miss_latency_hist::total 1159 system.ruby.Directory.incomplete_times 1158 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.l1_cntrl0.L1Dcache.demand_hits 1332 # Number of cache demand hits system.ruby.l1_cntrl0.L1Dcache.demand_misses 716 # Number of cache demand misses system.ruby.l1_cntrl0.L1Dcache.demand_accesses 2048 # Number of cache demand accesses @@ -124,6 +128,7 @@ system.ruby.network.msg_byte.Response_Data 250344 system.ruby.network.msg_byte.Writeback_Data 47520 system.ruby.network.msg_byte.Writeback_Control 77016 system.ruby.network.msg_byte.Unblock_Control 27816 +system.cpu.clk_domain.clock 1 # Clock period in ticks system.cpu.dtb.fetch_hits 0 # ITB hits system.cpu.dtb.fetch_misses 0 # ITB misses system.cpu.dtb.fetch_acv 0 # ITB acv diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby/config.ini b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby/config.ini index 5c6bf177e..080d250b7 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby/config.ini +++ 
b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,6 +14,7 @@ children=clk_domain cpu physmem ruby sys_port_proxy voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -68,24 +73,31 @@ icache_port=system.ruby.l1_cntrl0.sequencer.slave[0] [system.cpu.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu.dtb] type=AlphaTLB +eventq_index=0 size=64 [system.cpu.interrupts] type=AlphaInterrupts +eventq_index=0 [system.cpu.isa] type=AlphaISA +eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB +eventq_index=0 size=48 [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -95,7 +107,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/alpha/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/alpha/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -111,6 +124,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -119,18 +133,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=1 random_seed=1234 randomization=false -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -138,9 +156,10 @@ type=Directory_Controller children=directory memBuffer buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory directory_latency=12 +eventq_index=0 memBuffer=system.ruby.dir_cntrl0.memBuffer number_of_TBEs=256 peer=Null @@ -151,6 +170,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 map_levels=4 numa_high_bit=5 size=268435456 @@ -167,6 +187,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -186,7 +207,8 @@ buffer_size=0 cacheMemory=system.ruby.l1_cntrl0.cacheMemory cache_response_latency=12 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 +eventq_index=0 issue_latency=2 number_of_TBEs=256 peer=Null @@ -202,6 +224,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -217,6 +240,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.cacheMemory deadlock_threshold=500000 +eventq_index=0 icache=system.ruby.l1_cntrl0.cacheMemory 
max_outstanding_requests=16 ruby_system=system.ruby @@ -232,6 +256,7 @@ slave=system.cpu.icache_port system.cpu.dcache_port type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -241,6 +266,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 number_of_virtual_networks=10 @@ -251,6 +277,7 @@ topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 int_node=system.ruby.network.routers0 latency=1 @@ -260,6 +287,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers1 latency=1 @@ -269,6 +297,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=2 node_a=system.ruby.network.routers0 @@ -278,6 +307,7 @@ weight=1 [system.ruby.network.int_links1] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=3 node_a=system.ruby.network.routers1 @@ -287,32 +317,29 @@ weight=1 [system.ruby.network.routers0] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers1] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers2] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false -num_of_sequencers=1 -ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -324,5 +351,6 @@ slave=system.system_port [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby/simerr b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby/simerr index bbc0c797e..86244d4bf 100755 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby/simerr +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby/simerr @@ -3,4 +3,3 @@ warn: rounding error > tolerance warn: rounding error > tolerance 0.072760 rounded to 0 warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby/simout b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby/simout index cedef1822..e7d414efc 100755 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby/simout +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby/simout -Redirecting stderr to build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. 
-gem5 compiled Sep 24 2013 03:08:53 -gem5 started Sep 28 2013 03:05:27 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:27:55 +gem5 started Jan 22 2014 17:24:20 +gem5 executing on u200540-lin command line: build/ALPHA/gem5.opt -d build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby -re tests/run.py build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/simple-timing-ruby Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby/stats.txt b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby/stats.txt index 6d4e698a8..19e4fff41 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby/stats.txt +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing-ruby/stats.txt @@ -4,13 +4,16 @@ sim_seconds 0.000144 # Nu sim_ticks 143853 # Number of ticks simulated final_tick 143853 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_inst_rate 26416 # Simulator instruction rate (inst/s) -host_op_rate 26414 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 594577 # Simulator tick rate (ticks/s) -host_mem_usage 170576 # Number of bytes of host memory used -host_seconds 0.24 # Real time elapsed on the host +host_inst_rate 41580 # Simulator instruction rate (inst/s) +host_op_rate 41576 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 935887 # Simulator tick rate (ticks/s) +host_mem_usage 126996 # Number of bytes of host memory used +host_seconds 0.15 # Real time elapsed on the host sim_insts 6390 # Number of instructions simulated sim_ops 6390 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.delayHist::bucket_size 1 # delay histogram for all message system.ruby.delayHist::max_bucket 9 # delay histogram for all message system.ruby.delayHist::samples 3456 # delay histogram for all message @@ -47,6 +50,7 @@ system.ruby.miss_latency_hist::stdev 7.725779 system.ruby.miss_latency_hist | 0 0.00% 0.00% | 0 0.00% 0.00% | 0 0.00% 0.00% | 336 19.42% 19.42% | 1251 72.31% 91.73% | 136 7.86% 99.60% | 5 0.29% 99.88% | 2 0.12% 100.00% | 0 0.00% 100.00% | 0 0.00% 100.00% system.ruby.miss_latency_hist::total 1730 system.ruby.Directory.incomplete_times 1729 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.l1_cntrl0.cacheMemory.demand_hits 6718 # Number of cache demand hits system.ruby.l1_cntrl0.cacheMemory.demand_misses 1730 # Number of cache demand misses system.ruby.l1_cntrl0.cacheMemory.demand_accesses 8448 # Number of cache demand accesses @@ -99,6 +103,7 @@ system.ruby.network.msg_byte.Control 41520 system.ruby.network.msg_byte.Data 372816 system.ruby.network.msg_byte.Response_Data 373680 system.ruby.network.msg_byte.Writeback_Control 41424 +system.cpu.clk_domain.clock 1 # Clock period in ticks system.cpu.dtb.fetch_hits 0 # ITB hits system.cpu.dtb.fetch_misses 0 # ITB misses system.cpu.dtb.fetch_acv 0 # ITB acv diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing/config.ini b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing/config.ini index 595a8159f..b0e615a7c 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing/config.ini +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing/config.ini @@ -1,7 +1,9 
@@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -71,6 +76,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -79,6 +85,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -93,11 +100,14 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] type=AlphaTLB +eventq_index=0 size=64 [system.cpu.icache] @@ -106,6 +116,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -114,6 +125,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -128,17 +140,23 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] type=AlphaInterrupts +eventq_index=0 [system.cpu.isa] type=AlphaISA +eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB +eventq_index=0 size=48 [system.cpu.l2cache] @@ -147,6 +165,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -155,6 +174,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -169,12 +189,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -184,6 +207,7 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -193,7 +217,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/alpha/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/alpha/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -207,11 +232,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -224,6 +251,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -233,5 +261,6 @@ 
port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing/simerr b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing/simerr +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing/simout b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing/simout index b5f87b785..03ecf7225 100755 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing/simout +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/simple-timing/simout -Redirecting stderr to build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/simple-timing/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 24 2013 03:08:53 -gem5 started Sep 28 2013 03:05:26 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:27:55 +gem5 started Jan 22 2014 17:24:16 +gem5 executing on u200540-lin command line: build/ALPHA/gem5.opt -d build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/simple-timing -re tests/run.py build/ALPHA/tests/opt/quick/se/00.hello/alpha/linux/simple-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing/stats.txt b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing/stats.txt index 6038d0a3c..84f056acc 100644 --- a/tests/quick/se/00.hello/ref/alpha/linux/simple-timing/stats.txt +++ b/tests/quick/se/00.hello/ref/alpha/linux/simple-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000033 # Nu sim_ticks 32544000 # Number of ticks simulated final_tick 32544000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 27670 # Simulator instruction rate (inst/s) -host_op_rate 27667 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 140894748 # Simulator tick rate (ticks/s) -host_mem_usage 224272 # Number of bytes of host memory used -host_seconds 0.23 # Real time elapsed on the host +host_inst_rate 61527 # Simulator instruction rate (inst/s) +host_op_rate 61510 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 313188739 # Simulator tick rate (ticks/s) +host_mem_usage 228704 # Number of bytes of host memory used +host_seconds 0.10 # Real time elapsed on the host sim_insts 6390 # Number of instructions simulated sim_ops 6390 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 17792 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 10752 # Number of bytes read from this memory system.physmem.bytes_read::total 28544 # Number of bytes read from this memory @@ -42,6 +44,7 @@ system.membus.reqLayer0.occupancy 446000 # La system.membus.reqLayer0.utilization 1.4 # Layer utilization (%) system.membus.respLayer1.occupancy 4014000 # Layer occupancy (ticks) system.membus.respLayer1.utilization 12.3 # Layer utilization (%) 
+system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.dtb.fetch_hits 0 # ITB hits system.cpu.dtb.fetch_misses 0 # ITB misses system.cpu.dtb.fetch_acv 0 # ITB acv @@ -106,6 +109,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 127.998991 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.062500 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.062500 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 279 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 103 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 176 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.136230 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 13081 # Number of tag accesses +system.cpu.icache.tags.data_accesses 13081 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 6122 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 6122 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 6122 # number of demand (read+write) hits @@ -186,6 +195,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 56.479444 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.003907 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.001724 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.005630 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 373 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 122 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 251 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.011383 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 4022 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 4022 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 1 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 1 # number of ReadReq hits system.cpu.l2cache.demand_hits::cpu.inst 1 # number of demand (read+write) hits @@ -309,6 +324,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 103.762109 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.025333 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.025333 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 168 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 27 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 141 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.041016 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 4264 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 4264 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 1088 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 1088 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 792 # number of WriteReq hits diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/o3-timing/config.ini b/tests/quick/se/00.hello/ref/alpha/tru64/o3-timing/config.ini index b9dbe7d51..15208c06e 100644 --- 
a/tests/quick/se/00.hello/ref/alpha/tru64/o3-timing/config.ini +++ b/tests/quick/se/00.hello/ref/alpha/tru64/o3-timing/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -64,6 +68,8 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 +fetchBufferSize=64 fetchToDecodeDelay=1 fetchTrapLatency=1 fetchWidth=8 @@ -128,6 +134,7 @@ BTBTagSize=16 RASSize=16 choiceCtrBits=2 choicePredictorSize=8192 +eventq_index=0 globalCtrBits=2 globalPredictorSize=8192 instShiftAmt=2 @@ -143,6 +150,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -151,6 +159,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -165,26 +174,32 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] type=AlphaTLB +eventq_index=0 size=64 [system.cpu.fuPool] type=FUPool children=FUList0 FUList1 FUList2 FUList3 FUList4 FUList5 FUList6 FUList7 FUList8 FUList=system.cpu.fuPool.FUList0 system.cpu.fuPool.FUList1 system.cpu.fuPool.FUList2 system.cpu.fuPool.FUList3 system.cpu.fuPool.FUList4 system.cpu.fuPool.FUList5 system.cpu.fuPool.FUList6 system.cpu.fuPool.FUList7 system.cpu.fuPool.FUList8 +eventq_index=0 [system.cpu.fuPool.FUList0] type=FUDesc children=opList count=6 +eventq_index=0 opList=system.cpu.fuPool.FUList0.opList [system.cpu.fuPool.FUList0.opList] type=OpDesc +eventq_index=0 issueLat=1 opClass=IntAlu opLat=1 @@ -193,16 +208,19 @@ opLat=1 type=FUDesc children=opList0 opList1 count=2 +eventq_index=0 opList=system.cpu.fuPool.FUList1.opList0 system.cpu.fuPool.FUList1.opList1 [system.cpu.fuPool.FUList1.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=IntMult opLat=3 [system.cpu.fuPool.FUList1.opList1] type=OpDesc +eventq_index=0 issueLat=19 opClass=IntDiv opLat=20 @@ -211,22 +229,26 @@ opLat=20 type=FUDesc children=opList0 opList1 opList2 count=4 +eventq_index=0 opList=system.cpu.fuPool.FUList2.opList0 system.cpu.fuPool.FUList2.opList1 system.cpu.fuPool.FUList2.opList2 [system.cpu.fuPool.FUList2.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatAdd opLat=2 [system.cpu.fuPool.FUList2.opList1] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatCmp opLat=2 [system.cpu.fuPool.FUList2.opList2] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatCvt opLat=2 @@ -235,22 +257,26 @@ opLat=2 type=FUDesc children=opList0 opList1 opList2 count=2 +eventq_index=0 opList=system.cpu.fuPool.FUList3.opList0 system.cpu.fuPool.FUList3.opList1 system.cpu.fuPool.FUList3.opList2 [system.cpu.fuPool.FUList3.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatMult opLat=4 [system.cpu.fuPool.FUList3.opList1] type=OpDesc +eventq_index=0 issueLat=12 opClass=FloatDiv opLat=12 [system.cpu.fuPool.FUList3.opList2] type=OpDesc +eventq_index=0 issueLat=24 
opClass=FloatSqrt opLat=24 @@ -259,10 +285,12 @@ opLat=24 type=FUDesc children=opList count=0 +eventq_index=0 opList=system.cpu.fuPool.FUList4.opList [system.cpu.fuPool.FUList4.opList] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemRead opLat=1 @@ -271,124 +299,145 @@ opLat=1 type=FUDesc children=opList00 opList01 opList02 opList03 opList04 opList05 opList06 opList07 opList08 opList09 opList10 opList11 opList12 opList13 opList14 opList15 opList16 opList17 opList18 opList19 count=4 +eventq_index=0 opList=system.cpu.fuPool.FUList5.opList00 system.cpu.fuPool.FUList5.opList01 system.cpu.fuPool.FUList5.opList02 system.cpu.fuPool.FUList5.opList03 system.cpu.fuPool.FUList5.opList04 system.cpu.fuPool.FUList5.opList05 system.cpu.fuPool.FUList5.opList06 system.cpu.fuPool.FUList5.opList07 system.cpu.fuPool.FUList5.opList08 system.cpu.fuPool.FUList5.opList09 system.cpu.fuPool.FUList5.opList10 system.cpu.fuPool.FUList5.opList11 system.cpu.fuPool.FUList5.opList12 system.cpu.fuPool.FUList5.opList13 system.cpu.fuPool.FUList5.opList14 system.cpu.fuPool.FUList5.opList15 system.cpu.fuPool.FUList5.opList16 system.cpu.fuPool.FUList5.opList17 system.cpu.fuPool.FUList5.opList18 system.cpu.fuPool.FUList5.opList19 [system.cpu.fuPool.FUList5.opList00] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdAdd opLat=1 [system.cpu.fuPool.FUList5.opList01] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdAddAcc opLat=1 [system.cpu.fuPool.FUList5.opList02] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdAlu opLat=1 [system.cpu.fuPool.FUList5.opList03] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdCmp opLat=1 [system.cpu.fuPool.FUList5.opList04] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdCvt opLat=1 [system.cpu.fuPool.FUList5.opList05] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMisc opLat=1 [system.cpu.fuPool.FUList5.opList06] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMult opLat=1 [system.cpu.fuPool.FUList5.opList07] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMultAcc opLat=1 [system.cpu.fuPool.FUList5.opList08] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdShift opLat=1 [system.cpu.fuPool.FUList5.opList09] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdShiftAcc opLat=1 [system.cpu.fuPool.FUList5.opList10] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdSqrt opLat=1 [system.cpu.fuPool.FUList5.opList11] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatAdd opLat=1 [system.cpu.fuPool.FUList5.opList12] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatAlu opLat=1 [system.cpu.fuPool.FUList5.opList13] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatCmp opLat=1 [system.cpu.fuPool.FUList5.opList14] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatCvt opLat=1 [system.cpu.fuPool.FUList5.opList15] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatDiv opLat=1 [system.cpu.fuPool.FUList5.opList16] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMisc opLat=1 [system.cpu.fuPool.FUList5.opList17] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMult opLat=1 [system.cpu.fuPool.FUList5.opList18] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMultAcc opLat=1 [system.cpu.fuPool.FUList5.opList19] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatSqrt opLat=1 @@ -397,10 +446,12 @@ opLat=1 type=FUDesc children=opList count=0 +eventq_index=0 opList=system.cpu.fuPool.FUList6.opList [system.cpu.fuPool.FUList6.opList] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemWrite opLat=1 @@ -409,16 +460,19 @@ opLat=1 type=FUDesc 
children=opList0 opList1 count=4 +eventq_index=0 opList=system.cpu.fuPool.FUList7.opList0 system.cpu.fuPool.FUList7.opList1 [system.cpu.fuPool.FUList7.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemRead opLat=1 [system.cpu.fuPool.FUList7.opList1] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemWrite opLat=1 @@ -427,10 +481,12 @@ opLat=1 type=FUDesc children=opList count=1 +eventq_index=0 opList=system.cpu.fuPool.FUList8.opList [system.cpu.fuPool.FUList8.opList] type=OpDesc +eventq_index=0 issueLat=3 opClass=IprAccess opLat=3 @@ -441,6 +497,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -449,6 +506,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -463,17 +521,23 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] type=AlphaInterrupts +eventq_index=0 [system.cpu.isa] type=AlphaISA +eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB +eventq_index=0 size=48 [system.cpu.l2cache] @@ -482,6 +546,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -490,6 +555,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -504,12 +570,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -519,6 +588,7 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -528,7 +598,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/alpha/tru64/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/alpha/tru64/hello gid=100 input=cin max_stack_size=67108864 @@ -542,11 +613,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -566,6 +639,7 @@ conf_table_reported=true device_bus_width=8 device_rowbuffer_size=1024 devices_per_rank=8 +eventq_index=0 in_addr_map=true mem_sched_policy=frfcfs null=false @@ -577,17 +651,21 @@ static_backend_latency=10000 static_frontend_latency=10000 tBURST=5000 tCL=13750 +tRAS=35000 tRCD=13750 tREFI=7800000 tRFC=300000 tRP=13750 +tRRD=6250 tWTR=7500 tXAW=40000 write_buffer_size=32 -write_thresh_perc=70 +write_high_thresh_perc=70 +write_low_thresh_perc=0 port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/o3-timing/simerr b/tests/quick/se/00.hello/ref/alpha/tru64/o3-timing/simerr index 27f858d8f..62976a831 100755 --- a/tests/quick/se/00.hello/ref/alpha/tru64/o3-timing/simerr +++ b/tests/quick/se/00.hello/ref/alpha/tru64/o3-timing/simerr @@ -1,4 +1,3 @@ warn: Sockets disabled, not accepting gdb connections warn: Prefetch instructions in Alpha do not do 
anything warn: ignoring syscall sigprocmask(18446744073709547831, 1, ...) -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/o3-timing/simout b/tests/quick/se/00.hello/ref/alpha/tru64/o3-timing/simout index 4cf5ca9ef..da1484dec 100755 --- a/tests/quick/se/00.hello/ref/alpha/tru64/o3-timing/simout +++ b/tests/quick/se/00.hello/ref/alpha/tru64/o3-timing/simout @@ -1,12 +1,12 @@ gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Oct 15 2013 18:24:51 -gem5 started Oct 16 2013 01:34:33 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:27:55 +gem5 started Jan 22 2014 17:24:20 +gem5 executing on u200540-lin command line: build/ALPHA/gem5.opt -d build/ALPHA/tests/opt/quick/se/00.hello/alpha/tru64/o3-timing -re tests/run.py build/ALPHA/tests/opt/quick/se/00.hello/alpha/tru64/o3-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... info: Increasing stack size by one page. Hello world! -Exiting @ tick 11933500 because target called exit() +Exiting @ tick 11990500 because target called exit() diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/o3-timing/stats.txt b/tests/quick/se/00.hello/ref/alpha/tru64/o3-timing/stats.txt index 5e19e4b84..baea5f5eb 100644 --- a/tests/quick/se/00.hello/ref/alpha/tru64/o3-timing/stats.txt +++ b/tests/quick/se/00.hello/ref/alpha/tru64/o3-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000012 # Nu sim_ticks 11990500 # Number of ticks simulated final_tick 11990500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 47015 # Simulator instruction rate (inst/s) -host_op_rate 46988 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 235942636 # Simulator tick rate (ticks/s) -host_mem_usage 225832 # Number of bytes of host memory used -host_seconds 0.05 # Real time elapsed on the host +host_inst_rate 21306 # Simulator instruction rate (inst/s) +host_op_rate 21301 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 106974940 # Simulator tick rate (ticks/s) +host_mem_usage 229436 # Number of bytes of host memory used +host_seconds 0.11 # Real time elapsed on the host sim_insts 2387 # Number of instructions simulated sim_ops 2387 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 12032 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 5440 # Number of bytes read from this memory system.physmem.bytes_read::total 17472 # Number of bytes read from this memory @@ -211,6 +213,7 @@ system.membus.reqLayer0.occupancy 344000 # La system.membus.reqLayer0.utilization 2.9 # Layer utilization (%) system.membus.respLayer1.occupancy 2551500 # Layer occupancy (ticks) system.membus.respLayer1.utilization 21.3 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.branchPred.lookups 1176 # Number of BP lookups system.cpu.branchPred.condPredicted 619 # Number of conditional branches predicted system.cpu.branchPred.condIncorrect 258 # Number of conditional branches incorrect @@ -541,6 +544,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 93.236237 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 
0.045526 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.045526 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 188 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 161 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 27 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.091797 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 2318 # Number of tag accesses +system.cpu.icache.tags.data_accesses 2318 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 815 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 815 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 815 # number of demand (read+write) hits @@ -627,6 +636,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 28.688277 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.002851 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.000875 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.003727 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 249 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 211 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 38 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.007599 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 2457 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 2457 # Number of data accesses system.cpu.l2cache.ReadReq_misses::cpu.inst 188 # number of ReadReq misses system.cpu.l2cache.ReadReq_misses::cpu.data 61 # number of ReadReq misses system.cpu.l2cache.ReadReq_misses::total 249 # number of ReadReq misses @@ -744,6 +759,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 45.667407 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.011149 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.011149 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 85 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 66 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 19 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.020752 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 1989 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 1989 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 545 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 545 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 213 # number of WriteReq hits diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-atomic/config.ini b/tests/quick/se/00.hello/ref/alpha/tru64/simple-atomic/config.ini index b66459c3a..aca9f495e 100644 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-atomic/config.ini +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-atomic/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem 
voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 fastmem=false function_trace=false function_trace_start=0 @@ -74,20 +79,26 @@ icache_port=system.membus.slave[1] [system.cpu.dtb] type=AlphaTLB +eventq_index=0 size=64 [system.cpu.interrupts] type=AlphaInterrupts +eventq_index=0 [system.cpu.isa] type=AlphaISA +eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB +eventq_index=0 size=48 [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -97,7 +108,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/alpha/tru64/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/alpha/tru64/hello gid=100 input=cin max_stack_size=67108864 @@ -111,11 +123,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -128,6 +142,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -137,5 +152,6 @@ port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-atomic/simerr b/tests/quick/se/00.hello/ref/alpha/tru64/simple-atomic/simerr index bcbfa5445..32998f270 100755 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-atomic/simerr +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-atomic/simerr @@ -1,4 +1,2 @@ -warn: CoherentBus system.membus has no snooping ports attached! warn: Sockets disabled, not accepting gdb connections warn: ignoring syscall sigprocmask(18446744073709547831, 1, ...) -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-atomic/simout b/tests/quick/se/00.hello/ref/alpha/tru64/simple-atomic/simout index 034bc5823..33ba2e738 100755 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-atomic/simout +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-atomic/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA/tests/opt/quick/se/00.hello/alpha/tru64/simple-atomic/simout -Redirecting stderr to build/ALPHA/tests/opt/quick/se/00.hello/alpha/tru64/simple-atomic/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 24 2013 03:08:53 -gem5 started Sep 28 2013 03:05:27 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:27:55 +gem5 started Jan 22 2014 17:24:20 +gem5 executing on u200540-lin command line: build/ALPHA/gem5.opt -d build/ALPHA/tests/opt/quick/se/00.hello/alpha/tru64/simple-atomic -re tests/run.py build/ALPHA/tests/opt/quick/se/00.hello/alpha/tru64/simple-atomic Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
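Note on the stats.txt changes in this patch: every reference stats file gains per-domain entries such as system.voltage_domain.voltage, system.clk_domain.clock, system.cpu_clk_domain.clock and, in the Ruby configs, system.ruby.memctrl_clk_domain.clock, because after the clock-domain changes each clock domain reports its period in ticks and each voltage domain its voltage. A minimal sketch, assuming the standard m5.objects Python API (nothing here is added by this patch), of how such domains are wired up in a config script; the frequencies are illustrative and correspond to the 1000- and 500-tick periods in the classic-memory refs, where sim_freq is 10^12 ticks per second:

from m5.objects import System, SrcClockDomain, VoltageDomain

system = System()
# Shared voltage domain; reported as system.voltage_domain.voltage.
system.voltage_domain = VoltageDomain(voltage='1V')
# System-wide clock: 1 GHz -> a 1000-tick period at a 1 THz tick rate,
# reported as system.clk_domain.clock.
system.clk_domain = SrcClockDomain(clock='1GHz',
                                   voltage_domain=system.voltage_domain)
# Faster CPU-side clock: 2 GHz -> a 500-tick period,
# reported as system.cpu_clk_domain.clock.
system.cpu_clk_domain = SrcClockDomain(clock='2GHz',
                                       voltage_domain=system.voltage_domain)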
diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-atomic/stats.txt b/tests/quick/se/00.hello/ref/alpha/tru64/simple-atomic/stats.txt index aec79b975..04acc5c7e 100644 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-atomic/stats.txt +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-atomic/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000001 # Nu sim_ticks 1297500 # Number of ticks simulated final_tick 1297500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 388869 # Simulator instruction rate (inst/s) -host_op_rate 388153 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 195100518 # Simulator tick rate (ticks/s) -host_mem_usage 215488 # Number of bytes of host memory used -host_seconds 0.01 # Real time elapsed on the host +host_inst_rate 31206 # Simulator instruction rate (inst/s) +host_op_rate 31196 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 15701703 # Simulator tick rate (ticks/s) +host_mem_usage 219708 # Number of bytes of host memory used +host_seconds 0.08 # Real time elapsed on the host sim_insts 2577 # Number of instructions simulated sim_ops 2577 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 10340 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 3016 # Number of bytes read from this memory system.physmem.bytes_read::total 13356 # Number of bytes read from this memory @@ -36,6 +38,7 @@ system.physmem.bw_total::total 11879768786 # To system.membus.throughput 11879768786 # Throughput (bytes/s) system.membus.data_through_bus 15414 # Total data (bytes) system.membus.snoop_data_through_bus 0 # Total snoop data (bytes) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.dtb.fetch_hits 0 # ITB hits system.cpu.dtb.fetch_misses 0 # ITB misses system.cpu.dtb.fetch_acv 0 # ITB acv diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MESI_Two_Level/config.ini b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MESI_Two_Level/config.ini index f2dc4f3e0..8168c285c 100644 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MESI_Two_Level/config.ini +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MESI_Two_Level/config.ini @@ -88,6 +88,7 @@ eventq_index=0 [system.cpu.isa] type=AlphaISA eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB @@ -107,7 +108,7 @@ env= errout=cerr euid=100 eventq_index=0 -executable=tests/test-progs/hello/bin/alpha/tru64/hello +executable=/dist/test-progs/hello/bin/alpha/tru64/hello gid=100 input=cin max_stack_size=67108864 diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MESI_Two_Level/simerr b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MESI_Two_Level/simerr index 492f3e68f..a30a2a95c 100755 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MESI_Two_Level/simerr +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MESI_Two_Level/simerr @@ -4,4 +4,3 @@ warn: rounding error > tolerance 0.072760 rounded to 0 warn: Sockets disabled, not accepting gdb connections warn: ignoring syscall sigprocmask(18446744073709547831, 1, ...) 
-hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MESI_Two_Level/simout b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MESI_Two_Level/simout index 5722711d2..f35dc8674 100755 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MESI_Two_Level/simout +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MESI_Two_Level/simout @@ -1,14 +1,12 @@ -Redirecting stdout to build/ALPHA_MESI_CMP_directory/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MESI_CMP_directory/simout -Redirecting stderr to build/ALPHA_MESI_CMP_directory/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MESI_CMP_directory/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:27:02 -gem5 started Sep 22 2013 05:27:13 -gem5 executing on zizzer -command line: build/ALPHA_MESI_CMP_directory/gem5.opt -d build/ALPHA_MESI_CMP_directory/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MESI_CMP_directory -re tests/run.py build/ALPHA_MESI_CMP_directory/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MESI_CMP_directory +gem5 compiled Jan 22 2014 16:37:52 +gem5 started Jan 22 2014 17:26:00 +gem5 executing on u200540-lin +command line: build/ALPHA_MESI_Two_Level/gem5.opt -d build/ALPHA_MESI_Two_Level/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MESI_Two_Level -re tests/run.py build/ALPHA_MESI_Two_Level/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MESI_Two_Level Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... info: Increasing stack size by one page. Hello world! -Exiting @ tick 52575 because target called exit() +Exiting @ tick 52548 because target called exit() diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MESI_Two_Level/stats.txt b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MESI_Two_Level/stats.txt index 1d9a45506..96547c7d5 100644 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MESI_Two_Level/stats.txt +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MESI_Two_Level/stats.txt @@ -4,13 +4,16 @@ sim_seconds 0.000053 # Nu sim_ticks 52548 # Number of ticks simulated final_tick 52548 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_inst_rate 16682 # Simulator instruction rate (inst/s) -host_op_rate 16680 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 340078 # Simulator tick rate (ticks/s) -host_mem_usage 169540 # Number of bytes of host memory used -host_seconds 0.15 # Real time elapsed on the host +host_inst_rate 25744 # Simulator instruction rate (inst/s) +host_op_rate 25740 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 524809 # Simulator tick rate (ticks/s) +host_mem_usage 124924 # Number of bytes of host memory used +host_seconds 0.10 # Real time elapsed on the host sim_insts 2577 # Number of instructions simulated sim_ops 2577 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.delayHist::bucket_size 1 # delay histogram for all message system.ruby.delayHist::max_bucket 9 # delay histogram for all message system.ruby.delayHist::samples 3612 # 
delay histogram for all message @@ -100,6 +103,7 @@ system.ruby.network.routers1.msg_bytes.Response_Control::2 2176 system.ruby.network.routers1.msg_bytes.Writeback_Data::0 3240 system.ruby.network.routers1.msg_bytes.Writeback_Data::1 4464 system.ruby.network.routers1.msg_bytes.Writeback_Control::0 632 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.dir_cntrl0.memBuffer.memReq 650 # Total number of memory requests system.ruby.dir_cntrl0.memBuffer.memRead 547 # Number of memory reads system.ruby.dir_cntrl0.memBuffer.memWrite 103 # Number of memory writes @@ -148,6 +152,7 @@ system.ruby.network.msg_byte.Response_Data 263952 system.ruby.network.msg_byte.Response_Control 41760 system.ruby.network.msg_byte.Writeback_Data 23112 system.ruby.network.msg_byte.Writeback_Control 1896 +system.cpu.clk_domain.clock 1 # Clock period in ticks system.cpu.dtb.fetch_hits 0 # ITB hits system.cpu.dtb.fetch_misses 0 # ITB misses system.cpu.dtb.fetch_acv 0 # ITB acv diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/config.ini b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/config.ini index 1cc47929f..647bb1e23 100644 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/config.ini +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,6 +14,7 @@ children=clk_domain cpu physmem ruby sys_port_proxy voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -68,24 +73,31 @@ icache_port=system.ruby.l1_cntrl0.sequencer.slave[0] [system.cpu.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu.dtb] type=AlphaTLB +eventq_index=0 size=64 [system.cpu.interrupts] type=AlphaInterrupts +eventq_index=0 [system.cpu.isa] type=AlphaISA +eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB +eventq_index=0 size=48 [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -95,7 +107,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/alpha/tru64/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/alpha/tru64/hello gid=100 input=cin max_stack_size=67108864 @@ -111,6 +124,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -119,18 +133,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 l2_cntrl0 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 l2_cntrl0 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=1 random_seed=1234 randomization=false -stats_filename=ruby.stats 
[system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -138,9 +156,10 @@ type=Directory_Controller children=directory memBuffer buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=2 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory directory_latency=6 +eventq_index=0 memBuffer=system.ruby.dir_cntrl0.memBuffer number_of_TBEs=256 peer=Null @@ -151,6 +170,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 map_levels=4 numa_high_bit=5 size=268435456 @@ -167,6 +187,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -186,7 +207,8 @@ L1Dcache=system.ruby.l1_cntrl0.L1Dcache L1Icache=system.ruby.l1_cntrl0.L1Icache buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 +eventq_index=0 l2_select_num_bits=0 number_of_TBEs=256 peer=Null @@ -204,6 +226,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -218,6 +241,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=3 replacement_policy=PSEUDO_LRU @@ -233,6 +257,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.L1Dcache deadlock_threshold=500000 +eventq_index=0 icache=system.ruby.l1_cntrl0.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -250,7 +275,8 @@ children=L2cache L2cache=system.ruby.l2_cntrl0.L2cache buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 +eventq_index=0 number_of_TBEs=256 peer=Null recycle_latency=10 @@ -265,6 +291,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=15 replacement_policy=PSEUDO_LRU @@ -278,6 +305,7 @@ tagArrayBanks=1 type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -287,6 +315,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 system.ruby.network.ext_links2 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 system.ruby.network.int_links2 number_of_virtual_networks=10 @@ -297,6 +326,7 @@ topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 int_node=system.ruby.network.routers0 latency=1 @@ -306,6 +336,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l2_cntrl0 int_node=system.ruby.network.routers1 latency=1 @@ -315,6 +346,7 @@ weight=1 [system.ruby.network.ext_links2] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers2 latency=1 @@ -324,6 +356,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=3 node_a=system.ruby.network.routers0 @@ -333,6 +366,7 @@ weight=1 [system.ruby.network.int_links1] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=4 node_a=system.ruby.network.routers1 @@ -342,6 +376,7 @@ weight=1 [system.ruby.network.int_links2] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=5 
node_a=system.ruby.network.routers2 @@ -351,38 +386,36 @@ weight=1 [system.ruby.network.routers0] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers1] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers2] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 [system.ruby.network.routers3] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=3 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false -num_of_sequencers=1 -ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -394,5 +427,6 @@ slave=system.system_port [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/simerr b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/simerr index 492f3e68f..a30a2a95c 100755 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/simerr +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/simerr @@ -4,4 +4,3 @@ warn: rounding error > tolerance 0.072760 rounded to 0 warn: Sockets disabled, not accepting gdb connections warn: ignoring syscall sigprocmask(18446744073709547831, 1, ...) -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/simout b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/simout index e2683dd74..c37233c6d 100755 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/simout +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA_MOESI_CMP_directory/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/simout -Redirecting stderr to build/ALPHA_MOESI_CMP_directory/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:36:12 -gem5 started Sep 22 2013 05:36:23 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:42:56 +gem5 started Jan 22 2014 17:26:33 +gem5 executing on u200540-lin command line: build/ALPHA_MOESI_CMP_directory/gem5.opt -d build/ALPHA_MOESI_CMP_directory/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory -re tests/run.py build/ALPHA_MOESI_CMP_directory/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
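Per the commit subject, the classic-cache reference stats also pick up the new occupancy counters: each tag store now reports tags.tag_accesses, tags.data_accesses and per-task-id occupancy (tags.occ_task_id_blocks::1024, tags.age_task_id_blocks_1024::*, tags.occ_task_id_percent::1024), which accounts for most of the added stats.txt lines in the simple-timing and o3-timing refs. A small illustrative sketch (not part of gem5 or this patch; the path is hypothetical) that extracts those counters from a regenerated stats.txt:

def read_tag_stats(path='m5out/stats.txt'):
    # stats.txt lines look like: "<name>  <value>  # <description>"
    wanted = ('tags.tag_accesses', 'tags.data_accesses',
              'tags.occ_task_id_blocks', 'tags.occ_task_id_percent')
    stats = {}
    with open(path) as f:
        for line in f:
            parts = line.split()
            if len(parts) >= 2 and any(key in parts[0] for key in wanted):
                stats[parts[0]] = float(parts[1])
    return stats

# e.g. stats['system.cpu.icache.tags.tag_accesses'] -> 13081.0 for the
# alpha/linux/simple-timing reference above.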
diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/stats.txt b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/stats.txt index 5ece97b1b..b3553454d 100644 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/stats.txt +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_directory/stats.txt @@ -4,13 +4,16 @@ sim_seconds 0.000045 # Nu sim_ticks 44968 # Number of ticks simulated final_tick 44968 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_inst_rate 16150 # Simulator instruction rate (inst/s) -host_op_rate 16148 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 281738 # Simulator tick rate (ticks/s) -host_mem_usage 171884 # Number of bytes of host memory used -host_seconds 0.16 # Real time elapsed on the host +host_inst_rate 17948 # Simulator instruction rate (inst/s) +host_op_rate 17946 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 313128 # Simulator tick rate (ticks/s) +host_mem_usage 128348 # Number of bytes of host memory used +host_seconds 0.14 # Real time elapsed on the host sim_insts 2577 # Number of instructions simulated sim_ops 2577 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.outstanding_req_hist::bucket_size 1 system.ruby.outstanding_req_hist::max_bucket 9 system.ruby.outstanding_req_hist::samples 3295 @@ -82,6 +85,7 @@ system.ruby.network.routers1.msg_bytes.Writeback_Control::0 8032 system.ruby.network.routers1.msg_bytes.Writeback_Control::1 6512 system.ruby.network.routers1.msg_bytes.Writeback_Control::2 2648 system.ruby.network.routers1.msg_bytes.Unblock_Control::2 7464 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.dir_cntrl0.memBuffer.memReq 499 # Total number of memory requests system.ruby.dir_cntrl0.memBuffer.memRead 423 # Number of memory reads system.ruby.dir_cntrl0.memBuffer.memWrite 76 # Number of memory writes @@ -139,6 +143,7 @@ system.ruby.network.msg_byte.ResponseL2hit_Data 18792 system.ruby.network.msg_byte.Writeback_Data 124848 system.ruby.network.msg_byte.Writeback_Control 51576 system.ruby.network.msg_byte.Unblock_Control 22384 +system.cpu.clk_domain.clock 1 # Clock period in ticks system.cpu.dtb.fetch_hits 0 # ITB hits system.cpu.dtb.fetch_misses 0 # ITB misses system.cpu.dtb.fetch_acv 0 # ITB acv diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/config.ini b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/config.ini index 57448e3a7..2bf0001da 100644 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/config.ini +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,6 +14,7 @@ children=clk_domain cpu physmem ruby sys_port_proxy voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 
voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -68,24 +73,31 @@ icache_port=system.ruby.l1_cntrl0.sequencer.slave[0] [system.cpu.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu.dtb] type=AlphaTLB +eventq_index=0 size=64 [system.cpu.interrupts] type=AlphaInterrupts +eventq_index=0 [system.cpu.isa] type=AlphaISA +eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB +eventq_index=0 size=48 [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -95,7 +107,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/alpha/tru64/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/alpha/tru64/hello gid=100 input=cin max_stack_size=67108864 @@ -111,6 +124,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -119,18 +133,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 l2_cntrl0 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 l2_cntrl0 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=1 random_seed=1234 randomization=false -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -138,10 +156,11 @@ type=Directory_Controller children=directory memBuffer buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=2 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory directory_latency=5 distributed_persistent=true +eventq_index=0 fixed_timeout_latency=100 l2_select_num_bits=0 memBuffer=system.ruby.dir_cntrl0.memBuffer @@ -155,6 +174,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 map_levels=4 numa_high_bit=5 size=268435456 @@ -171,6 +191,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -191,8 +212,9 @@ L1Icache=system.ruby.l1_cntrl0.L1Icache N_tokens=2 buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 dynamic_timeout_enabled=true +eventq_index=0 fixed_timeout_latency=300 l1_request_latency=2 l1_response_latency=2 @@ -215,6 +237,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -229,6 +252,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -244,6 +268,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.L1Dcache deadlock_threshold=500000 +eventq_index=0 icache=system.ruby.l1_cntrl0.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -262,7 +287,8 @@ L2cache=system.ruby.l2_cntrl0.L2cache N_tokens=2 buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 +eventq_index=0 filtering_enabled=true l2_request_latency=5 l2_response_latency=5 @@ -278,6 +304,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 
dataArrayBanks=1 +eventq_index=0 is_icache=false latency=10 replacement_policy=PSEUDO_LRU @@ -291,6 +318,7 @@ tagArrayBanks=1 type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -300,6 +328,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 system.ruby.network.ext_links2 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 system.ruby.network.int_links2 number_of_virtual_networks=10 @@ -310,6 +339,7 @@ topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 int_node=system.ruby.network.routers0 latency=1 @@ -319,6 +349,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l2_cntrl0 int_node=system.ruby.network.routers1 latency=1 @@ -328,6 +359,7 @@ weight=1 [system.ruby.network.ext_links2] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers2 latency=1 @@ -337,6 +369,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=3 node_a=system.ruby.network.routers0 @@ -346,6 +379,7 @@ weight=1 [system.ruby.network.int_links1] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=4 node_a=system.ruby.network.routers1 @@ -355,6 +389,7 @@ weight=1 [system.ruby.network.int_links2] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=5 node_a=system.ruby.network.routers2 @@ -364,38 +399,36 @@ weight=1 [system.ruby.network.routers0] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers1] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers2] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 [system.ruby.network.routers3] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=3 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false -num_of_sequencers=1 -ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -407,5 +440,6 @@ slave=system.system_port [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/simerr b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/simerr index 492f3e68f..a30a2a95c 100755 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/simerr +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/simerr @@ -4,4 +4,3 @@ warn: rounding error > tolerance 0.072760 rounded to 0 warn: Sockets disabled, not accepting gdb connections warn: ignoring syscall sigprocmask(18446744073709547831, 1, ...) 
-hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/simout b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/simout index 76c77f4a5..b3289a2c7 100755 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/simout +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA_MOESI_CMP_token/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/simout -Redirecting stderr to build/ALPHA_MOESI_CMP_token/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:44:48 -gem5 started Sep 22 2013 05:45:00 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:47:59 +gem5 started Jan 22 2014 17:27:30 +gem5 executing on u200540-lin command line: build/ALPHA_MOESI_CMP_token/gem5.opt -d build/ALPHA_MOESI_CMP_token/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MOESI_CMP_token -re tests/run.py build/ALPHA_MOESI_CMP_token/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MOESI_CMP_token Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/stats.txt b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/stats.txt index 17ea98764..0c82e32e7 100644 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/stats.txt +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_CMP_token/stats.txt @@ -4,13 +4,16 @@ sim_seconds 0.000043 # Nu sim_ticks 43073 # Number of ticks simulated final_tick 43073 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_inst_rate 7100 # Simulator instruction rate (inst/s) -host_op_rate 7100 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 118667 # Simulator tick rate (ticks/s) -host_mem_usage 169652 # Number of bytes of host memory used -host_seconds 0.36 # Real time elapsed on the host +host_inst_rate 26553 # Simulator instruction rate (inst/s) +host_op_rate 26550 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 443703 # Simulator tick rate (ticks/s) +host_mem_usage 126100 # Number of bytes of host memory used +host_seconds 0.10 # Real time elapsed on the host sim_insts 2577 # Number of instructions simulated sim_ops 2577 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.outstanding_req_hist::bucket_size 1 system.ruby.outstanding_req_hist::max_bucket 9 system.ruby.outstanding_req_hist::samples 3295 @@ -76,6 +79,7 @@ system.ruby.network.routers1.msg_bytes.ResponseL2hit_Data::4 5040 system.ruby.network.routers1.msg_bytes.Response_Control::4 8 system.ruby.network.routers1.msg_bytes.Writeback_Data::4 42192 system.ruby.network.routers1.msg_bytes.Writeback_Control::4 2920 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.dir_cntrl0.memBuffer.memReq 532 # Total number of memory requests system.ruby.dir_cntrl0.memBuffer.memRead 448 # Number of memory reads 
system.ruby.dir_cntrl0.memBuffer.memWrite 84 # Number of memory writes @@ -125,6 +129,7 @@ system.ruby.network.msg_byte.ResponseL2hit_Data 15120 system.ruby.network.msg_byte.Response_Control 24 system.ruby.network.msg_byte.Writeback_Data 126576 system.ruby.network.msg_byte.Writeback_Control 8760 +system.cpu.clk_domain.clock 1 # Clock period in ticks system.cpu.dtb.fetch_hits 0 # ITB hits system.cpu.dtb.fetch_misses 0 # ITB misses system.cpu.dtb.fetch_acv 0 # ITB acv diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_hammer/config.ini b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_hammer/config.ini index fed15fed0..1829ec00a 100644 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_hammer/config.ini +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_hammer/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,6 +14,7 @@ children=clk_domain cpu physmem ruby sys_port_proxy voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -68,24 +73,31 @@ icache_port=system.ruby.l1_cntrl0.sequencer.slave[0] [system.cpu.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu.dtb] type=AlphaTLB +eventq_index=0 size=64 [system.cpu.interrupts] type=AlphaInterrupts +eventq_index=0 [system.cpu.isa] type=AlphaISA +eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB +eventq_index=0 size=48 [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -95,7 +107,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/alpha/tru64/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/alpha/tru64/hello gid=100 input=cin max_stack_size=67108864 @@ -111,6 +124,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -119,18 +133,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=1 random_seed=1234 randomization=false -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -138,8 +156,9 @@ type=Directory_Controller children=directory memBuffer probeFilter buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory +eventq_index=0 full_bit_dir_enabled=false memBuffer=system.ruby.dir_cntrl0.memBuffer memory_controller_latency=2 @@ -154,6 +173,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 
map_levels=4 numa_high_bit=5 size=268435456 @@ -170,6 +190,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -187,6 +208,7 @@ type=RubyCache assoc=4 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=1 replacement_policy=PSEUDO_LRU @@ -205,7 +227,8 @@ L2cache=system.ruby.l1_cntrl0.L2cache buffer_size=0 cache_response_latency=10 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 +eventq_index=0 issue_latency=2 l2_cache_hit_latency=10 no_mig_atomic=true @@ -223,6 +246,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -237,6 +261,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=2 replacement_policy=PSEUDO_LRU @@ -251,6 +276,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=10 replacement_policy=PSEUDO_LRU @@ -266,6 +292,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.L1Dcache deadlock_threshold=500000 +eventq_index=0 icache=system.ruby.l1_cntrl0.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -281,6 +308,7 @@ slave=system.cpu.icache_port system.cpu.dcache_port type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -290,6 +318,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 number_of_virtual_networks=10 @@ -300,6 +329,7 @@ topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 int_node=system.ruby.network.routers0 latency=1 @@ -309,6 +339,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers1 latency=1 @@ -318,6 +349,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=2 node_a=system.ruby.network.routers0 @@ -327,6 +359,7 @@ weight=1 [system.ruby.network.int_links1] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=3 node_a=system.ruby.network.routers1 @@ -336,32 +369,29 @@ weight=1 [system.ruby.network.routers0] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers1] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers2] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false -num_of_sequencers=1 -ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -373,5 +403,6 @@ slave=system.system_port [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_hammer/simerr 
b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_hammer/simerr index 492f3e68f..a30a2a95c 100755 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_hammer/simerr +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_hammer/simerr @@ -4,4 +4,3 @@ warn: rounding error > tolerance 0.072760 rounded to 0 warn: Sockets disabled, not accepting gdb connections warn: ignoring syscall sigprocmask(18446744073709547831, 1, ...) -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_hammer/simout b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_hammer/simout index fa7b05ab3..74d6c0f17 100755 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_hammer/simout +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_hammer/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA_MOESI_hammer/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MOESI_hammer/simout -Redirecting stderr to build/ALPHA_MOESI_hammer/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MOESI_hammer/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:17:28 -gem5 started Sep 22 2013 05:17:49 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:32:54 +gem5 started Jan 22 2014 17:25:27 +gem5 executing on u200540-lin command line: build/ALPHA_MOESI_hammer/gem5.opt -d build/ALPHA_MOESI_hammer/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MOESI_hammer -re tests/run.py build/ALPHA_MOESI_hammer/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby-MOESI_hammer Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
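The *.clk_domain.clock and voltage_domain.voltage lines that appear in each updated stats.txt come from the clock and voltage domains now recorded explicitly in these configs: a SrcClockDomain for the system and for Ruby, plus a DerivedClockDomain with clk_divider=3 for the memory controllers. A minimal sketch of that wiring in gem5's Python config language follows; the variable names are illustrative and not the exact code the regression scripts use. With sim_freq at 10^9 ticks/s, a 1 GHz domain has a 1-tick period, which is why these stats report "clock 1" and the divided memory-controller domain reports "clock 3".

    from m5.objects import (System, RubySystem, VoltageDomain,
                            SrcClockDomain, DerivedClockDomain)

    system = System()
    # Reported as system.voltage_domain.voltage (1 V) in stats.txt.
    system.voltage_domain = VoltageDomain(voltage='1V')
    # Reported as system.clk_domain.clock (1 tick at 10^9 ticks/s).
    system.clk_domain = SrcClockDomain(clock='1GHz',
                                       voltage_domain=system.voltage_domain)
    system.ruby = RubySystem()
    # Ruby gets its own source domain; the memory controllers run off a
    # divided domain (clk_divider=3 -> memctrl_clk_domain.clock of 3 ticks).
    system.ruby.clk_domain = SrcClockDomain(clock='1GHz',
                                            voltage_domain=system.voltage_domain)
    system.ruby.memctrl_clk_domain = DerivedClockDomain(
        clk_domain=system.ruby.clk_domain, clk_divider=3)
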
diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_hammer/stats.txt b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_hammer/stats.txt index fc4b80ac1..fe7ac0efa 100644 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_hammer/stats.txt +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby-MOESI_hammer/stats.txt @@ -4,13 +4,16 @@ sim_seconds 0.000035 # Nu sim_ticks 35432 # Number of ticks simulated final_tick 35432 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_inst_rate 20063 # Simulator instruction rate (inst/s) -host_op_rate 20060 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 275767 # Simulator tick rate (ticks/s) -host_mem_usage 169584 # Number of bytes of host memory used -host_seconds 0.13 # Real time elapsed on the host +host_inst_rate 28350 # Simulator instruction rate (inst/s) +host_op_rate 28346 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 389675 # Simulator tick rate (ticks/s) +host_mem_usage 126044 # Number of bytes of host memory used +host_seconds 0.09 # Real time elapsed on the host sim_insts 2577 # Number of instructions simulated sim_ops 2577 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.outstanding_req_hist::bucket_size 1 system.ruby.outstanding_req_hist::max_bucket 9 system.ruby.outstanding_req_hist::samples 3295 @@ -43,6 +46,7 @@ system.ruby.miss_latency_hist::stdev 8.819211 system.ruby.miss_latency_hist | 0 0.00% 0.00% | 0 0.00% 0.00% | 0 0.00% 0.00% | 421 95.46% 95.46% | 2 0.45% 95.92% | 12 2.72% 98.64% | 0 0.00% 98.64% | 6 1.36% 100.00% | 0 0.00% 100.00% | 0 0.00% 100.00% system.ruby.miss_latency_hist::total 441 system.ruby.Directory.incomplete_times 440 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.l1_cntrl0.L1Dcache.demand_hits 469 # Number of cache demand hits system.ruby.l1_cntrl0.L1Dcache.demand_misses 240 # Number of cache demand misses system.ruby.l1_cntrl0.L1Dcache.demand_accesses 709 # Number of cache demand accesses @@ -123,6 +127,7 @@ system.ruby.network.msg_byte.Response_Data 95256 system.ruby.network.msg_byte.Writeback_Data 17496 system.ruby.network.msg_byte.Writeback_Control 28656 system.ruby.network.msg_byte.Unblock_Control 10560 +system.cpu.clk_domain.clock 1 # Clock period in ticks system.cpu.dtb.fetch_hits 0 # ITB hits system.cpu.dtb.fetch_misses 0 # ITB misses system.cpu.dtb.fetch_acv 0 # ITB acv diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby/config.ini b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby/config.ini index 56f1e35ca..360da34a5 100644 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby/config.ini +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,6 +14,7 @@ children=clk_domain cpu physmem ruby sys_port_proxy voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 
+eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -68,24 +73,31 @@ icache_port=system.ruby.l1_cntrl0.sequencer.slave[0] [system.cpu.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu.dtb] type=AlphaTLB +eventq_index=0 size=64 [system.cpu.interrupts] type=AlphaInterrupts +eventq_index=0 [system.cpu.isa] type=AlphaISA +eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB +eventq_index=0 size=48 [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -95,7 +107,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/alpha/tru64/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/alpha/tru64/hello gid=100 input=cin max_stack_size=67108864 @@ -111,6 +124,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -119,18 +133,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=1 random_seed=1234 randomization=false -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -138,9 +156,10 @@ type=Directory_Controller children=directory memBuffer buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory directory_latency=12 +eventq_index=0 memBuffer=system.ruby.dir_cntrl0.memBuffer number_of_TBEs=256 peer=Null @@ -151,6 +170,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 map_levels=4 numa_high_bit=5 size=268435456 @@ -167,6 +187,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -186,7 +207,8 @@ buffer_size=0 cacheMemory=system.ruby.l1_cntrl0.cacheMemory cache_response_latency=12 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 +eventq_index=0 issue_latency=2 number_of_TBEs=256 peer=Null @@ -202,6 +224,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -217,6 +240,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.cacheMemory deadlock_threshold=500000 +eventq_index=0 icache=system.ruby.l1_cntrl0.cacheMemory max_outstanding_requests=16 ruby_system=system.ruby @@ -232,6 +256,7 @@ slave=system.cpu.icache_port system.cpu.dcache_port type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -241,6 +266,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 number_of_virtual_networks=10 @@ -251,6 +277,7 @@ 
topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 int_node=system.ruby.network.routers0 latency=1 @@ -260,6 +287,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers1 latency=1 @@ -269,6 +297,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=2 node_a=system.ruby.network.routers0 @@ -278,6 +307,7 @@ weight=1 [system.ruby.network.int_links1] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=3 node_a=system.ruby.network.routers1 @@ -287,32 +317,29 @@ weight=1 [system.ruby.network.routers0] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers1] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers2] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false -num_of_sequencers=1 -ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -324,5 +351,6 @@ slave=system.system_port [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby/simerr b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby/simerr index 492f3e68f..a30a2a95c 100755 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby/simerr +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby/simerr @@ -4,4 +4,3 @@ warn: rounding error > tolerance 0.072760 rounded to 0 warn: Sockets disabled, not accepting gdb connections warn: ignoring syscall sigprocmask(18446744073709547831, 1, ...) -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby/simout b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby/simout index 980ebae91..11cc12ff4 100755 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby/simout +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby/simout -Redirecting stderr to build/ALPHA/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 24 2013 03:08:53 -gem5 started Sep 28 2013 03:05:38 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:27:55 +gem5 started Jan 22 2014 17:24:30 +gem5 executing on u200540-lin command line: build/ALPHA/gem5.opt -d build/ALPHA/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby -re tests/run.py build/ALPHA/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing-ruby Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
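The same config diffs drop the standalone [system.ruby.profiler] section: the old RubyProfiler's hot_lines, all_instructions and num_of_sequencers knobs move onto RubySystem itself, and the fixed stats_filename=ruby.stats disappears, consistent with the Ruby counters now landing in stats.txt alongside the other stats. Building on the sketch above, a Python config now sets these directly on RubySystem, roughly as below (illustrative values; num_of_sequencers is normally derived from how many sequencers the protocol config creates):

    from m5.objects import RubySystem

    # Profiler knobs are plain RubySystem parameters after this change.
    system.ruby = RubySystem(block_size_bytes=64,
                             hot_lines=False,
                             all_instructions=False,
                             num_of_sequencers=1)  # one Sequencer in these SE hello tests
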
diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby/stats.txt b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby/stats.txt index a74ef311a..845b4481e 100644 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby/stats.txt +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing-ruby/stats.txt @@ -4,13 +4,16 @@ sim_seconds 0.000052 # Nu sim_ticks 52498 # Number of ticks simulated final_tick 52498 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_inst_rate 27660 # Simulator instruction rate (inst/s) -host_op_rate 27654 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 563232 # Simulator tick rate (ticks/s) -host_mem_usage 168112 # Number of bytes of host memory used -host_seconds 0.09 # Real time elapsed on the host +host_inst_rate 24935 # Simulator instruction rate (inst/s) +host_op_rate 24932 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 507835 # Simulator tick rate (ticks/s) +host_mem_usage 124536 # Number of bytes of host memory used +host_seconds 0.10 # Real time elapsed on the host sim_insts 2577 # Number of instructions simulated sim_ops 2577 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.delayHist::bucket_size 1 # delay histogram for all message system.ruby.delayHist::max_bucket 9 # delay histogram for all message system.ruby.delayHist::samples 1248 # delay histogram for all message @@ -47,6 +50,7 @@ system.ruby.miss_latency_hist::stdev 6.377524 system.ruby.miss_latency_hist | 0 0.00% 0.00% | 0 0.00% 0.00% | 0 0.00% 0.00% | 142 22.68% 22.68% | 448 71.57% 94.25% | 36 5.75% 100.00% | 0 0.00% 100.00% | 0 0.00% 100.00% | 0 0.00% 100.00% | 0 0.00% 100.00% system.ruby.miss_latency_hist::total 626 system.ruby.Directory.incomplete_times 625 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.l1_cntrl0.cacheMemory.demand_hits 2668 # Number of cache demand hits system.ruby.l1_cntrl0.cacheMemory.demand_misses 626 # Number of cache demand misses system.ruby.l1_cntrl0.cacheMemory.demand_accesses 3294 # Number of cache demand accesses @@ -98,6 +102,7 @@ system.ruby.network.msg_byte.Control 15024 system.ruby.network.msg_byte.Data 134352 system.ruby.network.msg_byte.Response_Data 135216 system.ruby.network.msg_byte.Writeback_Control 14928 +system.cpu.clk_domain.clock 1 # Clock period in ticks system.cpu.dtb.fetch_hits 0 # ITB hits system.cpu.dtb.fetch_misses 0 # ITB misses system.cpu.dtb.fetch_acv 0 # ITB acv diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing/config.ini b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing/config.ini index 81f228137..7ab4d5c2a 100644 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing/config.ini +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 
voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -71,6 +76,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -79,6 +85,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -93,11 +100,14 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] type=AlphaTLB +eventq_index=0 size=64 [system.cpu.icache] @@ -106,6 +116,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -114,6 +125,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -128,17 +140,23 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] type=AlphaInterrupts +eventq_index=0 [system.cpu.isa] type=AlphaISA +eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB +eventq_index=0 size=48 [system.cpu.l2cache] @@ -147,6 +165,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -155,6 +174,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -169,12 +189,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -184,6 +207,7 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -193,7 +217,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/alpha/tru64/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/alpha/tru64/hello gid=100 input=cin max_stack_size=67108864 @@ -207,11 +232,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -224,6 +251,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -233,5 +261,6 @@ port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing/simerr b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing/simerr index 31ae36f2e..32998f270 100755 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing/simerr +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing/simerr @@ -1,3 +1,2 @@ warn: Sockets disabled, not accepting gdb connections warn: ignoring syscall 
sigprocmask(18446744073709547831, 1, ...) -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing/simout b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing/simout index f5b60c70f..cd7b05e76 100755 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing/simout +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing/simout -Redirecting stderr to build/ALPHA/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 24 2013 03:08:53 -gem5 started Sep 28 2013 03:05:38 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:27:55 +gem5 started Jan 22 2014 17:24:26 +gem5 executing on u200540-lin command line: build/ALPHA/gem5.opt -d build/ALPHA/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing -re tests/run.py build/ALPHA/tests/opt/quick/se/00.hello/alpha/tru64/simple-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing/stats.txt b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing/stats.txt index 0eefef01d..3fc7cd393 100644 --- a/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing/stats.txt +++ b/tests/quick/se/00.hello/ref/alpha/tru64/simple-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000017 # Nu sim_ticks 16524000 # Number of ticks simulated final_tick 16524000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 70 # Simulator instruction rate (inst/s) -host_op_rate 70 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 446596 # Simulator tick rate (ticks/s) -host_mem_usage 222964 # Number of bytes of host memory used -host_seconds 37.00 # Real time elapsed on the host +host_inst_rate 33204 # Simulator instruction rate (inst/s) +host_op_rate 33192 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 212757424 # Simulator tick rate (ticks/s) +host_mem_usage 228444 # Number of bytes of host memory used +host_seconds 0.08 # Real time elapsed on the host sim_insts 2577 # Number of instructions simulated sim_ops 2577 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 10432 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 5248 # Number of bytes read from this memory system.physmem.bytes_read::total 15680 # Number of bytes read from this memory @@ -42,6 +44,7 @@ system.membus.reqLayer0.occupancy 245000 # La system.membus.reqLayer0.utilization 1.5 # Layer utilization (%) system.membus.respLayer1.occupancy 2205000 # Layer occupancy (ticks) system.membus.respLayer1.utilization 13.3 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.dtb.fetch_hits 0 # ITB hits system.cpu.dtb.fetch_misses 0 # ITB misses system.cpu.dtb.fetch_acv 0 # ITB acv @@ -106,6 +109,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 80.050296 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.039087 # Average percentage of cache occupancy 
system.cpu.icache.tags.occ_percent::total 0.039087 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 163 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 102 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 61 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.079590 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 5335 # Number of tag accesses +system.cpu.icache.tags.data_accesses 5335 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 2423 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 2423 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 2423 # number of demand (read+write) hits @@ -186,6 +195,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 26.994192 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.002447 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.000824 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.003270 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 218 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 136 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 82 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.006653 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 2205 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 2205 # Number of data accesses system.cpu.l2cache.ReadReq_misses::cpu.inst 163 # number of ReadReq misses system.cpu.l2cache.ReadReq_misses::cpu.data 55 # number of ReadReq misses system.cpu.l2cache.ReadReq_misses::total 218 # number of ReadReq misses @@ -303,6 +318,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 47.437790 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.011581 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.011581 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 82 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 39 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 43 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.020020 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 1500 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 1500 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 360 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 360 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 267 # number of WriteReq hits diff --git a/tests/quick/se/00.hello/ref/arm/linux/o3-timing-checker/config.ini b/tests/quick/se/00.hello/ref/arm/linux/o3-timing-checker/config.ini index 91966eab0..5c3361f47 100644 --- a/tests/quick/se/00.hello/ref/arm/linux/o3-timing-checker/config.ini +++ b/tests/quick/se/00.hello/ref/arm/linux/o3-timing-checker/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a 
cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -64,6 +68,8 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 +fetchBufferSize=64 fetchToDecodeDelay=1 fetchTrapLatency=1 fetchWidth=8 @@ -128,6 +134,7 @@ BTBTagSize=16 RASSize=16 choiceCtrBits=2 choicePredictorSize=8192 +eventq_index=0 globalCtrBits=2 globalPredictorSize=8192 instShiftAmt=2 @@ -147,6 +154,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.checker.dtb +eventq_index=0 exitOnError=false function_trace=false function_trace_start=0 @@ -171,18 +179,21 @@ workload=system.cpu.workload [system.cpu.checker.dtb] type=ArmTLB children=walker +eventq_index=0 size=64 walker=system.cpu.checker.dtb.walker [system.cpu.checker.dtb.walker] type=ArmTableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=2 sys=system port=system.cpu.toL2Bus.slave[5] [system.cpu.checker.isa] type=ArmISA +eventq_index=0 fpsid=1090793632 id_isar0=34607377 id_isar1=34677009 @@ -201,18 +212,21 @@ midr=890224640 [system.cpu.checker.itb] type=ArmTLB children=walker +eventq_index=0 size=64 walker=system.cpu.checker.itb.walker [system.cpu.checker.itb.walker] type=ArmTableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=2 sys=system port=system.cpu.toL2Bus.slave[4] [system.cpu.checker.tracer] type=ExeTracer +eventq_index=0 [system.cpu.dcache] type=BaseCache @@ -220,6 +234,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -228,6 +243,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -242,18 +258,22 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] type=ArmTLB children=walker +eventq_index=0 size=64 walker=system.cpu.dtb.walker [system.cpu.dtb.walker] type=ArmTableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=2 sys=system port=system.cpu.toL2Bus.slave[3] @@ -262,15 +282,18 @@ port=system.cpu.toL2Bus.slave[3] type=FUPool children=FUList0 FUList1 FUList2 FUList3 FUList4 FUList5 FUList6 FUList7 FUList8 FUList=system.cpu.fuPool.FUList0 system.cpu.fuPool.FUList1 system.cpu.fuPool.FUList2 system.cpu.fuPool.FUList3 system.cpu.fuPool.FUList4 system.cpu.fuPool.FUList5 system.cpu.fuPool.FUList6 system.cpu.fuPool.FUList7 system.cpu.fuPool.FUList8 +eventq_index=0 [system.cpu.fuPool.FUList0] type=FUDesc children=opList count=6 +eventq_index=0 opList=system.cpu.fuPool.FUList0.opList [system.cpu.fuPool.FUList0.opList] type=OpDesc +eventq_index=0 issueLat=1 opClass=IntAlu opLat=1 @@ -279,16 +302,19 @@ opLat=1 type=FUDesc children=opList0 opList1 count=2 +eventq_index=0 opList=system.cpu.fuPool.FUList1.opList0 system.cpu.fuPool.FUList1.opList1 [system.cpu.fuPool.FUList1.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=IntMult opLat=3 [system.cpu.fuPool.FUList1.opList1] type=OpDesc +eventq_index=0 issueLat=19 opClass=IntDiv opLat=20 @@ -297,22 +323,26 @@ opLat=20 type=FUDesc children=opList0 opList1 opList2 count=4 +eventq_index=0 
opList=system.cpu.fuPool.FUList2.opList0 system.cpu.fuPool.FUList2.opList1 system.cpu.fuPool.FUList2.opList2 [system.cpu.fuPool.FUList2.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatAdd opLat=2 [system.cpu.fuPool.FUList2.opList1] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatCmp opLat=2 [system.cpu.fuPool.FUList2.opList2] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatCvt opLat=2 @@ -321,22 +351,26 @@ opLat=2 type=FUDesc children=opList0 opList1 opList2 count=2 +eventq_index=0 opList=system.cpu.fuPool.FUList3.opList0 system.cpu.fuPool.FUList3.opList1 system.cpu.fuPool.FUList3.opList2 [system.cpu.fuPool.FUList3.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatMult opLat=4 [system.cpu.fuPool.FUList3.opList1] type=OpDesc +eventq_index=0 issueLat=12 opClass=FloatDiv opLat=12 [system.cpu.fuPool.FUList3.opList2] type=OpDesc +eventq_index=0 issueLat=24 opClass=FloatSqrt opLat=24 @@ -345,10 +379,12 @@ opLat=24 type=FUDesc children=opList count=0 +eventq_index=0 opList=system.cpu.fuPool.FUList4.opList [system.cpu.fuPool.FUList4.opList] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemRead opLat=1 @@ -357,124 +393,145 @@ opLat=1 type=FUDesc children=opList00 opList01 opList02 opList03 opList04 opList05 opList06 opList07 opList08 opList09 opList10 opList11 opList12 opList13 opList14 opList15 opList16 opList17 opList18 opList19 count=4 +eventq_index=0 opList=system.cpu.fuPool.FUList5.opList00 system.cpu.fuPool.FUList5.opList01 system.cpu.fuPool.FUList5.opList02 system.cpu.fuPool.FUList5.opList03 system.cpu.fuPool.FUList5.opList04 system.cpu.fuPool.FUList5.opList05 system.cpu.fuPool.FUList5.opList06 system.cpu.fuPool.FUList5.opList07 system.cpu.fuPool.FUList5.opList08 system.cpu.fuPool.FUList5.opList09 system.cpu.fuPool.FUList5.opList10 system.cpu.fuPool.FUList5.opList11 system.cpu.fuPool.FUList5.opList12 system.cpu.fuPool.FUList5.opList13 system.cpu.fuPool.FUList5.opList14 system.cpu.fuPool.FUList5.opList15 system.cpu.fuPool.FUList5.opList16 system.cpu.fuPool.FUList5.opList17 system.cpu.fuPool.FUList5.opList18 system.cpu.fuPool.FUList5.opList19 [system.cpu.fuPool.FUList5.opList00] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdAdd opLat=1 [system.cpu.fuPool.FUList5.opList01] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdAddAcc opLat=1 [system.cpu.fuPool.FUList5.opList02] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdAlu opLat=1 [system.cpu.fuPool.FUList5.opList03] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdCmp opLat=1 [system.cpu.fuPool.FUList5.opList04] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdCvt opLat=1 [system.cpu.fuPool.FUList5.opList05] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMisc opLat=1 [system.cpu.fuPool.FUList5.opList06] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMult opLat=1 [system.cpu.fuPool.FUList5.opList07] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMultAcc opLat=1 [system.cpu.fuPool.FUList5.opList08] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdShift opLat=1 [system.cpu.fuPool.FUList5.opList09] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdShiftAcc opLat=1 [system.cpu.fuPool.FUList5.opList10] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdSqrt opLat=1 [system.cpu.fuPool.FUList5.opList11] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatAdd opLat=1 [system.cpu.fuPool.FUList5.opList12] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatAlu opLat=1 [system.cpu.fuPool.FUList5.opList13] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatCmp opLat=1 
[system.cpu.fuPool.FUList5.opList14] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatCvt opLat=1 [system.cpu.fuPool.FUList5.opList15] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatDiv opLat=1 [system.cpu.fuPool.FUList5.opList16] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMisc opLat=1 [system.cpu.fuPool.FUList5.opList17] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMult opLat=1 [system.cpu.fuPool.FUList5.opList18] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMultAcc opLat=1 [system.cpu.fuPool.FUList5.opList19] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatSqrt opLat=1 @@ -483,10 +540,12 @@ opLat=1 type=FUDesc children=opList count=0 +eventq_index=0 opList=system.cpu.fuPool.FUList6.opList [system.cpu.fuPool.FUList6.opList] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemWrite opLat=1 @@ -495,16 +554,19 @@ opLat=1 type=FUDesc children=opList0 opList1 count=4 +eventq_index=0 opList=system.cpu.fuPool.FUList7.opList0 system.cpu.fuPool.FUList7.opList1 [system.cpu.fuPool.FUList7.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemRead opLat=1 [system.cpu.fuPool.FUList7.opList1] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemWrite opLat=1 @@ -513,10 +575,12 @@ opLat=1 type=FUDesc children=opList count=1 +eventq_index=0 opList=system.cpu.fuPool.FUList8.opList [system.cpu.fuPool.FUList8.opList] type=OpDesc +eventq_index=0 issueLat=3 opClass=IprAccess opLat=3 @@ -527,6 +591,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -535,6 +600,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -549,14 +615,18 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] type=ArmInterrupts +eventq_index=0 [system.cpu.isa] type=ArmISA +eventq_index=0 fpsid=1090793632 id_isar0=34607377 id_isar1=34677009 @@ -575,12 +645,14 @@ midr=890224640 [system.cpu.itb] type=ArmTLB children=walker +eventq_index=0 size=64 walker=system.cpu.itb.walker [system.cpu.itb.walker] type=ArmTableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=2 sys=system port=system.cpu.toL2Bus.slave[2] @@ -591,6 +663,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -599,6 +672,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -613,12 +687,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -628,6 +705,7 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side system.cpu.itb.walke [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -637,7 +715,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/arm/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/arm/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -651,11 +730,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 
+eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -675,6 +756,7 @@ conf_table_reported=true device_bus_width=8 device_rowbuffer_size=1024 devices_per_rank=8 +eventq_index=0 in_addr_map=true mem_sched_policy=frfcfs null=false @@ -686,17 +768,21 @@ static_backend_latency=10000 static_frontend_latency=10000 tBURST=5000 tCL=13750 +tRAS=35000 tRCD=13750 tREFI=7800000 tRFC=300000 tRP=13750 +tRRD=6250 tWTR=7500 tXAW=40000 write_buffer_size=32 -write_thresh_perc=70 +write_high_thresh_perc=70 +write_low_thresh_perc=0 port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/arm/linux/o3-timing-checker/simerr b/tests/quick/se/00.hello/ref/arm/linux/o3-timing-checker/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/arm/linux/o3-timing-checker/simerr +++ b/tests/quick/se/00.hello/ref/arm/linux/o3-timing-checker/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/arm/linux/o3-timing-checker/simout b/tests/quick/se/00.hello/ref/arm/linux/o3-timing-checker/simout index 47104f06c..dc275e0b8 100755 --- a/tests/quick/se/00.hello/ref/arm/linux/o3-timing-checker/simout +++ b/tests/quick/se/00.hello/ref/arm/linux/o3-timing-checker/simout @@ -1,11 +1,11 @@ gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Oct 16 2013 01:36:42 -gem5 started Oct 16 2013 01:55:20 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:24:06 +gem5 started Jan 22 2014 17:30:22 +gem5 executing on u200540-lin command line: build/ARM/gem5.opt -d build/ARM/tests/opt/quick/se/00.hello/arm/linux/o3-timing-checker -re tests/run.py build/ARM/tests/opt/quick/se/00.hello/arm/linux/o3-timing-checker Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... Hello world! 
-Exiting @ tick 16494000 because target called exit() +Exiting @ tick 16981000 because target called exit() diff --git a/tests/quick/se/00.hello/ref/arm/linux/o3-timing-checker/stats.txt b/tests/quick/se/00.hello/ref/arm/linux/o3-timing-checker/stats.txt index 6f535bcb9..8e11038e3 100644 --- a/tests/quick/se/00.hello/ref/arm/linux/o3-timing-checker/stats.txt +++ b/tests/quick/se/00.hello/ref/arm/linux/o3-timing-checker/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000017 # Nu sim_ticks 16981000 # Number of ticks simulated final_tick 16981000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 41552 # Simulator instruction rate (inst/s) -host_op_rate 51840 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 153628168 # Simulator tick rate (ticks/s) -host_mem_usage 240508 # Number of bytes of host memory used -host_seconds 0.11 # Real time elapsed on the host +host_inst_rate 35724 # Simulator instruction rate (inst/s) +host_op_rate 44574 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 132106037 # Simulator tick rate (ticks/s) +host_mem_usage 247896 # Number of bytes of host memory used +host_seconds 0.13 # Real time elapsed on the host sim_insts 4591 # Number of instructions simulated sim_ops 5729 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 17280 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 7808 # Number of bytes read from this memory system.physmem.bytes_read::total 25088 # Number of bytes read from this memory @@ -215,6 +217,7 @@ system.membus.reqLayer0.occupancy 483500 # La system.membus.reqLayer0.utilization 2.8 # Layer utilization (%) system.membus.respLayer1.occupancy 3646500 # Layer occupancy (ticks) system.membus.respLayer1.utilization 21.5 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.branchPred.lookups 2481 # Number of BP lookups system.cpu.branchPred.condPredicted 1780 # Number of conditional branches predicted system.cpu.branchPred.condIncorrect 482 # Number of conditional branches incorrect @@ -597,6 +600,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 148.072869 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.072301 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.072301 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 286 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 168 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 118 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.139648 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 4184 # Number of tag accesses +system.cpu.icache.tags.data_accesses 4184 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 1584 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 1584 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 1584 # number of demand (read+write) hits @@ -683,6 +692,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 47.132739 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.004255 # Average percentage of cache occupancy 
system.cpu.l2cache.tags.occ_percent::cpu.data 0.001438 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.005693 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 350 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 193 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 157 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.010681 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 3887 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 3887 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 20 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::cpu.data 20 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 40 # number of ReadReq hits @@ -815,6 +830,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 87.464066 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.021354 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.021354 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 146 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 57 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 89 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.035645 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 5930 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 5930 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 1767 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 1767 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 606 # number of WriteReq hits diff --git a/tests/quick/se/00.hello/ref/arm/linux/o3-timing/config.ini b/tests/quick/se/00.hello/ref/arm/linux/o3-timing/config.ini index 507cb5799..9b066fde0 100644 --- a/tests/quick/se/00.hello/ref/arm/linux/o3-timing/config.ini +++ b/tests/quick/se/00.hello/ref/arm/linux/o3-timing/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -64,6 +68,8 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 +fetchBufferSize=64 fetchToDecodeDelay=1 fetchTrapLatency=1 fetchWidth=8 @@ -128,6 +134,7 @@ BTBTagSize=16 RASSize=16 choiceCtrBits=2 choicePredictorSize=8192 +eventq_index=0 globalCtrBits=2 globalPredictorSize=8192 instShiftAmt=2 @@ -143,6 +150,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -151,6 +159,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -165,18 +174,22 @@ type=LRU assoc=2 block_size=64 
clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] type=ArmTLB children=walker +eventq_index=0 size=64 walker=system.cpu.dtb.walker [system.cpu.dtb.walker] type=ArmTableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=2 sys=system port=system.cpu.toL2Bus.slave[3] @@ -185,15 +198,18 @@ port=system.cpu.toL2Bus.slave[3] type=FUPool children=FUList0 FUList1 FUList2 FUList3 FUList4 FUList5 FUList6 FUList7 FUList8 FUList=system.cpu.fuPool.FUList0 system.cpu.fuPool.FUList1 system.cpu.fuPool.FUList2 system.cpu.fuPool.FUList3 system.cpu.fuPool.FUList4 system.cpu.fuPool.FUList5 system.cpu.fuPool.FUList6 system.cpu.fuPool.FUList7 system.cpu.fuPool.FUList8 +eventq_index=0 [system.cpu.fuPool.FUList0] type=FUDesc children=opList count=6 +eventq_index=0 opList=system.cpu.fuPool.FUList0.opList [system.cpu.fuPool.FUList0.opList] type=OpDesc +eventq_index=0 issueLat=1 opClass=IntAlu opLat=1 @@ -202,16 +218,19 @@ opLat=1 type=FUDesc children=opList0 opList1 count=2 +eventq_index=0 opList=system.cpu.fuPool.FUList1.opList0 system.cpu.fuPool.FUList1.opList1 [system.cpu.fuPool.FUList1.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=IntMult opLat=3 [system.cpu.fuPool.FUList1.opList1] type=OpDesc +eventq_index=0 issueLat=19 opClass=IntDiv opLat=20 @@ -220,22 +239,26 @@ opLat=20 type=FUDesc children=opList0 opList1 opList2 count=4 +eventq_index=0 opList=system.cpu.fuPool.FUList2.opList0 system.cpu.fuPool.FUList2.opList1 system.cpu.fuPool.FUList2.opList2 [system.cpu.fuPool.FUList2.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatAdd opLat=2 [system.cpu.fuPool.FUList2.opList1] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatCmp opLat=2 [system.cpu.fuPool.FUList2.opList2] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatCvt opLat=2 @@ -244,22 +267,26 @@ opLat=2 type=FUDesc children=opList0 opList1 opList2 count=2 +eventq_index=0 opList=system.cpu.fuPool.FUList3.opList0 system.cpu.fuPool.FUList3.opList1 system.cpu.fuPool.FUList3.opList2 [system.cpu.fuPool.FUList3.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatMult opLat=4 [system.cpu.fuPool.FUList3.opList1] type=OpDesc +eventq_index=0 issueLat=12 opClass=FloatDiv opLat=12 [system.cpu.fuPool.FUList3.opList2] type=OpDesc +eventq_index=0 issueLat=24 opClass=FloatSqrt opLat=24 @@ -268,10 +295,12 @@ opLat=24 type=FUDesc children=opList count=0 +eventq_index=0 opList=system.cpu.fuPool.FUList4.opList [system.cpu.fuPool.FUList4.opList] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemRead opLat=1 @@ -280,124 +309,145 @@ opLat=1 type=FUDesc children=opList00 opList01 opList02 opList03 opList04 opList05 opList06 opList07 opList08 opList09 opList10 opList11 opList12 opList13 opList14 opList15 opList16 opList17 opList18 opList19 count=4 +eventq_index=0 opList=system.cpu.fuPool.FUList5.opList00 system.cpu.fuPool.FUList5.opList01 system.cpu.fuPool.FUList5.opList02 system.cpu.fuPool.FUList5.opList03 system.cpu.fuPool.FUList5.opList04 system.cpu.fuPool.FUList5.opList05 system.cpu.fuPool.FUList5.opList06 system.cpu.fuPool.FUList5.opList07 system.cpu.fuPool.FUList5.opList08 system.cpu.fuPool.FUList5.opList09 system.cpu.fuPool.FUList5.opList10 system.cpu.fuPool.FUList5.opList11 system.cpu.fuPool.FUList5.opList12 system.cpu.fuPool.FUList5.opList13 system.cpu.fuPool.FUList5.opList14 system.cpu.fuPool.FUList5.opList15 system.cpu.fuPool.FUList5.opList16 system.cpu.fuPool.FUList5.opList17 system.cpu.fuPool.FUList5.opList18 system.cpu.fuPool.FUList5.opList19 
[system.cpu.fuPool.FUList5.opList00] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdAdd opLat=1 [system.cpu.fuPool.FUList5.opList01] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdAddAcc opLat=1 [system.cpu.fuPool.FUList5.opList02] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdAlu opLat=1 [system.cpu.fuPool.FUList5.opList03] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdCmp opLat=1 [system.cpu.fuPool.FUList5.opList04] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdCvt opLat=1 [system.cpu.fuPool.FUList5.opList05] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMisc opLat=1 [system.cpu.fuPool.FUList5.opList06] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMult opLat=1 [system.cpu.fuPool.FUList5.opList07] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMultAcc opLat=1 [system.cpu.fuPool.FUList5.opList08] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdShift opLat=1 [system.cpu.fuPool.FUList5.opList09] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdShiftAcc opLat=1 [system.cpu.fuPool.FUList5.opList10] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdSqrt opLat=1 [system.cpu.fuPool.FUList5.opList11] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatAdd opLat=1 [system.cpu.fuPool.FUList5.opList12] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatAlu opLat=1 [system.cpu.fuPool.FUList5.opList13] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatCmp opLat=1 [system.cpu.fuPool.FUList5.opList14] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatCvt opLat=1 [system.cpu.fuPool.FUList5.opList15] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatDiv opLat=1 [system.cpu.fuPool.FUList5.opList16] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMisc opLat=1 [system.cpu.fuPool.FUList5.opList17] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMult opLat=1 [system.cpu.fuPool.FUList5.opList18] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMultAcc opLat=1 [system.cpu.fuPool.FUList5.opList19] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatSqrt opLat=1 @@ -406,10 +456,12 @@ opLat=1 type=FUDesc children=opList count=0 +eventq_index=0 opList=system.cpu.fuPool.FUList6.opList [system.cpu.fuPool.FUList6.opList] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemWrite opLat=1 @@ -418,16 +470,19 @@ opLat=1 type=FUDesc children=opList0 opList1 count=4 +eventq_index=0 opList=system.cpu.fuPool.FUList7.opList0 system.cpu.fuPool.FUList7.opList1 [system.cpu.fuPool.FUList7.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemRead opLat=1 [system.cpu.fuPool.FUList7.opList1] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemWrite opLat=1 @@ -436,10 +491,12 @@ opLat=1 type=FUDesc children=opList count=1 +eventq_index=0 opList=system.cpu.fuPool.FUList8.opList [system.cpu.fuPool.FUList8.opList] type=OpDesc +eventq_index=0 issueLat=3 opClass=IprAccess opLat=3 @@ -450,6 +507,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -458,6 +516,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -472,14 +531,18 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] type=ArmInterrupts +eventq_index=0 [system.cpu.isa] type=ArmISA +eventq_index=0 fpsid=1090793632 id_isar0=34607377 id_isar1=34677009 @@ -498,12 +561,14 @@ 
midr=890224640 [system.cpu.itb] type=ArmTLB children=walker +eventq_index=0 size=64 walker=system.cpu.itb.walker [system.cpu.itb.walker] type=ArmTableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=2 sys=system port=system.cpu.toL2Bus.slave[2] @@ -514,6 +579,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -522,6 +588,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -536,12 +603,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -551,6 +621,7 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side system.cpu.itb.walke [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -560,7 +631,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/arm/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/arm/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -574,11 +646,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -598,6 +672,7 @@ conf_table_reported=true device_bus_width=8 device_rowbuffer_size=1024 devices_per_rank=8 +eventq_index=0 in_addr_map=true mem_sched_policy=frfcfs null=false @@ -609,17 +684,21 @@ static_backend_latency=10000 static_frontend_latency=10000 tBURST=5000 tCL=13750 +tRAS=35000 tRCD=13750 tREFI=7800000 tRFC=300000 tRP=13750 +tRRD=6250 tWTR=7500 tXAW=40000 write_buffer_size=32 -write_thresh_perc=70 +write_high_thresh_perc=70 +write_low_thresh_perc=0 port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/arm/linux/o3-timing/simerr b/tests/quick/se/00.hello/ref/arm/linux/o3-timing/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/arm/linux/o3-timing/simerr +++ b/tests/quick/se/00.hello/ref/arm/linux/o3-timing/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/arm/linux/o3-timing/simout b/tests/quick/se/00.hello/ref/arm/linux/o3-timing/simout index d3be13c32..5df86194c 100755 --- a/tests/quick/se/00.hello/ref/arm/linux/o3-timing/simout +++ b/tests/quick/se/00.hello/ref/arm/linux/o3-timing/simout @@ -1,11 +1,11 @@ gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Oct 16 2013 01:36:42 -gem5 started Oct 16 2013 01:55:13 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:24:06 +gem5 started Jan 22 2014 17:30:21 +gem5 executing on u200540-lin command line: build/ARM/gem5.opt -d build/ARM/tests/opt/quick/se/00.hello/arm/linux/o3-timing -re tests/run.py build/ARM/tests/opt/quick/se/00.hello/arm/linux/o3-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... Hello world! 
-Exiting @ tick 16494000 because target called exit() +Exiting @ tick 16981000 because target called exit() diff --git a/tests/quick/se/00.hello/ref/arm/linux/o3-timing/stats.txt b/tests/quick/se/00.hello/ref/arm/linux/o3-timing/stats.txt index 1007daea2..3ffee0645 100644 --- a/tests/quick/se/00.hello/ref/arm/linux/o3-timing/stats.txt +++ b/tests/quick/se/00.hello/ref/arm/linux/o3-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000017 # Nu sim_ticks 16981000 # Number of ticks simulated final_tick 16981000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 59313 # Simulator instruction rate (inst/s) -host_op_rate 73997 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 219275591 # Simulator tick rate (ticks/s) -host_mem_usage 240508 # Number of bytes of host memory used -host_seconds 0.08 # Real time elapsed on the host +host_inst_rate 34743 # Simulator instruction rate (inst/s) +host_op_rate 43351 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 128481440 # Simulator tick rate (ticks/s) +host_mem_usage 246872 # Number of bytes of host memory used +host_seconds 0.13 # Real time elapsed on the host sim_insts 4591 # Number of instructions simulated sim_ops 5729 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 17280 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 7808 # Number of bytes read from this memory system.physmem.bytes_read::total 25088 # Number of bytes read from this memory @@ -215,6 +217,7 @@ system.membus.reqLayer0.occupancy 483500 # La system.membus.reqLayer0.utilization 2.8 # Layer utilization (%) system.membus.respLayer1.occupancy 3646500 # Layer occupancy (ticks) system.membus.respLayer1.utilization 21.5 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.branchPred.lookups 2481 # Number of BP lookups system.cpu.branchPred.condPredicted 1780 # Number of conditional branches predicted system.cpu.branchPred.condIncorrect 482 # Number of conditional branches incorrect @@ -552,6 +555,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 148.072869 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.072301 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.072301 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 286 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 168 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 118 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.139648 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 4184 # Number of tag accesses +system.cpu.icache.tags.data_accesses 4184 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 1584 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 1584 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 1584 # number of demand (read+write) hits @@ -638,6 +647,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 47.132739 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.004255 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.001438 # Average 
percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.005693 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 350 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 193 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 157 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.010681 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 3887 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 3887 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 20 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::cpu.data 20 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 40 # number of ReadReq hits @@ -770,6 +785,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 87.464066 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.021354 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.021354 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 146 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 57 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 89 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.035645 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 5930 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 5930 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 1767 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 1767 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 606 # number of WriteReq hits diff --git a/tests/quick/se/00.hello/ref/arm/linux/simple-atomic-dummychecker/config.ini b/tests/quick/se/00.hello/ref/arm/linux/simple-atomic-dummychecker/config.ini index 05132e433..1158c75dc 100644 --- a/tests/quick/se/00.hello/ref/arm/linux/simple-atomic-dummychecker/config.ini +++ b/tests/quick/se/00.hello/ref/arm/linux/simple-atomic-dummychecker/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 fastmem=false function_trace=false function_trace_start=0 @@ -82,6 +87,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.checker.dtb +eventq_index=0 exitOnError=false function_trace=false function_trace_start=0 @@ -106,17 +112,20 @@ workload=system.cpu.workload [system.cpu.checker.dtb] type=ArmTLB children=walker +eventq_index=0 size=64 walker=system.cpu.checker.dtb.walker [system.cpu.checker.dtb.walker] type=ArmTableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=2 sys=system [system.cpu.checker.isa] type=ArmISA +eventq_index=0 fpsid=1090793632 
id_isar0=34607377 id_isar1=34677009 @@ -135,36 +144,43 @@ midr=890224640 [system.cpu.checker.itb] type=ArmTLB children=walker +eventq_index=0 size=64 walker=system.cpu.checker.itb.walker [system.cpu.checker.itb.walker] type=ArmTableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=2 sys=system [system.cpu.checker.tracer] type=ExeTracer +eventq_index=0 [system.cpu.dtb] type=ArmTLB children=walker +eventq_index=0 size=64 walker=system.cpu.dtb.walker [system.cpu.dtb.walker] type=ArmTableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=2 sys=system port=system.membus.slave[4] [system.cpu.interrupts] type=ArmInterrupts +eventq_index=0 [system.cpu.isa] type=ArmISA +eventq_index=0 fpsid=1090793632 id_isar0=34607377 id_isar1=34677009 @@ -183,18 +199,21 @@ midr=890224640 [system.cpu.itb] type=ArmTLB children=walker +eventq_index=0 size=64 walker=system.cpu.itb.walker [system.cpu.itb.walker] type=ArmTableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=2 sys=system port=system.membus.slave[3] [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -204,7 +223,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/arm/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/arm/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -218,11 +238,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -235,6 +257,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -244,5 +267,6 @@ port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/arm/linux/simple-atomic-dummychecker/simerr b/tests/quick/se/00.hello/ref/arm/linux/simple-atomic-dummychecker/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/arm/linux/simple-atomic-dummychecker/simerr +++ b/tests/quick/se/00.hello/ref/arm/linux/simple-atomic-dummychecker/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/arm/linux/simple-atomic-dummychecker/simout b/tests/quick/se/00.hello/ref/arm/linux/simple-atomic-dummychecker/simout index 3a9ca0eef..7509c2dae 100755 --- a/tests/quick/se/00.hello/ref/arm/linux/simple-atomic-dummychecker/simout +++ b/tests/quick/se/00.hello/ref/arm/linux/simple-atomic-dummychecker/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ARM/tests/opt/quick/se/00.hello/arm/linux/simple-atomic-dummychecker/simout -Redirecting stderr to build/ARM/tests/opt/quick/se/00.hello/arm/linux/simple-atomic-dummychecker/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. 
-gem5 compiled Sep 22 2013 07:58:15 -gem5 started Sep 22 2013 08:10:56 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:24:06 +gem5 started Jan 22 2014 17:30:32 +gem5 executing on u200540-lin command line: build/ARM/gem5.opt -d build/ARM/tests/opt/quick/se/00.hello/arm/linux/simple-atomic-dummychecker -re tests/run.py build/ARM/tests/opt/quick/se/00.hello/arm/linux/simple-atomic-dummychecker Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/00.hello/ref/arm/linux/simple-atomic-dummychecker/stats.txt b/tests/quick/se/00.hello/ref/arm/linux/simple-atomic-dummychecker/stats.txt index 05df8bae0..4b1e74a91 100644 --- a/tests/quick/se/00.hello/ref/arm/linux/simple-atomic-dummychecker/stats.txt +++ b/tests/quick/se/00.hello/ref/arm/linux/simple-atomic-dummychecker/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000003 # Nu sim_ticks 2870500 # Number of ticks simulated final_tick 2870500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 686137 # Simulator instruction rate (inst/s) -host_op_rate 854515 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 427336749 # Simulator tick rate (ticks/s) -host_mem_usage 232512 # Number of bytes of host memory used -host_seconds 0.01 # Real time elapsed on the host +host_inst_rate 61907 # Simulator instruction rate (inst/s) +host_op_rate 77238 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 38693079 # Simulator tick rate (ticks/s) +host_mem_usage 237008 # Number of bytes of host memory used +host_seconds 0.07 # Real time elapsed on the host sim_insts 4591 # Number of instructions simulated sim_ops 5729 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 18416 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 4491 # Number of bytes read from this memory system.physmem.bytes_read::total 22907 # Number of bytes read from this memory @@ -36,6 +38,7 @@ system.physmem.bw_total::total 9251001568 # To system.membus.throughput 9251001568 # Throughput (bytes/s) system.membus.data_through_bus 26555 # Total data (bytes) system.membus.snoop_data_through_bus 0 # Total snoop data (bytes) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.checker.dtb.inst_hits 0 # ITB inst hits system.cpu.checker.dtb.inst_misses 0 # ITB inst misses system.cpu.checker.dtb.read_hits 0 # DTB read hits diff --git a/tests/quick/se/00.hello/ref/arm/linux/simple-atomic/config.ini b/tests/quick/se/00.hello/ref/arm/linux/simple-atomic/config.ini index ea8fd73bf..8e0b67b72 100644 --- a/tests/quick/se/00.hello/ref/arm/linux/simple-atomic/config.ini +++ b/tests/quick/se/00.hello/ref/arm/linux/simple-atomic/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ 
do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 fastmem=false function_trace=false function_trace_start=0 @@ -75,21 +80,25 @@ icache_port=system.membus.slave[1] [system.cpu.dtb] type=ArmTLB children=walker +eventq_index=0 size=64 walker=system.cpu.dtb.walker [system.cpu.dtb.walker] type=ArmTableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=2 sys=system port=system.membus.slave[4] [system.cpu.interrupts] type=ArmInterrupts +eventq_index=0 [system.cpu.isa] type=ArmISA +eventq_index=0 fpsid=1090793632 id_isar0=34607377 id_isar1=34677009 @@ -108,18 +117,21 @@ midr=890224640 [system.cpu.itb] type=ArmTLB children=walker +eventq_index=0 size=64 walker=system.cpu.itb.walker [system.cpu.itb.walker] type=ArmTableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=2 sys=system port=system.membus.slave[3] [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -129,7 +141,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/arm/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/arm/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -143,11 +156,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -160,6 +175,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -169,5 +185,6 @@ port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/arm/linux/simple-atomic/simerr b/tests/quick/se/00.hello/ref/arm/linux/simple-atomic/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/arm/linux/simple-atomic/simerr +++ b/tests/quick/se/00.hello/ref/arm/linux/simple-atomic/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/arm/linux/simple-atomic/simout b/tests/quick/se/00.hello/ref/arm/linux/simple-atomic/simout index 7cee6c9ed..618f6d613 100755 --- a/tests/quick/se/00.hello/ref/arm/linux/simple-atomic/simout +++ b/tests/quick/se/00.hello/ref/arm/linux/simple-atomic/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ARM/tests/opt/quick/se/00.hello/arm/linux/simple-atomic/simout -Redirecting stderr to build/ARM/tests/opt/quick/se/00.hello/arm/linux/simple-atomic/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 07:58:15 -gem5 started Sep 22 2013 08:14:08 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:24:06 +gem5 started Jan 22 2014 17:30:32 +gem5 executing on u200540-lin command line: build/ARM/gem5.opt -d build/ARM/tests/opt/quick/se/00.hello/arm/linux/simple-atomic -re tests/run.py build/ARM/tests/opt/quick/se/00.hello/arm/linux/simple-atomic Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
diff --git a/tests/quick/se/00.hello/ref/arm/linux/simple-atomic/stats.txt b/tests/quick/se/00.hello/ref/arm/linux/simple-atomic/stats.txt index ea8a36796..ea0a0e09c 100644 --- a/tests/quick/se/00.hello/ref/arm/linux/simple-atomic/stats.txt +++ b/tests/quick/se/00.hello/ref/arm/linux/simple-atomic/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000003 # Nu sim_ticks 2870500 # Number of ticks simulated final_tick 2870500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 723203 # Simulator instruction rate (inst/s) -host_op_rate 900650 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 450384934 # Simulator tick rate (ticks/s) -host_mem_usage 232532 # Number of bytes of host memory used -host_seconds 0.01 # Real time elapsed on the host +host_inst_rate 81917 # Simulator instruction rate (inst/s) +host_op_rate 102184 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 51187301 # Simulator tick rate (ticks/s) +host_mem_usage 236980 # Number of bytes of host memory used +host_seconds 0.06 # Real time elapsed on the host sim_insts 4591 # Number of instructions simulated sim_ops 5729 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 18416 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 4491 # Number of bytes read from this memory system.physmem.bytes_read::total 22907 # Number of bytes read from this memory @@ -36,6 +38,7 @@ system.physmem.bw_total::total 9251001568 # To system.membus.throughput 9251001568 # Throughput (bytes/s) system.membus.data_through_bus 26555 # Total data (bytes) system.membus.snoop_data_through_bus 0 # Total snoop data (bytes) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.dtb.inst_hits 0 # ITB inst hits system.cpu.dtb.inst_misses 0 # ITB inst misses system.cpu.dtb.read_hits 0 # DTB read hits diff --git a/tests/quick/se/00.hello/ref/arm/linux/simple-timing/config.ini b/tests/quick/se/00.hello/ref/arm/linux/simple-timing/config.ini index aa887d8df..bae9efedf 100644 --- a/tests/quick/se/00.hello/ref/arm/linux/simple-timing/config.ini +++ b/tests/quick/se/00.hello/ref/arm/linux/simple-timing/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -71,6 +76,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -79,6 +85,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -93,18 +100,22 @@ type=LRU assoc=2 block_size=64 
clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] type=ArmTLB children=walker +eventq_index=0 size=64 walker=system.cpu.dtb.walker [system.cpu.dtb.walker] type=ArmTableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=2 sys=system port=system.cpu.toL2Bus.slave[3] @@ -115,6 +126,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -123,6 +135,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -137,14 +150,18 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] type=ArmInterrupts +eventq_index=0 [system.cpu.isa] type=ArmISA +eventq_index=0 fpsid=1090793632 id_isar0=34607377 id_isar1=34677009 @@ -163,12 +180,14 @@ midr=890224640 [system.cpu.itb] type=ArmTLB children=walker +eventq_index=0 size=64 walker=system.cpu.itb.walker [system.cpu.itb.walker] type=ArmTableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=2 sys=system port=system.cpu.toL2Bus.slave[2] @@ -179,6 +198,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -187,6 +207,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -201,12 +222,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -216,6 +240,7 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side system.cpu.itb.walke [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -225,7 +250,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/arm/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/arm/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -239,11 +265,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -256,6 +284,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -265,5 +294,6 @@ port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/arm/linux/simple-timing/simerr b/tests/quick/se/00.hello/ref/arm/linux/simple-timing/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/arm/linux/simple-timing/simerr +++ b/tests/quick/se/00.hello/ref/arm/linux/simple-timing/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/arm/linux/simple-timing/simout b/tests/quick/se/00.hello/ref/arm/linux/simple-timing/simout index db0e6caaf..6834abec2 100755 --- 
a/tests/quick/se/00.hello/ref/arm/linux/simple-timing/simout +++ b/tests/quick/se/00.hello/ref/arm/linux/simple-timing/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ARM/tests/opt/quick/se/00.hello/arm/linux/simple-timing/simout -Redirecting stderr to build/ARM/tests/opt/quick/se/00.hello/arm/linux/simple-timing/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 07:58:15 -gem5 started Sep 22 2013 09:24:32 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:24:06 +gem5 started Jan 22 2014 17:30:42 +gem5 executing on u200540-lin command line: build/ARM/gem5.opt -d build/ARM/tests/opt/quick/se/00.hello/arm/linux/simple-timing -re tests/run.py build/ARM/tests/opt/quick/se/00.hello/arm/linux/simple-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/00.hello/ref/arm/linux/simple-timing/stats.txt b/tests/quick/se/00.hello/ref/arm/linux/simple-timing/stats.txt index 13e2763d6..a3962cb85 100644 --- a/tests/quick/se/00.hello/ref/arm/linux/simple-timing/stats.txt +++ b/tests/quick/se/00.hello/ref/arm/linux/simple-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000026 # Nu sim_ticks 25969000 # Number of ticks simulated final_tick 25969000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 229244 # Simulator instruction rate (inst/s) -host_op_rate 284503 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 1301168988 # Simulator tick rate (ticks/s) -host_mem_usage 238660 # Number of bytes of host memory used -host_seconds 0.02 # Real time elapsed on the host +host_inst_rate 84539 # Simulator instruction rate (inst/s) +host_op_rate 105013 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 480681007 # Simulator tick rate (ticks/s) +host_mem_usage 245716 # Number of bytes of host memory used +host_seconds 0.05 # Real time elapsed on the host sim_insts 4565 # Number of instructions simulated sim_ops 5672 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 14400 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 8000 # Number of bytes read from this memory system.physmem.bytes_read::total 22400 # Number of bytes read from this memory @@ -42,6 +44,7 @@ system.membus.reqLayer0.occupancy 350000 # La system.membus.reqLayer0.utilization 1.3 # Layer utilization (%) system.membus.respLayer1.occupancy 3150000 # Layer occupancy (ticks) system.membus.respLayer1.utilization 12.1 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.dtb.inst_hits 0 # ITB inst hits system.cpu.dtb.inst_misses 0 # ITB inst misses system.cpu.dtb.read_hits 0 # DTB read hits @@ -116,6 +119,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 114.614391 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.055964 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.055964 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 240 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 107 # Occupied blocks per task id 
+system.cpu.icache.tags.age_task_id_blocks_1024::1 133 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.117188 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 9451 # Number of tag accesses +system.cpu.icache.tags.data_accesses 9451 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 4364 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 4364 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 4364 # number of demand (read+write) hits @@ -196,6 +205,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 48.181371 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.003231 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.001470 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.004702 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 307 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 128 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 179 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.009369 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 3406 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 3406 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 16 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::cpu.data 16 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 32 # number of ReadReq hits @@ -322,6 +337,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 83.000387 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.020264 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.020264 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 141 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 40 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 101 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.034424 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 4303 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 4303 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 1048 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 1048 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 870 # number of WriteReq hits diff --git a/tests/quick/se/00.hello/ref/mips/linux/inorder-timing/config.ini b/tests/quick/se/00.hello/ref/mips/linux/inorder-timing/config.ini index 2a0a5918d..734275a58 100644 --- a/tests/quick/se/00.hello/ref/mips/linux/inorder-timing/config.ini +++ b/tests/quick/se/00.hello/ref/mips/linux/inorder-timing/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -56,6 
+60,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 fetchBuffSize=4 function_trace=false function_trace_start=0 @@ -90,6 +95,7 @@ BTBTagSize=16 RASSize=16 choiceCtrBits=2 choicePredictorSize=8192 +eventq_index=0 globalCtrBits=2 globalPredictorSize=8192 instShiftAmt=2 @@ -105,6 +111,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -113,6 +120,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -127,11 +135,14 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] type=MipsTLB +eventq_index=0 size=64 [system.cpu.icache] @@ -140,6 +151,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -148,6 +160,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -162,19 +175,25 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] type=MipsInterrupts +eventq_index=0 [system.cpu.isa] type=MipsISA +eventq_index=0 num_threads=1 num_vpes=1 +system=system [system.cpu.itb] type=MipsTLB +eventq_index=0 size=64 [system.cpu.l2cache] @@ -183,6 +202,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -191,6 +211,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -205,12 +226,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -220,6 +244,7 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -229,7 +254,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/mips/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/mips/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -243,11 +269,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -267,6 +295,7 @@ conf_table_reported=true device_bus_width=8 device_rowbuffer_size=1024 devices_per_rank=8 +eventq_index=0 in_addr_map=true mem_sched_policy=frfcfs null=false @@ -278,17 +307,21 @@ static_backend_latency=10000 static_frontend_latency=10000 tBURST=5000 tCL=13750 +tRAS=35000 tRCD=13750 tREFI=7800000 tRFC=300000 tRP=13750 +tRRD=6250 tWTR=7500 tXAW=40000 write_buffer_size=32 -write_thresh_perc=70 +write_high_thresh_perc=70 +write_low_thresh_perc=0 port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git 
a/tests/quick/se/00.hello/ref/mips/linux/inorder-timing/simerr b/tests/quick/se/00.hello/ref/mips/linux/inorder-timing/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/mips/linux/inorder-timing/simerr +++ b/tests/quick/se/00.hello/ref/mips/linux/inorder-timing/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/mips/linux/inorder-timing/simout b/tests/quick/se/00.hello/ref/mips/linux/inorder-timing/simout index 0184d25db..5a8e6736f 100755 --- a/tests/quick/se/00.hello/ref/mips/linux/inorder-timing/simout +++ b/tests/quick/se/00.hello/ref/mips/linux/inorder-timing/simout @@ -1,14 +1,12 @@ -Redirecting stdout to build/MIPS/tests/opt/quick/se/00.hello/mips/linux/inorder-timing/simout -Redirecting stderr to build/MIPS/tests/opt/quick/se/00.hello/mips/linux/inorder-timing/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:51:54 -gem5 started Sep 22 2013 05:52:06 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:53:01 +gem5 started Jan 22 2014 17:27:52 +gem5 executing on u200540-lin command line: build/MIPS/gem5.opt -d build/MIPS/tests/opt/quick/se/00.hello/mips/linux/inorder-timing -re tests/run.py build/MIPS/tests/opt/quick/se/00.hello/mips/linux/inorder-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... info: Increasing stack size by one page. Hello World! -Exiting @ tick 24587000 because target called exit() +Exiting @ tick 24975000 because target called exit() diff --git a/tests/quick/se/00.hello/ref/mips/linux/inorder-timing/stats.txt b/tests/quick/se/00.hello/ref/mips/linux/inorder-timing/stats.txt index 3c2a96518..3e4b6f41c 100644 --- a/tests/quick/se/00.hello/ref/mips/linux/inorder-timing/stats.txt +++ b/tests/quick/se/00.hello/ref/mips/linux/inorder-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000025 # Nu sim_ticks 24975000 # Number of ticks simulated final_tick 24975000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 84511 # Simulator instruction rate (inst/s) -host_op_rate 84494 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 362882134 # Simulator tick rate (ticks/s) -host_mem_usage 254488 # Number of bytes of host memory used -host_seconds 0.07 # Real time elapsed on the host +host_inst_rate 42229 # Simulator instruction rate (inst/s) +host_op_rate 42225 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 181364329 # Simulator tick rate (ticks/s) +host_mem_usage 230516 # Number of bytes of host memory used +host_seconds 0.14 # Real time elapsed on the host sim_insts 5814 # Number of instructions simulated sim_ops 5814 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 20288 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 8832 # Number of bytes read from this memory system.physmem.bytes_read::total 29120 # Number of bytes read from this memory @@ -212,6 +214,7 @@ system.membus.reqLayer0.occupancy 552000 # La system.membus.reqLayer0.utilization 2.2 # Layer utilization (%) system.membus.respLayer1.occupancy 4260750 # Layer occupancy (ticks) system.membus.respLayer1.utilization 
17.1 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.branchPred.lookups 1156 # Number of BP lookups system.cpu.branchPred.condPredicted 861 # Number of conditional branches predicted system.cpu.branchPred.condIncorrect 603 # Number of conditional branches incorrect @@ -309,6 +312,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 150.636983 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.073553 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.073553 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 306 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 128 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 178 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.149414 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 1875 # Number of tag accesses +system.cpu.icache.tags.data_accesses 1875 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 428 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 428 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 428 # number of demand (read+write) hits @@ -414,6 +423,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 56.102213 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.004648 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.001712 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.006360 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 404 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 155 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 249 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.012329 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 4111 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 4111 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 2 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 2 # number of ReadReq hits system.cpu.l2cache.demand_hits::cpu.inst 2 # number of demand (read+write) hits @@ -537,6 +552,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 90.339752 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.022056 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.022056 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 138 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 30 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 108 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.033691 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 4314 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 4314 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 1066 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 1066 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 572 # number of WriteReq hits diff --git a/tests/quick/se/00.hello/ref/mips/linux/o3-timing/config.ini 
b/tests/quick/se/00.hello/ref/mips/linux/o3-timing/config.ini index 90b395123..df84ba05d 100644 --- a/tests/quick/se/00.hello/ref/mips/linux/o3-timing/config.ini +++ b/tests/quick/se/00.hello/ref/mips/linux/o3-timing/config.ini @@ -159,6 +159,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -175,6 +176,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] @@ -504,6 +506,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -520,6 +523,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] @@ -531,6 +535,7 @@ type=MipsISA eventq_index=0 num_threads=1 num_vpes=1 +system=system [system.cpu.itb] type=MipsTLB @@ -552,6 +557,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -568,6 +574,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] @@ -594,7 +601,7 @@ env= errout=cerr euid=100 eventq_index=0 -executable=tests/test-progs/hello/bin/mips/linux/hello +executable=/dist/test-progs/hello/bin/mips/linux/hello gid=100 input=cin max_stack_size=67108864 diff --git a/tests/quick/se/00.hello/ref/mips/linux/o3-timing/simerr b/tests/quick/se/00.hello/ref/mips/linux/o3-timing/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/mips/linux/o3-timing/simerr +++ b/tests/quick/se/00.hello/ref/mips/linux/o3-timing/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/mips/linux/o3-timing/simout b/tests/quick/se/00.hello/ref/mips/linux/o3-timing/simout index a390bccf8..3925c4814 100755 --- a/tests/quick/se/00.hello/ref/mips/linux/o3-timing/simout +++ b/tests/quick/se/00.hello/ref/mips/linux/o3-timing/simout @@ -1,12 +1,12 @@ gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Oct 16 2013 01:28:28 -gem5 started Oct 16 2013 01:35:02 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:53:01 +gem5 started Jan 22 2014 17:28:02 +gem5 executing on u200540-lin command line: build/MIPS/gem5.opt -d build/MIPS/tests/opt/quick/se/00.hello/mips/linux/o3-timing -re tests/run.py build/MIPS/tests/opt/quick/se/00.hello/mips/linux/o3-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... info: Increasing stack size by one page. Hello World! 
-Exiting @ tick 21805500 because target called exit() +Exiting @ tick 21898500 because target called exit() diff --git a/tests/quick/se/00.hello/ref/mips/linux/o3-timing/stats.txt b/tests/quick/se/00.hello/ref/mips/linux/o3-timing/stats.txt index 3589948bc..b4a732973 100644 --- a/tests/quick/se/00.hello/ref/mips/linux/o3-timing/stats.txt +++ b/tests/quick/se/00.hello/ref/mips/linux/o3-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000022 # Nu sim_ticks 21898500 # Number of ticks simulated final_tick 21898500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 34889 # Simulator instruction rate (inst/s) -host_op_rate 34885 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 148144968 # Simulator tick rate (ticks/s) -host_mem_usage 274956 # Number of bytes of host memory used -host_seconds 0.15 # Real time elapsed on the host +host_inst_rate 38049 # Simulator instruction rate (inst/s) +host_op_rate 38045 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 161516903 # Simulator tick rate (ticks/s) +host_mem_usage 231544 # Number of bytes of host memory used +host_seconds 0.14 # Real time elapsed on the host sim_insts 5156 # Number of instructions simulated sim_ops 5156 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 21440 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 9088 # Number of bytes read from this memory system.physmem.bytes_read::total 30528 # Number of bytes read from this memory @@ -212,6 +214,7 @@ system.membus.reqLayer0.occupancy 605000 # La system.membus.reqLayer0.utilization 2.8 # Layer utilization (%) system.membus.respLayer1.occupancy 4474750 # Layer occupancy (ticks) system.membus.respLayer1.utilization 20.4 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.branchPred.lookups 2174 # Number of BP lookups system.cpu.branchPred.condPredicted 1490 # Number of conditional branches predicted system.cpu.branchPred.condIncorrect 438 # Number of conditional branches incorrect @@ -526,6 +529,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 161.632436 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.078922 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.078922 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 321 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 149 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 172 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.156738 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 4268 # Number of tag accesses +system.cpu.icache.tags.data_accesses 4268 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 1514 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 1514 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 1514 # number of demand (read+write) hits @@ -612,6 +621,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 57.877288 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.005003 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.001766 # 
Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.006769 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 426 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 188 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 238 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.013000 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 4317 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 4317 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 3 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 3 # number of ReadReq hits system.cpu.l2cache.demand_hits::cpu.inst 3 # number of demand (read+write) hits @@ -735,6 +750,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 91.712882 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.022391 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.022391 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 142 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 39 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 103 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.034668 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 5952 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 5952 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 1832 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 1832 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 563 # number of WriteReq hits diff --git a/tests/quick/se/00.hello/ref/mips/linux/simple-atomic/config.ini b/tests/quick/se/00.hello/ref/mips/linux/simple-atomic/config.ini index 917891d7e..cb74c0ee3 100644 --- a/tests/quick/se/00.hello/ref/mips/linux/simple-atomic/config.ini +++ b/tests/quick/se/00.hello/ref/mips/linux/simple-atomic/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 fastmem=false function_trace=false function_trace_start=0 @@ -74,22 +79,28 @@ icache_port=system.membus.slave[1] [system.cpu.dtb] type=MipsTLB +eventq_index=0 size=64 [system.cpu.interrupts] type=MipsInterrupts +eventq_index=0 [system.cpu.isa] type=MipsISA +eventq_index=0 num_threads=1 num_vpes=1 +system=system [system.cpu.itb] type=MipsTLB +eventq_index=0 size=64 [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -99,7 +110,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/mips/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/mips/linux/hello 
gid=100 input=cin max_stack_size=67108864 @@ -113,11 +125,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -130,6 +144,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -139,5 +154,6 @@ port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/mips/linux/simple-atomic/simerr b/tests/quick/se/00.hello/ref/mips/linux/simple-atomic/simerr index 7edd901b2..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/mips/linux/simple-atomic/simerr +++ b/tests/quick/se/00.hello/ref/mips/linux/simple-atomic/simerr @@ -1,3 +1 @@ -warn: CoherentBus system.membus has no snooping ports attached! warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/mips/linux/simple-atomic/simout b/tests/quick/se/00.hello/ref/mips/linux/simple-atomic/simout index b1c55ad09..4635935c5 100755 --- a/tests/quick/se/00.hello/ref/mips/linux/simple-atomic/simout +++ b/tests/quick/se/00.hello/ref/mips/linux/simple-atomic/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/MIPS/tests/opt/quick/se/00.hello/mips/linux/simple-atomic/simout -Redirecting stderr to build/MIPS/tests/opt/quick/se/00.hello/mips/linux/simple-atomic/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:51:54 -gem5 started Sep 22 2013 05:52:07 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:53:01 +gem5 started Jan 22 2014 17:28:13 +gem5 executing on u200540-lin command line: build/MIPS/gem5.opt -d build/MIPS/tests/opt/quick/se/00.hello/mips/linux/simple-atomic -re tests/run.py build/MIPS/tests/opt/quick/se/00.hello/mips/linux/simple-atomic Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
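Note: the stats.txt hunks in this change all gain explicit per-domain entries -- system.voltage_domain.voltage, system.clk_domain.clock and, further down, system.cpu_clk_domain.clock -- matching the SrcClockDomain/VoltageDomain objects visible in the config.ini hunks above (clock=1000 and clock=500 ticks at the 1 THz global tick rate these runs report). Below is a minimal sketch of how that two-domain setup is usually written in a gem5 config script; the object and parameter names follow the config.ini entries, while the CPU model and the fragment itself are illustrative assumptions, not taken from this diff:

    # Fragment of a gem5 config script (hypothetical), not a complete run script.
    from m5.objects import System, SrcClockDomain, VoltageDomain, AtomicSimpleCPU

    system = System()
    # Single 1 V voltage domain, as in the config.ini hunks above.
    system.voltage_domain = VoltageDomain(voltage='1V')
    # System clock: 1 GHz == a 1000-tick period at the 1 THz global frequency.
    system.clk_domain = SrcClockDomain(clock='1GHz',
                                       voltage_domain=system.voltage_domain)
    # CPU clock: 2 GHz == a 500-tick period.
    system.cpu_clk_domain = SrcClockDomain(clock='2GHz',
                                           voltage_domain=system.voltage_domain)
    system.cpu = AtomicSimpleCPU(clk_domain=system.cpu_clk_domain)

Each clock domain now dumps its period (and the voltage domain its voltage) into stats.txt, which is why the reference stats files in this diff all grow by these lines.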
diff --git a/tests/quick/se/00.hello/ref/mips/linux/simple-atomic/stats.txt b/tests/quick/se/00.hello/ref/mips/linux/simple-atomic/stats.txt index e850cb6a0..fb6eb7154 100644 --- a/tests/quick/se/00.hello/ref/mips/linux/simple-atomic/stats.txt +++ b/tests/quick/se/00.hello/ref/mips/linux/simple-atomic/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000003 # Nu sim_ticks 2907000 # Number of ticks simulated final_tick 2907000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 727521 # Simulator instruction rate (inst/s) -host_op_rate 725084 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 361375060 # Simulator tick rate (ticks/s) -host_mem_usage 216568 # Number of bytes of host memory used -host_seconds 0.01 # Real time elapsed on the host +host_inst_rate 88855 # Simulator instruction rate (inst/s) +host_op_rate 88837 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 44409305 # Simulator tick rate (ticks/s) +host_mem_usage 220784 # Number of bytes of host memory used +host_seconds 0.07 # Real time elapsed on the host sim_insts 5814 # Number of instructions simulated sim_ops 5814 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 23260 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 4374 # Number of bytes read from this memory system.physmem.bytes_read::total 27634 # Number of bytes read from this memory @@ -36,6 +38,7 @@ system.physmem.bw_total::total 10764361885 # To system.membus.throughput 10764361885 # Throughput (bytes/s) system.membus.data_through_bus 31292 # Total data (bytes) system.membus.snoop_data_through_bus 0 # Total snoop data (bytes) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.dtb.read_hits 0 # DTB read hits system.cpu.dtb.read_misses 0 # DTB read misses system.cpu.dtb.read_accesses 0 # DTB read accesses diff --git a/tests/quick/se/00.hello/ref/mips/linux/simple-timing-ruby/config.ini b/tests/quick/se/00.hello/ref/mips/linux/simple-timing-ruby/config.ini index 793123a59..d40656fb3 100644 --- a/tests/quick/se/00.hello/ref/mips/linux/simple-timing-ruby/config.ini +++ b/tests/quick/se/00.hello/ref/mips/linux/simple-timing-ruby/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,6 +14,7 @@ children=clk_domain cpu physmem ruby sys_port_proxy voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -68,26 +73,33 @@ icache_port=system.ruby.l1_cntrl0.sequencer.slave[0] [system.cpu.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu.dtb] type=MipsTLB +eventq_index=0 size=64 [system.cpu.interrupts] type=MipsInterrupts +eventq_index=0 [system.cpu.isa] type=MipsISA +eventq_index=0 num_threads=1 num_vpes=1 +system=system 
[system.cpu.itb] type=MipsTLB +eventq_index=0 size=64 [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -97,7 +109,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/mips/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/mips/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -113,6 +126,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -121,18 +135,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=1 random_seed=1234 randomization=false -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -140,9 +158,10 @@ type=Directory_Controller children=directory memBuffer buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory directory_latency=12 +eventq_index=0 memBuffer=system.ruby.dir_cntrl0.memBuffer number_of_TBEs=256 peer=Null @@ -153,6 +172,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 map_levels=4 numa_high_bit=5 size=268435456 @@ -169,6 +189,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -188,7 +209,8 @@ buffer_size=0 cacheMemory=system.ruby.l1_cntrl0.cacheMemory cache_response_latency=12 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 +eventq_index=0 issue_latency=2 number_of_TBEs=256 peer=Null @@ -204,6 +226,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -219,6 +242,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.cacheMemory deadlock_threshold=500000 +eventq_index=0 icache=system.ruby.l1_cntrl0.cacheMemory max_outstanding_requests=16 ruby_system=system.ruby @@ -234,6 +258,7 @@ slave=system.cpu.icache_port system.cpu.dcache_port type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -243,6 +268,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 number_of_virtual_networks=10 @@ -253,6 +279,7 @@ topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 int_node=system.ruby.network.routers0 latency=1 @@ -262,6 +289,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers1 latency=1 @@ -271,6 +299,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=2 node_a=system.ruby.network.routers0 @@ -280,6 +309,7 @@ weight=1 [system.ruby.network.int_links1] 
type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=3 node_a=system.ruby.network.routers1 @@ -289,32 +319,29 @@ weight=1 [system.ruby.network.routers0] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers1] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers2] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false -num_of_sequencers=1 -ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -326,5 +353,6 @@ slave=system.system_port [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/mips/linux/simple-timing-ruby/simerr b/tests/quick/se/00.hello/ref/mips/linux/simple-timing-ruby/simerr index bbc0c797e..86244d4bf 100755 --- a/tests/quick/se/00.hello/ref/mips/linux/simple-timing-ruby/simerr +++ b/tests/quick/se/00.hello/ref/mips/linux/simple-timing-ruby/simerr @@ -3,4 +3,3 @@ warn: rounding error > tolerance warn: rounding error > tolerance 0.072760 rounded to 0 warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/mips/linux/simple-timing-ruby/simout b/tests/quick/se/00.hello/ref/mips/linux/simple-timing-ruby/simout index 5beaf8240..3e5d01afe 100755 --- a/tests/quick/se/00.hello/ref/mips/linux/simple-timing-ruby/simout +++ b/tests/quick/se/00.hello/ref/mips/linux/simple-timing-ruby/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/MIPS/tests/opt/quick/se/00.hello/mips/linux/simple-timing-ruby/simout -Redirecting stderr to build/MIPS/tests/opt/quick/se/00.hello/mips/linux/simple-timing-ruby/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:51:54 -gem5 started Sep 22 2013 05:52:07 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:53:01 +gem5 started Jan 22 2014 17:28:34 +gem5 executing on u200540-lin command line: build/MIPS/gem5.opt -d build/MIPS/tests/opt/quick/se/00.hello/mips/linux/simple-timing-ruby -re tests/run.py build/MIPS/tests/opt/quick/se/00.hello/mips/linux/simple-timing-ruby Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
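Note: besides the eventq_index additions, the simple-timing-ruby config above shows the standalone [system.ruby.profiler] RubyProfiler object going away: its all_instructions, hot_lines and num_of_sequencers parameters move onto RubySystem itself, the controllers switch from cntrl_id to cluster_id, and the stats_filename=ruby.stats parameter disappears. The stats.txt hunk that follows picks up the matching per-domain clock entries (system.ruby.clk_domain.clock, system.ruby.memctrl_clk_domain.clock, system.cpu.clk_domain.clock). When eyeballing such reference updates it can help to dump just those entries; a throwaway sketch, not part of gem5, with the file path only as an example:

    # Print the clock/voltage entries from a gem5 stats.txt reference file.
    def read_stats(path):
        stats = {}
        with open(path) as f:
            for line in f:
                line = line.split('#', 1)[0].strip()   # drop trailing comments
                if not line:
                    continue
                parts = line.split()
                if len(parts) >= 2:
                    stats[parts[0]] = parts[1]
        return stats

    stats = read_stats('tests/quick/se/00.hello/ref/mips/linux/'
                       'simple-timing-ruby/stats.txt')
    for name in sorted(stats):
        if name.endswith('.clock') or name.endswith('.voltage'):
            print('%s = %s' % (name, stats[name]))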
diff --git a/tests/quick/se/00.hello/ref/mips/linux/simple-timing-ruby/stats.txt b/tests/quick/se/00.hello/ref/mips/linux/simple-timing-ruby/stats.txt index eabbcdd0e..f6e1459a7 100644 --- a/tests/quick/se/00.hello/ref/mips/linux/simple-timing-ruby/stats.txt +++ b/tests/quick/se/00.hello/ref/mips/linux/simple-timing-ruby/stats.txt @@ -4,13 +4,16 @@ sim_seconds 0.000125 # Nu sim_ticks 125334 # Number of ticks simulated final_tick 125334 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_inst_rate 30165 # Simulator instruction rate (inst/s) -host_op_rate 30162 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 650140 # Simulator tick rate (ticks/s) -host_mem_usage 172408 # Number of bytes of host memory used -host_seconds 0.19 # Real time elapsed on the host +host_inst_rate 38153 # Simulator instruction rate (inst/s) +host_op_rate 38149 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 822314 # Simulator tick rate (ticks/s) +host_mem_usage 127760 # Number of bytes of host memory used +host_seconds 0.15 # Real time elapsed on the host sim_insts 5814 # Number of instructions simulated sim_ops 5814 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.delayHist::bucket_size 1 # delay histogram for all message system.ruby.delayHist::max_bucket 9 # delay histogram for all message system.ruby.delayHist::samples 2982 # delay histogram for all message @@ -47,6 +50,7 @@ system.ruby.miss_latency_hist::stdev 6.088981 system.ruby.miss_latency_hist | 0 0.00% 0.00% | 0 0.00% 0.00% | 0 0.00% 0.00% | 328 21.97% 21.97% | 1088 72.87% 94.84% | 74 4.96% 99.80% | 3 0.20% 100.00% | 0 0.00% 100.00% | 0 0.00% 100.00% | 0 0.00% 100.00% system.ruby.miss_latency_hist::total 1493 system.ruby.Directory.incomplete_times 1492 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.l1_cntrl0.cacheMemory.demand_hits 6410 # Number of cache demand hits system.ruby.l1_cntrl0.cacheMemory.demand_misses 1493 # Number of cache demand misses system.ruby.l1_cntrl0.cacheMemory.demand_accesses 7903 # Number of cache demand accesses @@ -99,6 +103,7 @@ system.ruby.network.msg_byte.Control 35832 system.ruby.network.msg_byte.Data 321624 system.ruby.network.msg_byte.Response_Data 322488 system.ruby.network.msg_byte.Writeback_Control 35736 +system.cpu.clk_domain.clock 1 # Clock period in ticks system.cpu.dtb.read_hits 0 # DTB read hits system.cpu.dtb.read_misses 0 # DTB read misses system.cpu.dtb.read_accesses 0 # DTB read accesses diff --git a/tests/quick/se/00.hello/ref/mips/linux/simple-timing/config.ini b/tests/quick/se/00.hello/ref/mips/linux/simple-timing/config.ini index aa6f1a156..943508ee9 100644 --- a/tests/quick/se/00.hello/ref/mips/linux/simple-timing/config.ini +++ b/tests/quick/se/00.hello/ref/mips/linux/simple-timing/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain 
clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -71,6 +76,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -79,6 +85,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -93,11 +100,14 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] type=MipsTLB +eventq_index=0 size=64 [system.cpu.icache] @@ -106,6 +116,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -114,6 +125,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -128,19 +140,25 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] type=MipsInterrupts +eventq_index=0 [system.cpu.isa] type=MipsISA +eventq_index=0 num_threads=1 num_vpes=1 +system=system [system.cpu.itb] type=MipsTLB +eventq_index=0 size=64 [system.cpu.l2cache] @@ -149,6 +167,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -157,6 +176,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -171,12 +191,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -186,6 +209,7 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -195,7 +219,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/mips/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/mips/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -209,11 +234,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -226,6 +253,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -235,5 +263,6 @@ port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/mips/linux/simple-timing/simerr b/tests/quick/se/00.hello/ref/mips/linux/simple-timing/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/mips/linux/simple-timing/simerr +++ b/tests/quick/se/00.hello/ref/mips/linux/simple-timing/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections 
-hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/mips/linux/simple-timing/simout b/tests/quick/se/00.hello/ref/mips/linux/simple-timing/simout index f65ffe2d1..fe019aadb 100755 --- a/tests/quick/se/00.hello/ref/mips/linux/simple-timing/simout +++ b/tests/quick/se/00.hello/ref/mips/linux/simple-timing/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/MIPS/tests/opt/quick/se/00.hello/mips/linux/simple-timing/simout -Redirecting stderr to build/MIPS/tests/opt/quick/se/00.hello/mips/linux/simple-timing/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:51:54 -gem5 started Sep 22 2013 05:52:20 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:53:01 +gem5 started Jan 22 2014 17:28:24 +gem5 executing on u200540-lin command line: build/MIPS/gem5.opt -d build/MIPS/tests/opt/quick/se/00.hello/mips/linux/simple-timing -re tests/run.py build/MIPS/tests/opt/quick/se/00.hello/mips/linux/simple-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/00.hello/ref/mips/linux/simple-timing/stats.txt b/tests/quick/se/00.hello/ref/mips/linux/simple-timing/stats.txt index d3256ea4d..bed740225 100644 --- a/tests/quick/se/00.hello/ref/mips/linux/simple-timing/stats.txt +++ b/tests/quick/se/00.hello/ref/mips/linux/simple-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000032 # Nu sim_ticks 31633000 # Number of ticks simulated final_tick 31633000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 304637 # Simulator instruction rate (inst/s) -host_op_rate 304230 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 1653175117 # Simulator tick rate (ticks/s) -host_mem_usage 224940 # Number of bytes of host memory used -host_seconds 0.02 # Real time elapsed on the host +host_inst_rate 65946 # Simulator instruction rate (inst/s) +host_op_rate 65935 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 358688094 # Simulator tick rate (ticks/s) +host_mem_usage 230484 # Number of bytes of host memory used +host_seconds 0.09 # Real time elapsed on the host sim_insts 5814 # Number of instructions simulated sim_ops 5814 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 19264 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 8832 # Number of bytes read from this memory system.physmem.bytes_read::total 28096 # Number of bytes read from this memory @@ -42,6 +44,7 @@ system.membus.reqLayer0.occupancy 439000 # La system.membus.reqLayer0.utilization 1.4 # Layer utilization (%) system.membus.respLayer1.occupancy 3951000 # Layer occupancy (ticks) system.membus.respLayer1.utilization 12.5 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.dtb.read_hits 0 # DTB read hits system.cpu.dtb.read_misses 0 # DTB read misses system.cpu.dtb.read_accesses 0 # DTB read accesses @@ -92,6 +95,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 132.545353 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.064719 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.064719 # 
Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 290 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 114 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 176 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.141602 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 11935 # Number of tag accesses +system.cpu.icache.tags.data_accesses 11935 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 5513 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 5513 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 5513 # number of demand (read+write) hits @@ -172,6 +181,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 54.223533 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.004086 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.001655 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.005741 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 388 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 136 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 252 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.011841 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 3967 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 3967 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 2 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 2 # number of ReadReq hits system.cpu.l2cache.demand_hits::cpu.inst 2 # number of demand (read+write) hits @@ -295,6 +310,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 87.492114 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.021360 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.021360 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 138 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 24 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 114 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.033691 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 4314 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 4314 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 1076 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 1076 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 874 # number of WriteReq hits diff --git a/tests/quick/se/00.hello/ref/power/linux/o3-timing/config.ini b/tests/quick/se/00.hello/ref/power/linux/o3-timing/config.ini index 3e13ce8e1..31323532b 100644 --- a/tests/quick/se/00.hello/ref/power/linux/o3-timing/config.ini +++ b/tests/quick/se/00.hello/ref/power/linux/o3-timing/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= 
load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -65,6 +69,8 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 +fetchBufferSize=64 fetchToDecodeDelay=1 fetchTrapLatency=1 fetchWidth=8 @@ -129,6 +135,7 @@ BTBTagSize=16 RASSize=16 choiceCtrBits=2 choicePredictorSize=8192 +eventq_index=0 globalCtrBits=2 globalPredictorSize=8192 instShiftAmt=2 @@ -144,6 +151,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -152,6 +160,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -166,26 +175,32 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] type=PowerTLB +eventq_index=0 size=64 [system.cpu.fuPool] type=FUPool children=FUList0 FUList1 FUList2 FUList3 FUList4 FUList5 FUList6 FUList7 FUList8 FUList=system.cpu.fuPool.FUList0 system.cpu.fuPool.FUList1 system.cpu.fuPool.FUList2 system.cpu.fuPool.FUList3 system.cpu.fuPool.FUList4 system.cpu.fuPool.FUList5 system.cpu.fuPool.FUList6 system.cpu.fuPool.FUList7 system.cpu.fuPool.FUList8 +eventq_index=0 [system.cpu.fuPool.FUList0] type=FUDesc children=opList count=6 +eventq_index=0 opList=system.cpu.fuPool.FUList0.opList [system.cpu.fuPool.FUList0.opList] type=OpDesc +eventq_index=0 issueLat=1 opClass=IntAlu opLat=1 @@ -194,16 +209,19 @@ opLat=1 type=FUDesc children=opList0 opList1 count=2 +eventq_index=0 opList=system.cpu.fuPool.FUList1.opList0 system.cpu.fuPool.FUList1.opList1 [system.cpu.fuPool.FUList1.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=IntMult opLat=3 [system.cpu.fuPool.FUList1.opList1] type=OpDesc +eventq_index=0 issueLat=19 opClass=IntDiv opLat=20 @@ -212,22 +230,26 @@ opLat=20 type=FUDesc children=opList0 opList1 opList2 count=4 +eventq_index=0 opList=system.cpu.fuPool.FUList2.opList0 system.cpu.fuPool.FUList2.opList1 system.cpu.fuPool.FUList2.opList2 [system.cpu.fuPool.FUList2.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatAdd opLat=2 [system.cpu.fuPool.FUList2.opList1] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatCmp opLat=2 [system.cpu.fuPool.FUList2.opList2] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatCvt opLat=2 @@ -236,22 +258,26 @@ opLat=2 type=FUDesc children=opList0 opList1 opList2 count=2 +eventq_index=0 opList=system.cpu.fuPool.FUList3.opList0 system.cpu.fuPool.FUList3.opList1 system.cpu.fuPool.FUList3.opList2 [system.cpu.fuPool.FUList3.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatMult opLat=4 [system.cpu.fuPool.FUList3.opList1] type=OpDesc +eventq_index=0 issueLat=12 opClass=FloatDiv opLat=12 [system.cpu.fuPool.FUList3.opList2] type=OpDesc +eventq_index=0 issueLat=24 opClass=FloatSqrt opLat=24 @@ -260,10 +286,12 @@ opLat=24 type=FUDesc children=opList count=0 +eventq_index=0 opList=system.cpu.fuPool.FUList4.opList [system.cpu.fuPool.FUList4.opList] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemRead opLat=1 @@ -272,124 +300,145 @@ opLat=1 type=FUDesc children=opList00 opList01 opList02 opList03 opList04 opList05 opList06 opList07 opList08 opList09 opList10 opList11 opList12 opList13 opList14 opList15 opList16 opList17 opList18 opList19 count=4 +eventq_index=0 
opList=system.cpu.fuPool.FUList5.opList00 system.cpu.fuPool.FUList5.opList01 system.cpu.fuPool.FUList5.opList02 system.cpu.fuPool.FUList5.opList03 system.cpu.fuPool.FUList5.opList04 system.cpu.fuPool.FUList5.opList05 system.cpu.fuPool.FUList5.opList06 system.cpu.fuPool.FUList5.opList07 system.cpu.fuPool.FUList5.opList08 system.cpu.fuPool.FUList5.opList09 system.cpu.fuPool.FUList5.opList10 system.cpu.fuPool.FUList5.opList11 system.cpu.fuPool.FUList5.opList12 system.cpu.fuPool.FUList5.opList13 system.cpu.fuPool.FUList5.opList14 system.cpu.fuPool.FUList5.opList15 system.cpu.fuPool.FUList5.opList16 system.cpu.fuPool.FUList5.opList17 system.cpu.fuPool.FUList5.opList18 system.cpu.fuPool.FUList5.opList19 [system.cpu.fuPool.FUList5.opList00] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdAdd opLat=1 [system.cpu.fuPool.FUList5.opList01] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdAddAcc opLat=1 [system.cpu.fuPool.FUList5.opList02] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdAlu opLat=1 [system.cpu.fuPool.FUList5.opList03] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdCmp opLat=1 [system.cpu.fuPool.FUList5.opList04] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdCvt opLat=1 [system.cpu.fuPool.FUList5.opList05] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMisc opLat=1 [system.cpu.fuPool.FUList5.opList06] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMult opLat=1 [system.cpu.fuPool.FUList5.opList07] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMultAcc opLat=1 [system.cpu.fuPool.FUList5.opList08] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdShift opLat=1 [system.cpu.fuPool.FUList5.opList09] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdShiftAcc opLat=1 [system.cpu.fuPool.FUList5.opList10] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdSqrt opLat=1 [system.cpu.fuPool.FUList5.opList11] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatAdd opLat=1 [system.cpu.fuPool.FUList5.opList12] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatAlu opLat=1 [system.cpu.fuPool.FUList5.opList13] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatCmp opLat=1 [system.cpu.fuPool.FUList5.opList14] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatCvt opLat=1 [system.cpu.fuPool.FUList5.opList15] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatDiv opLat=1 [system.cpu.fuPool.FUList5.opList16] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMisc opLat=1 [system.cpu.fuPool.FUList5.opList17] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMult opLat=1 [system.cpu.fuPool.FUList5.opList18] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMultAcc opLat=1 [system.cpu.fuPool.FUList5.opList19] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatSqrt opLat=1 @@ -398,10 +447,12 @@ opLat=1 type=FUDesc children=opList count=0 +eventq_index=0 opList=system.cpu.fuPool.FUList6.opList [system.cpu.fuPool.FUList6.opList] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemWrite opLat=1 @@ -410,16 +461,19 @@ opLat=1 type=FUDesc children=opList0 opList1 count=4 +eventq_index=0 opList=system.cpu.fuPool.FUList7.opList0 system.cpu.fuPool.FUList7.opList1 [system.cpu.fuPool.FUList7.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemRead opLat=1 [system.cpu.fuPool.FUList7.opList1] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemWrite opLat=1 @@ -428,10 +482,12 @@ opLat=1 type=FUDesc children=opList count=1 +eventq_index=0 opList=system.cpu.fuPool.FUList8.opList [system.cpu.fuPool.FUList8.opList] type=OpDesc +eventq_index=0 
issueLat=3 opClass=IprAccess opLat=3 @@ -442,6 +498,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -450,6 +507,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -464,17 +522,22 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] type=PowerInterrupts +eventq_index=0 [system.cpu.isa] type=PowerISA +eventq_index=0 [system.cpu.itb] type=PowerTLB +eventq_index=0 size=64 [system.cpu.l2cache] @@ -483,6 +546,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -491,6 +555,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -505,12 +570,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -520,6 +588,7 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -529,7 +598,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/power/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/power/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -543,11 +613,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -567,6 +639,7 @@ conf_table_reported=true device_bus_width=8 device_rowbuffer_size=1024 devices_per_rank=8 +eventq_index=0 in_addr_map=true mem_sched_policy=frfcfs null=false @@ -578,17 +651,21 @@ static_backend_latency=10000 static_frontend_latency=10000 tBURST=5000 tCL=13750 +tRAS=35000 tRCD=13750 tREFI=7800000 tRFC=300000 tRP=13750 +tRRD=6250 tWTR=7500 tXAW=40000 write_buffer_size=32 -write_thresh_perc=70 +write_high_thresh_perc=70 +write_low_thresh_perc=0 port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/power/linux/o3-timing/simerr b/tests/quick/se/00.hello/ref/power/linux/o3-timing/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/power/linux/o3-timing/simerr +++ b/tests/quick/se/00.hello/ref/power/linux/o3-timing/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/power/linux/o3-timing/simout b/tests/quick/se/00.hello/ref/power/linux/o3-timing/simout index 92c45258c..bf0b02582 100755 --- a/tests/quick/se/00.hello/ref/power/linux/o3-timing/simout +++ b/tests/quick/se/00.hello/ref/power/linux/o3-timing/simout @@ -1,11 +1,11 @@ gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. 
-gem5 compiled Oct 16 2013 01:29:56 -gem5 started Oct 16 2013 01:35:14 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:58:44 +gem5 started Jan 22 2014 17:29:11 +gem5 executing on u200540-lin command line: build/POWER/gem5.opt -d build/POWER/tests/opt/quick/se/00.hello/power/linux/o3-timing -re tests/run.py build/POWER/tests/opt/quick/se/00.hello/power/linux/o3-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... Hello world! -Exiting @ tick 18469500 because target called exit() +Exiting @ tick 18905500 because target called exit() diff --git a/tests/quick/se/00.hello/ref/power/linux/o3-timing/stats.txt b/tests/quick/se/00.hello/ref/power/linux/o3-timing/stats.txt index 800440e86..66a92381f 100644 --- a/tests/quick/se/00.hello/ref/power/linux/o3-timing/stats.txt +++ b/tests/quick/se/00.hello/ref/power/linux/o3-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000019 # Nu sim_ticks 18905500 # Number of ticks simulated final_tick 18905500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 83485 # Simulator instruction rate (inst/s) -host_op_rate 83467 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 272386071 # Simulator tick rate (ticks/s) -host_mem_usage 250488 # Number of bytes of host memory used -host_seconds 0.07 # Real time elapsed on the host +host_inst_rate 44009 # Simulator instruction rate (inst/s) +host_op_rate 44004 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 143620144 # Simulator tick rate (ticks/s) +host_mem_usage 227496 # Number of bytes of host memory used +host_seconds 0.13 # Real time elapsed on the host sim_insts 5792 # Number of instructions simulated sim_ops 5792 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 22080 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 6464 # Number of bytes read from this memory system.physmem.bytes_read::total 28544 # Number of bytes read from this memory @@ -215,6 +217,7 @@ system.membus.reqLayer0.occupancy 566000 # La system.membus.reqLayer0.utilization 3.0 # Layer utilization (%) system.membus.respLayer1.occupancy 4177750 # Layer occupancy (ticks) system.membus.respLayer1.utilization 22.1 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.branchPred.lookups 2238 # Number of BP lookups system.cpu.branchPred.condPredicted 1804 # Number of conditional branches predicted system.cpu.branchPred.condIncorrect 419 # Number of conditional branches incorrect @@ -526,6 +529,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 169.362417 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.082696 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.082696 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 351 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 196 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 155 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.171387 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 3979 # Number of tag accesses 
+system.cpu.icache.tags.data_accesses 3979 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 1372 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 1372 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 1372 # number of demand (read+write) hits @@ -612,6 +621,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 31.521966 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.005134 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.000962 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.006096 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 399 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 216 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 183 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.012177 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 4070 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 4070 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 6 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::cpu.data 1 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 7 # number of ReadReq hits @@ -738,6 +753,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 63.784946 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.015572 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.015572 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 102 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 37 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 65 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.024902 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 5348 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 5348 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 1473 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 1473 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 715 # number of WriteReq hits diff --git a/tests/quick/se/00.hello/ref/power/linux/simple-atomic/config.ini b/tests/quick/se/00.hello/ref/power/linux/simple-atomic/config.ini index 0bfe98e66..ab39b14ed 100644 --- a/tests/quick/se/00.hello/ref/power/linux/simple-atomic/config.ini +++ b/tests/quick/se/00.hello/ref/power/linux/simple-atomic/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -46,6 +50,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 fastmem=false function_trace=false function_trace_start=0 @@ -75,20 +80,25 @@ icache_port=system.membus.slave[1] [system.cpu.dtb] type=PowerTLB 
+eventq_index=0 size=64 [system.cpu.interrupts] type=PowerInterrupts +eventq_index=0 [system.cpu.isa] type=PowerISA +eventq_index=0 [system.cpu.itb] type=PowerTLB +eventq_index=0 size=64 [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -98,7 +108,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/power/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/power/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -112,11 +123,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -129,6 +142,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -138,5 +152,6 @@ port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/power/linux/simple-atomic/simerr b/tests/quick/se/00.hello/ref/power/linux/simple-atomic/simerr index 7edd901b2..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/power/linux/simple-atomic/simerr +++ b/tests/quick/se/00.hello/ref/power/linux/simple-atomic/simerr @@ -1,3 +1 @@ -warn: CoherentBus system.membus has no snooping ports attached! warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/power/linux/simple-atomic/simout b/tests/quick/se/00.hello/ref/power/linux/simple-atomic/simout index df127b542..b419f1ee5 100755 --- a/tests/quick/se/00.hello/ref/power/linux/simple-atomic/simout +++ b/tests/quick/se/00.hello/ref/power/linux/simple-atomic/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/POWER/tests/opt/quick/se/00.hello/power/linux/simple-atomic/simout -Redirecting stderr to build/POWER/tests/opt/quick/se/00.hello/power/linux/simple-atomic/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:59:47 -gem5 started Sep 22 2013 05:59:59 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:58:44 +gem5 started Jan 22 2014 17:29:13 +gem5 executing on u200540-lin command line: build/POWER/gem5.opt -d build/POWER/tests/opt/quick/se/00.hello/power/linux/simple-atomic -re tests/run.py build/POWER/tests/opt/quick/se/00.hello/power/linux/simple-atomic Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
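Note: the power/linux/o3-timing hunks above also carry the two other reference-update themes in this commit: the DRAM controller config gains explicit tRAS/tRRD timings and splits write_thresh_perc into write_high_thresh_perc/write_low_thresh_perc, and each cache's tag store now reports per-task-id occupancy (occ_task_id_blocks, age_task_id_blocks_1024::0/1, occ_task_id_percent) plus tag_accesses/data_accesses counts. The new occupancy numbers are internally consistent; a quick sanity-check sketch using the o3-timing icache figures from the stats.txt hunk above (the blocks-to-percent relationship is inferred from the values in this diff):

    # power/linux/o3-timing icache, task id 1024
    size, block_size = 131072, 64          # from the icache config.ini hunk
    total_blocks = size // block_size      # 2048 blocks
    occ_blocks = 351                       # icache.tags.occ_task_id_blocks::1024
    age0, age1 = 196, 155                  # age_task_id_blocks_1024::0 / ::1
    assert age0 + age1 == occ_blocks       # the two age buckets sum to the total
    print('%.6f' % (float(occ_blocks) / total_blocks))
    # -> 0.171387, matching icache.tags.occ_task_id_percent::1024

The same arithmetic holds for the dcache and l2cache entries in the other stats.txt hunks (e.g. 138 / 4096 = 0.033691 for the mips simple-timing dcache).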
diff --git a/tests/quick/se/00.hello/ref/power/linux/simple-atomic/stats.txt b/tests/quick/se/00.hello/ref/power/linux/simple-atomic/stats.txt index 759fbed05..a91187fc2 100644 --- a/tests/quick/se/00.hello/ref/power/linux/simple-atomic/stats.txt +++ b/tests/quick/se/00.hello/ref/power/linux/simple-atomic/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000003 # Nu sim_ticks 2896000 # Number of ticks simulated final_tick 2896000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 671850 # Simulator instruction rate (inst/s) -host_op_rate 669870 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 333940022 # Simulator tick rate (ticks/s) -host_mem_usage 212612 # Number of bytes of host memory used -host_seconds 0.01 # Real time elapsed on the host +host_inst_rate 80864 # Simulator instruction rate (inst/s) +host_op_rate 80849 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 40410591 # Simulator tick rate (ticks/s) +host_mem_usage 216708 # Number of bytes of host memory used +host_seconds 0.07 # Real time elapsed on the host sim_insts 5793 # Number of instructions simulated sim_ops 5793 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 23172 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 3720 # Number of bytes read from this memory system.physmem.bytes_read::total 26892 # Number of bytes read from this memory @@ -36,6 +38,7 @@ system.physmem.bw_total::total 10739295580 # To system.membus.throughput 10739295580 # Throughput (bytes/s) system.membus.data_through_bus 31101 # Total data (bytes) system.membus.snoop_data_through_bus 0 # Total snoop data (bytes) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.dtb.read_hits 0 # DTB read hits system.cpu.dtb.read_misses 0 # DTB read misses system.cpu.dtb.read_accesses 0 # DTB read accesses diff --git a/tests/quick/se/00.hello/ref/sparc/linux/inorder-timing/config.ini b/tests/quick/se/00.hello/ref/sparc/linux/inorder-timing/config.ini index 803d2e67f..74f6fdcd8 100644 --- a/tests/quick/se/00.hello/ref/sparc/linux/inorder-timing/config.ini +++ b/tests/quick/se/00.hello/ref/sparc/linux/inorder-timing/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -56,6 +60,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 fetchBuffSize=4 function_trace=false function_trace_start=0 @@ -90,6 +95,7 @@ BTBTagSize=16 RASSize=16 choiceCtrBits=2 choicePredictorSize=8192 +eventq_index=0 globalCtrBits=2 globalPredictorSize=8192 instShiftAmt=2 @@ -105,6 +111,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -113,6 +120,7 @@ mshrs=4 prefetch_on_access=false 
prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -127,11 +135,14 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] type=SparcTLB +eventq_index=0 size=64 [system.cpu.icache] @@ -140,6 +151,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -148,6 +160,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -162,17 +175,22 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] type=SparcInterrupts +eventq_index=0 [system.cpu.isa] type=SparcISA +eventq_index=0 [system.cpu.itb] type=SparcTLB +eventq_index=0 size=64 [system.cpu.l2cache] @@ -181,6 +199,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -189,6 +208,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -203,12 +223,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -218,6 +241,7 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -227,7 +251,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/sparc/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/sparc/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -241,11 +266,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -265,6 +292,7 @@ conf_table_reported=true device_bus_width=8 device_rowbuffer_size=1024 devices_per_rank=8 +eventq_index=0 in_addr_map=true mem_sched_policy=frfcfs null=false @@ -276,17 +304,21 @@ static_backend_latency=10000 static_frontend_latency=10000 tBURST=5000 tCL=13750 +tRAS=35000 tRCD=13750 tREFI=7800000 tRFC=300000 tRP=13750 +tRRD=6250 tWTR=7500 tXAW=40000 write_buffer_size=32 -write_thresh_perc=70 +write_high_thresh_perc=70 +write_low_thresh_perc=0 port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/sparc/linux/inorder-timing/simerr b/tests/quick/se/00.hello/ref/sparc/linux/inorder-timing/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/sparc/linux/inorder-timing/simerr +++ b/tests/quick/se/00.hello/ref/sparc/linux/inorder-timing/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/sparc/linux/inorder-timing/simout b/tests/quick/se/00.hello/ref/sparc/linux/inorder-timing/simout index 5555171c3..bce99f509 100755 --- 
a/tests/quick/se/00.hello/ref/sparc/linux/inorder-timing/simout +++ b/tests/quick/se/00.hello/ref/sparc/linux/inorder-timing/simout @@ -1,12 +1,10 @@ -Redirecting stdout to build/SPARC/tests/opt/quick/se/00.hello/sparc/linux/inorder-timing/simout -Redirecting stderr to build/SPARC/tests/opt/quick/se/00.hello/sparc/linux/inorder-timing/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 06:07:13 -gem5 started Sep 22 2013 06:10:26 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:04:27 +gem5 started Jan 22 2014 17:29:22 +gem5 executing on u200540-lin command line: build/SPARC/gem5.opt -d build/SPARC/tests/opt/quick/se/00.hello/sparc/linux/inorder-timing -re tests/run.py build/SPARC/tests/opt/quick/se/00.hello/sparc/linux/inorder-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... -Hello World!Exiting @ tick 20802500 because target called exit() +Hello World!Exiting @ tick 20892500 because target called exit() diff --git a/tests/quick/se/00.hello/ref/sparc/linux/inorder-timing/stats.txt b/tests/quick/se/00.hello/ref/sparc/linux/inorder-timing/stats.txt index b34a38ab7..005c21949 100644 --- a/tests/quick/se/00.hello/ref/sparc/linux/inorder-timing/stats.txt +++ b/tests/quick/se/00.hello/ref/sparc/linux/inorder-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000021 # Nu sim_ticks 20892500 # Number of ticks simulated final_tick 20892500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 70791 # Simulator instruction rate (inst/s) -host_op_rate 70777 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 277537926 # Simulator tick rate (ticks/s) -host_mem_usage 260788 # Number of bytes of host memory used -host_seconds 0.08 # Real time elapsed on the host +host_inst_rate 24019 # Simulator instruction rate (inst/s) +host_op_rate 24017 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 94189663 # Simulator tick rate (ticks/s) +host_mem_usage 236900 # Number of bytes of host memory used +host_seconds 0.22 # Real time elapsed on the host sim_insts 5327 # Number of instructions simulated sim_ops 5327 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 18496 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 8576 # Number of bytes read from this memory system.physmem.bytes_read::total 27072 # Number of bytes read from this memory @@ -213,6 +215,7 @@ system.membus.reqLayer0.occupancy 502000 # La system.membus.reqLayer0.utilization 2.4 # Layer utilization (%) system.membus.respLayer1.occupancy 3930250 # Layer occupancy (ticks) system.membus.respLayer1.utilization 18.8 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.branchPred.lookups 1636 # Number of BP lookups system.cpu.branchPred.condPredicted 1090 # Number of conditional branches predicted system.cpu.branchPred.condIncorrect 897 # Number of conditional branches incorrect @@ -292,6 +295,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 142.907558 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.069779 # Average percentage of cache occupancy 
system.cpu.icache.tags.occ_percent::total 0.069779 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 291 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 143 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 148 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.142090 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 2807 # Number of tag accesses +system.cpu.icache.tags.data_accesses 2807 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 892 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 892 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 892 # number of demand (read+write) hits @@ -397,6 +406,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 27.076177 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.004343 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.000826 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.005170 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 342 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 171 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 171 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.010437 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 3831 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 3831 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 2 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::cpu.data 1 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 3 # number of ReadReq hits @@ -523,6 +538,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 85.407936 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.020852 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.020852 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 135 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 38 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 97 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.032959 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 2911 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 2911 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 654 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 654 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 260 # number of WriteReq hits diff --git a/tests/quick/se/00.hello/ref/sparc/linux/simple-atomic/config.ini b/tests/quick/se/00.hello/ref/sparc/linux/simple-atomic/config.ini index 5f0f231f3..ea4a95481 100644 --- a/tests/quick/se/00.hello/ref/sparc/linux/simple-atomic/config.ini +++ b/tests/quick/se/00.hello/ref/sparc/linux/simple-atomic/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 
clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 fastmem=false function_trace=false function_trace_start=0 @@ -74,20 +79,25 @@ icache_port=system.membus.slave[1] [system.cpu.dtb] type=SparcTLB +eventq_index=0 size=64 [system.cpu.interrupts] type=SparcInterrupts +eventq_index=0 [system.cpu.isa] type=SparcISA +eventq_index=0 [system.cpu.itb] type=SparcTLB +eventq_index=0 size=64 [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -97,7 +107,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/sparc/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/sparc/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -111,11 +122,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -128,6 +141,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -137,5 +151,6 @@ port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/sparc/linux/simple-atomic/simerr b/tests/quick/se/00.hello/ref/sparc/linux/simple-atomic/simerr index 7edd901b2..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/sparc/linux/simple-atomic/simerr +++ b/tests/quick/se/00.hello/ref/sparc/linux/simple-atomic/simerr @@ -1,3 +1 @@ -warn: CoherentBus system.membus has no snooping ports attached! warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/sparc/linux/simple-atomic/simout b/tests/quick/se/00.hello/ref/sparc/linux/simple-atomic/simout index 3faafe3e1..c85cb4f07 100755 --- a/tests/quick/se/00.hello/ref/sparc/linux/simple-atomic/simout +++ b/tests/quick/se/00.hello/ref/sparc/linux/simple-atomic/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/SPARC/tests/opt/quick/se/00.hello/sparc/linux/simple-atomic/simout -Redirecting stderr to build/SPARC/tests/opt/quick/se/00.hello/sparc/linux/simple-atomic/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 06:07:13 -gem5 started Sep 22 2013 06:09:49 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:04:27 +gem5 started Jan 22 2014 17:29:22 +gem5 executing on u200540-lin command line: build/SPARC/gem5.opt -d build/SPARC/tests/opt/quick/se/00.hello/sparc/linux/simple-atomic -re tests/run.py build/SPARC/tests/opt/quick/se/00.hello/sparc/linux/simple-atomic Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
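Besides the clock-domain lines, the stats.txt hunks above add per-task-id cache occupancy counters (occ_task_id_blocks, age_task_id_blocks_1024, occ_task_id_percent) plus tag_accesses and data_accesses on every cache's tags, which is the cache-occupancy half of this change. A small self-contained sketch for pulling just those counters out of a stats.txt when sanity-checking a reference update (plain Python; the stat-name substrings are copied from the hunks above):

    # Sketch: list the new per-tags occupancy/access counters from a gem5 stats.txt.
    # Pure standard library; stat-name substrings taken from the hunks above.
    import sys

    KEYS = ('occ_task_id_blocks', 'age_task_id_blocks', 'occ_task_id_percent',
            'tag_accesses', 'data_accesses')

    def tag_occupancy_stats(path):
        with open(path) as f:
            for line in f:
                parts = line.split()
                if len(parts) >= 2 and any(key in parts[0] for key in KEYS):
                    yield parts[0], parts[1]   # (stat name, value)

    if __name__ == '__main__':
        for name, value in tag_occupancy_stats(sys.argv[1]):
            print(name, value)

For example, pointing it at tests/quick/se/00.hello/ref/sparc/linux/inorder-timing/stats.txt should list the occupancy and access counters added in the hunk above.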
diff --git a/tests/quick/se/00.hello/ref/sparc/linux/simple-atomic/stats.txt b/tests/quick/se/00.hello/ref/sparc/linux/simple-atomic/stats.txt index b27d1e6f6..a26cb7265 100644 --- a/tests/quick/se/00.hello/ref/sparc/linux/simple-atomic/stats.txt +++ b/tests/quick/se/00.hello/ref/sparc/linux/simple-atomic/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000003 # Nu sim_ticks 2694500 # Number of ticks simulated final_tick 2694500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 565055 # Simulator instruction rate (inst/s) -host_op_rate 563581 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 284338324 # Simulator tick rate (ticks/s) -host_mem_usage 222908 # Number of bytes of host memory used -host_seconds 0.01 # Real time elapsed on the host +host_inst_rate 53422 # Simulator instruction rate (inst/s) +host_op_rate 53415 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 27014162 # Simulator tick rate (ticks/s) +host_mem_usage 227132 # Number of bytes of host memory used +host_seconds 0.10 # Real time elapsed on the host sim_insts 5327 # Number of instructions simulated sim_ops 5327 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 21480 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 4602 # Number of bytes read from this memory system.physmem.bytes_read::total 26082 # Number of bytes read from this memory @@ -36,6 +38,7 @@ system.physmem.bw_total::total 11559473001 # To system.membus.throughput 11559473001 # Throughput (bytes/s) system.membus.data_through_bus 31147 # Total data (bytes) system.membus.snoop_data_through_bus 0 # Total snoop data (bytes) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.workload.num_syscalls 11 # Number of system calls system.cpu.numCycles 5390 # number of cpu cycles simulated system.cpu.numWorkItemsStarted 0 # number of work items this cpu started diff --git a/tests/quick/se/00.hello/ref/sparc/linux/simple-timing-ruby/config.ini b/tests/quick/se/00.hello/ref/sparc/linux/simple-timing-ruby/config.ini index 0e46b888b..cb65490fc 100644 --- a/tests/quick/se/00.hello/ref/sparc/linux/simple-timing-ruby/config.ini +++ b/tests/quick/se/00.hello/ref/sparc/linux/simple-timing-ruby/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,6 +14,7 @@ children=clk_domain cpu physmem ruby sys_port_proxy voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -68,24 +73,30 @@ icache_port=system.ruby.l1_cntrl0.sequencer.slave[0] [system.cpu.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu.dtb] type=SparcTLB +eventq_index=0 size=64 [system.cpu.interrupts] type=SparcInterrupts +eventq_index=0 [system.cpu.isa] 
type=SparcISA +eventq_index=0 [system.cpu.itb] type=SparcTLB +eventq_index=0 size=64 [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -95,7 +106,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/sparc/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/sparc/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -111,6 +123,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -119,18 +132,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=1 random_seed=1234 randomization=false -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -138,9 +155,10 @@ type=Directory_Controller children=directory memBuffer buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory directory_latency=12 +eventq_index=0 memBuffer=system.ruby.dir_cntrl0.memBuffer number_of_TBEs=256 peer=Null @@ -151,6 +169,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 map_levels=4 numa_high_bit=5 size=268435456 @@ -167,6 +186,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -186,7 +206,8 @@ buffer_size=0 cacheMemory=system.ruby.l1_cntrl0.cacheMemory cache_response_latency=12 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 +eventq_index=0 issue_latency=2 number_of_TBEs=256 peer=Null @@ -202,6 +223,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -217,6 +239,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.cacheMemory deadlock_threshold=500000 +eventq_index=0 icache=system.ruby.l1_cntrl0.cacheMemory max_outstanding_requests=16 ruby_system=system.ruby @@ -232,6 +255,7 @@ slave=system.cpu.icache_port system.cpu.dcache_port type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -241,6 +265,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 number_of_virtual_networks=10 @@ -251,6 +276,7 @@ topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 int_node=system.ruby.network.routers0 latency=1 @@ -260,6 +286,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers1 latency=1 @@ -269,6 +296,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=2 node_a=system.ruby.network.routers0 @@ -278,6 +306,7 @@ weight=1 
[system.ruby.network.int_links1] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=3 node_a=system.ruby.network.routers1 @@ -287,32 +316,29 @@ weight=1 [system.ruby.network.routers0] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers1] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers2] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false -num_of_sequencers=1 -ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -324,5 +350,6 @@ slave=system.system_port [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/sparc/linux/simple-timing-ruby/simerr b/tests/quick/se/00.hello/ref/sparc/linux/simple-timing-ruby/simerr index bbc0c797e..86244d4bf 100755 --- a/tests/quick/se/00.hello/ref/sparc/linux/simple-timing-ruby/simerr +++ b/tests/quick/se/00.hello/ref/sparc/linux/simple-timing-ruby/simerr @@ -3,4 +3,3 @@ warn: rounding error > tolerance warn: rounding error > tolerance 0.072760 rounded to 0 warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/sparc/linux/simple-timing-ruby/simout b/tests/quick/se/00.hello/ref/sparc/linux/simple-timing-ruby/simout index fe6ceebff..a7fbcbb0c 100755 --- a/tests/quick/se/00.hello/ref/sparc/linux/simple-timing-ruby/simout +++ b/tests/quick/se/00.hello/ref/sparc/linux/simple-timing-ruby/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/SPARC/tests/opt/quick/se/00.hello/sparc/linux/simple-timing-ruby/simout -Redirecting stderr to build/SPARC/tests/opt/quick/se/00.hello/sparc/linux/simple-timing-ruby/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 06:07:13 -gem5 started Sep 22 2013 06:10:00 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:04:27 +gem5 started Jan 22 2014 17:29:33 +gem5 executing on u200540-lin command line: build/SPARC/gem5.opt -d build/SPARC/tests/opt/quick/se/00.hello/sparc/linux/simple-timing-ruby -re tests/run.py build/SPARC/tests/opt/quick/se/00.hello/sparc/linux/simple-timing-ruby Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
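In the Ruby configs (the simple-timing-ruby reference above), the standalone [system.ruby.profiler] RubyProfiler section and the stats_filename=ruby.stats parameter are gone; all_instructions, hot_lines and num_of_sequencers now live on RubySystem itself, and the controllers' cntrl_id parameter becomes cluster_id. When checking that a regenerated config.ini differs only in expected keys like these, a quick per-section comparison can help; a sketch using only the Python standard library (the two file names are placeholders, and configparser's default option-name lowercasing is harmless for this purpose):

    # Sketch: report per-section key additions/removals between two gem5
    # config.ini files (e.g. an old and a regenerated reference).
    import configparser

    def load(path):
        cp = configparser.ConfigParser(interpolation=None, strict=False)
        cp.read(path)
        return {s: set(cp.options(s)) for s in cp.sections()}

    def diff_keys(old_path, new_path):
        old, new = load(old_path), load(new_path)
        for section in sorted(set(old) | set(new)):
            added = new.get(section, set()) - old.get(section, set())
            removed = old.get(section, set()) - new.get(section, set())
            if added or removed:
                print('[%s] added: %s removed: %s'
                      % (section, sorted(added), sorted(removed)))

    if __name__ == '__main__':
        diff_keys('config.ini.orig', 'config.ini')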
diff --git a/tests/quick/se/00.hello/ref/sparc/linux/simple-timing-ruby/stats.txt b/tests/quick/se/00.hello/ref/sparc/linux/simple-timing-ruby/stats.txt index d8d5f48fa..ff67fbecb 100644 --- a/tests/quick/se/00.hello/ref/sparc/linux/simple-timing-ruby/stats.txt +++ b/tests/quick/se/00.hello/ref/sparc/linux/simple-timing-ruby/stats.txt @@ -4,13 +4,16 @@ sim_seconds 0.000108 # Nu sim_ticks 107952 # Number of ticks simulated final_tick 107952 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_inst_rate 352 # Simulator instruction rate (inst/s) -host_op_rate 352 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 7141 # Simulator tick rate (ticks/s) -host_mem_usage 177732 # Number of bytes of host memory used -host_seconds 15.12 # Real time elapsed on the host +host_inst_rate 32230 # Simulator instruction rate (inst/s) +host_op_rate 32227 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 653032 # Simulator tick rate (ticks/s) +host_mem_usage 134144 # Number of bytes of host memory used +host_seconds 0.17 # Real time elapsed on the host sim_insts 5327 # Number of instructions simulated sim_ops 5327 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.delayHist::bucket_size 1 # delay histogram for all message system.ruby.delayHist::max_bucket 9 # delay histogram for all message system.ruby.delayHist::samples 2574 # delay histogram for all message @@ -47,6 +50,7 @@ system.ruby.miss_latency_hist::stdev 6.536157 system.ruby.miss_latency_hist | 0 0.00% 0.00% | 0 0.00% 0.00% | 0 0.00% 0.00% | 306 23.74% 23.74% | 913 70.83% 94.57% | 68 5.28% 99.84% | 1 0.08% 99.92% | 1 0.08% 100.00% | 0 0.00% 100.00% | 0 0.00% 100.00% system.ruby.miss_latency_hist::total 1289 system.ruby.Directory.incomplete_times 1288 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.l1_cntrl0.cacheMemory.demand_hits 5469 # Number of cache demand hits system.ruby.l1_cntrl0.cacheMemory.demand_misses 1289 # Number of cache demand misses system.ruby.l1_cntrl0.cacheMemory.demand_accesses 6758 # Number of cache demand accesses @@ -99,6 +103,7 @@ system.ruby.network.msg_byte.Control 30936 system.ruby.network.msg_byte.Data 277560 system.ruby.network.msg_byte.Response_Data 278424 system.ruby.network.msg_byte.Writeback_Control 30840 +system.cpu.clk_domain.clock 1 # Clock period in ticks system.cpu.workload.num_syscalls 11 # Number of system calls system.cpu.numCycles 107952 # number of cpu cycles simulated system.cpu.numWorkItemsStarted 0 # number of work items this cpu started diff --git a/tests/quick/se/00.hello/ref/sparc/linux/simple-timing/config.ini b/tests/quick/se/00.hello/ref/sparc/linux/simple-timing/config.ini index 794c187b4..32f16be8d 100644 --- a/tests/quick/se/00.hello/ref/sparc/linux/simple-timing/config.ini +++ b/tests/quick/se/00.hello/ref/sparc/linux/simple-timing/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ 
system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -71,6 +76,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -79,6 +85,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -93,11 +100,14 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] type=SparcTLB +eventq_index=0 size=64 [system.cpu.icache] @@ -106,6 +116,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -114,6 +125,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -128,17 +140,22 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] type=SparcInterrupts +eventq_index=0 [system.cpu.isa] type=SparcISA +eventq_index=0 [system.cpu.itb] type=SparcTLB +eventq_index=0 size=64 [system.cpu.l2cache] @@ -147,6 +164,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -155,6 +173,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -169,12 +188,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -184,6 +206,7 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -193,7 +216,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/sparc/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/sparc/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -207,11 +231,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -224,6 +250,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -233,5 +260,6 @@ port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/sparc/linux/simple-timing/simerr b/tests/quick/se/00.hello/ref/sparc/linux/simple-timing/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/sparc/linux/simple-timing/simerr +++ b/tests/quick/se/00.hello/ref/sparc/linux/simple-timing/simerr @@ -1,2 +1 @@ warn: 
Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/sparc/linux/simple-timing/simout b/tests/quick/se/00.hello/ref/sparc/linux/simple-timing/simout index c2df02496..73a8d6161 100755 --- a/tests/quick/se/00.hello/ref/sparc/linux/simple-timing/simout +++ b/tests/quick/se/00.hello/ref/sparc/linux/simple-timing/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/SPARC/tests/opt/quick/se/00.hello/sparc/linux/simple-timing/simout -Redirecting stderr to build/SPARC/tests/opt/quick/se/00.hello/sparc/linux/simple-timing/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 06:07:13 -gem5 started Sep 22 2013 06:07:31 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:04:27 +gem5 started Jan 22 2014 17:29:24 +gem5 executing on u200540-lin command line: build/SPARC/gem5.opt -d build/SPARC/tests/opt/quick/se/00.hello/sparc/linux/simple-timing -re tests/run.py build/SPARC/tests/opt/quick/se/00.hello/sparc/linux/simple-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/00.hello/ref/sparc/linux/simple-timing/stats.txt b/tests/quick/se/00.hello/ref/sparc/linux/simple-timing/stats.txt index b76f909df..b7dc82e89 100644 --- a/tests/quick/se/00.hello/ref/sparc/linux/simple-timing/stats.txt +++ b/tests/quick/se/00.hello/ref/sparc/linux/simple-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000028 # Nu sim_ticks 27800000 # Number of ticks simulated final_tick 27800000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 441877 # Simulator instruction rate (inst/s) -host_op_rate 441389 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 2300957264 # Simulator tick rate (ticks/s) -host_mem_usage 230904 # Number of bytes of host memory used -host_seconds 0.01 # Real time elapsed on the host +host_inst_rate 44522 # Simulator instruction rate (inst/s) +host_op_rate 44517 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 232295322 # Simulator tick rate (ticks/s) +host_mem_usage 236896 # Number of bytes of host memory used +host_seconds 0.12 # Real time elapsed on the host sim_insts 5327 # Number of instructions simulated sim_ops 5327 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 16320 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 8576 # Number of bytes read from this memory system.physmem.bytes_read::total 24896 # Number of bytes read from this memory @@ -42,6 +44,7 @@ system.membus.reqLayer0.occupancy 389000 # La system.membus.reqLayer0.utilization 1.4 # Layer utilization (%) system.membus.respLayer1.occupancy 3501000 # Layer occupancy (ticks) system.membus.respLayer1.utilization 12.6 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.workload.num_syscalls 11 # Number of system calls system.cpu.numCycles 55600 # number of cpu cycles simulated system.cpu.numWorkItemsStarted 0 # number of work items this cpu started @@ -74,6 +77,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 117.043638 # Average occupied blocks per requestor 
system.cpu.icache.tags.occ_percent::cpu.inst 0.057150 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.057150 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 257 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 109 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 148 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.125488 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 10999 # Number of tag accesses +system.cpu.icache.tags.data_accesses 10999 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 5114 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 5114 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 5114 # number of demand (read+write) hits @@ -154,6 +163,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 25.664749 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.003556 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.000783 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.004339 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 308 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 132 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 176 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.009399 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 3525 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 3525 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 2 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::cpu.data 1 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 3 # number of ReadReq hits @@ -280,6 +295,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 82.118455 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.020048 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.020048 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 135 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 32 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 103 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.032959 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 2911 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 2911 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 661 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 661 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 592 # number of WriteReq hits diff --git a/tests/quick/se/00.hello/ref/x86/linux/o3-timing/config.ini b/tests/quick/se/00.hello/ref/x86/linux/o3-timing/config.ini index 12dff19e9..b8e6ab850 100644 --- a/tests/quick/se/00.hello/ref/x86/linux/o3-timing/config.ini +++ b/tests/quick/se/00.hello/ref/x86/linux/o3-timing/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus 
physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -64,6 +68,8 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 +fetchBufferSize=64 fetchToDecodeDelay=1 fetchTrapLatency=1 fetchWidth=8 @@ -125,6 +131,7 @@ icache_port=system.cpu.icache.cpu_side type=DerivedClockDomain clk_divider=16 clk_domain=system.cpu_clk_domain +eventq_index=0 [system.cpu.branchPred] type=BranchPredictor @@ -133,6 +140,7 @@ BTBTagSize=16 RASSize=16 choiceCtrBits=2 choicePredictorSize=8192 +eventq_index=0 globalCtrBits=2 globalPredictorSize=8192 instShiftAmt=2 @@ -148,6 +156,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -156,6 +165,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -170,18 +180,22 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] type=X86TLB children=walker +eventq_index=0 size=64 walker=system.cpu.dtb.walker [system.cpu.dtb.walker] type=X86PagetableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=4 system=system port=system.cpu.toL2Bus.slave[3] @@ -190,15 +204,18 @@ port=system.cpu.toL2Bus.slave[3] type=FUPool children=FUList0 FUList1 FUList2 FUList3 FUList4 FUList5 FUList6 FUList7 FUList8 FUList=system.cpu.fuPool.FUList0 system.cpu.fuPool.FUList1 system.cpu.fuPool.FUList2 system.cpu.fuPool.FUList3 system.cpu.fuPool.FUList4 system.cpu.fuPool.FUList5 system.cpu.fuPool.FUList6 system.cpu.fuPool.FUList7 system.cpu.fuPool.FUList8 +eventq_index=0 [system.cpu.fuPool.FUList0] type=FUDesc children=opList count=6 +eventq_index=0 opList=system.cpu.fuPool.FUList0.opList [system.cpu.fuPool.FUList0.opList] type=OpDesc +eventq_index=0 issueLat=1 opClass=IntAlu opLat=1 @@ -207,16 +224,19 @@ opLat=1 type=FUDesc children=opList0 opList1 count=2 +eventq_index=0 opList=system.cpu.fuPool.FUList1.opList0 system.cpu.fuPool.FUList1.opList1 [system.cpu.fuPool.FUList1.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=IntMult opLat=3 [system.cpu.fuPool.FUList1.opList1] type=OpDesc +eventq_index=0 issueLat=19 opClass=IntDiv opLat=20 @@ -225,22 +245,26 @@ opLat=20 type=FUDesc children=opList0 opList1 opList2 count=4 +eventq_index=0 opList=system.cpu.fuPool.FUList2.opList0 system.cpu.fuPool.FUList2.opList1 system.cpu.fuPool.FUList2.opList2 [system.cpu.fuPool.FUList2.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatAdd opLat=2 [system.cpu.fuPool.FUList2.opList1] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatCmp opLat=2 [system.cpu.fuPool.FUList2.opList2] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatCvt opLat=2 @@ -249,22 +273,26 @@ opLat=2 type=FUDesc children=opList0 opList1 opList2 count=2 +eventq_index=0 opList=system.cpu.fuPool.FUList3.opList0 system.cpu.fuPool.FUList3.opList1 system.cpu.fuPool.FUList3.opList2 [system.cpu.fuPool.FUList3.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatMult opLat=4 [system.cpu.fuPool.FUList3.opList1] type=OpDesc +eventq_index=0 issueLat=12 opClass=FloatDiv opLat=12 
[system.cpu.fuPool.FUList3.opList2] type=OpDesc +eventq_index=0 issueLat=24 opClass=FloatSqrt opLat=24 @@ -273,10 +301,12 @@ opLat=24 type=FUDesc children=opList count=0 +eventq_index=0 opList=system.cpu.fuPool.FUList4.opList [system.cpu.fuPool.FUList4.opList] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemRead opLat=1 @@ -285,124 +315,145 @@ opLat=1 type=FUDesc children=opList00 opList01 opList02 opList03 opList04 opList05 opList06 opList07 opList08 opList09 opList10 opList11 opList12 opList13 opList14 opList15 opList16 opList17 opList18 opList19 count=4 +eventq_index=0 opList=system.cpu.fuPool.FUList5.opList00 system.cpu.fuPool.FUList5.opList01 system.cpu.fuPool.FUList5.opList02 system.cpu.fuPool.FUList5.opList03 system.cpu.fuPool.FUList5.opList04 system.cpu.fuPool.FUList5.opList05 system.cpu.fuPool.FUList5.opList06 system.cpu.fuPool.FUList5.opList07 system.cpu.fuPool.FUList5.opList08 system.cpu.fuPool.FUList5.opList09 system.cpu.fuPool.FUList5.opList10 system.cpu.fuPool.FUList5.opList11 system.cpu.fuPool.FUList5.opList12 system.cpu.fuPool.FUList5.opList13 system.cpu.fuPool.FUList5.opList14 system.cpu.fuPool.FUList5.opList15 system.cpu.fuPool.FUList5.opList16 system.cpu.fuPool.FUList5.opList17 system.cpu.fuPool.FUList5.opList18 system.cpu.fuPool.FUList5.opList19 [system.cpu.fuPool.FUList5.opList00] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdAdd opLat=1 [system.cpu.fuPool.FUList5.opList01] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdAddAcc opLat=1 [system.cpu.fuPool.FUList5.opList02] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdAlu opLat=1 [system.cpu.fuPool.FUList5.opList03] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdCmp opLat=1 [system.cpu.fuPool.FUList5.opList04] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdCvt opLat=1 [system.cpu.fuPool.FUList5.opList05] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMisc opLat=1 [system.cpu.fuPool.FUList5.opList06] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMult opLat=1 [system.cpu.fuPool.FUList5.opList07] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMultAcc opLat=1 [system.cpu.fuPool.FUList5.opList08] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdShift opLat=1 [system.cpu.fuPool.FUList5.opList09] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdShiftAcc opLat=1 [system.cpu.fuPool.FUList5.opList10] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdSqrt opLat=1 [system.cpu.fuPool.FUList5.opList11] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatAdd opLat=1 [system.cpu.fuPool.FUList5.opList12] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatAlu opLat=1 [system.cpu.fuPool.FUList5.opList13] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatCmp opLat=1 [system.cpu.fuPool.FUList5.opList14] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatCvt opLat=1 [system.cpu.fuPool.FUList5.opList15] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatDiv opLat=1 [system.cpu.fuPool.FUList5.opList16] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMisc opLat=1 [system.cpu.fuPool.FUList5.opList17] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMult opLat=1 [system.cpu.fuPool.FUList5.opList18] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMultAcc opLat=1 [system.cpu.fuPool.FUList5.opList19] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatSqrt opLat=1 @@ -411,10 +462,12 @@ opLat=1 type=FUDesc children=opList count=0 +eventq_index=0 opList=system.cpu.fuPool.FUList6.opList [system.cpu.fuPool.FUList6.opList] type=OpDesc +eventq_index=0 issueLat=1 
opClass=MemWrite opLat=1 @@ -423,16 +476,19 @@ opLat=1 type=FUDesc children=opList0 opList1 count=4 +eventq_index=0 opList=system.cpu.fuPool.FUList7.opList0 system.cpu.fuPool.FUList7.opList1 [system.cpu.fuPool.FUList7.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemRead opLat=1 [system.cpu.fuPool.FUList7.opList1] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemWrite opLat=1 @@ -441,10 +497,12 @@ opLat=1 type=FUDesc children=opList count=1 +eventq_index=0 opList=system.cpu.fuPool.FUList8.opList [system.cpu.fuPool.FUList8.opList] type=OpDesc +eventq_index=0 issueLat=3 opClass=IprAccess opLat=3 @@ -455,6 +513,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -463,6 +522,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -477,12 +537,15 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] type=X86LocalApic clk_domain=system.cpu.apic_clk_domain +eventq_index=0 int_latency=1000 pio_addr=2305843009213693952 pio_latency=100000 @@ -493,16 +556,19 @@ pio=system.membus.master[1] [system.cpu.isa] type=X86ISA +eventq_index=0 [system.cpu.itb] type=X86TLB children=walker +eventq_index=0 size=64 walker=system.cpu.itb.walker [system.cpu.itb.walker] type=X86PagetableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=4 system=system port=system.cpu.toL2Bus.slave[2] @@ -513,6 +579,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -521,6 +588,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -535,12 +603,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -550,6 +621,7 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side system.cpu.itb.walke [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -559,7 +631,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/x86/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/x86/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -573,11 +646,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -597,6 +672,7 @@ conf_table_reported=true device_bus_width=8 device_rowbuffer_size=1024 devices_per_rank=8 +eventq_index=0 in_addr_map=true mem_sched_policy=frfcfs null=false @@ -608,17 +684,21 @@ static_backend_latency=10000 static_frontend_latency=10000 tBURST=5000 tCL=13750 +tRAS=35000 tRCD=13750 tREFI=7800000 tRFC=300000 tRP=13750 +tRRD=6250 tWTR=7500 tXAW=40000 write_buffer_size=32 -write_thresh_perc=70 +write_high_thresh_perc=70 +write_low_thresh_perc=0 port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff 
--git a/tests/quick/se/00.hello/ref/x86/linux/o3-timing/simerr b/tests/quick/se/00.hello/ref/x86/linux/o3-timing/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/x86/linux/o3-timing/simerr +++ b/tests/quick/se/00.hello/ref/x86/linux/o3-timing/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/x86/linux/o3-timing/simout b/tests/quick/se/00.hello/ref/x86/linux/o3-timing/simout index 6fd808106..7bb858e94 100755 --- a/tests/quick/se/00.hello/ref/x86/linux/o3-timing/simout +++ b/tests/quick/se/00.hello/ref/x86/linux/o3-timing/simout @@ -1,11 +1,11 @@ gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Oct 16 2013 01:35:57 -gem5 started Oct 16 2013 01:54:57 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:10:34 +gem5 started Jan 22 2014 17:29:56 +gem5 executing on u200540-lin command line: build/X86/gem5.opt -d build/X86/tests/opt/quick/se/00.hello/x86/linux/o3-timing -re tests/run.py build/X86/tests/opt/quick/se/00.hello/x86/linux/o3-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... Hello world! -Exiting @ tick 19639500 because target called exit() +Exiting @ tick 19970500 because target called exit() diff --git a/tests/quick/se/00.hello/ref/x86/linux/o3-timing/stats.txt b/tests/quick/se/00.hello/ref/x86/linux/o3-timing/stats.txt index b42a03bbb..d0b8bca45 100644 --- a/tests/quick/se/00.hello/ref/x86/linux/o3-timing/stats.txt +++ b/tests/quick/se/00.hello/ref/x86/linux/o3-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000020 # Nu sim_ticks 19970500 # Number of ticks simulated final_tick 19970500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 37809 # Simulator instruction rate (inst/s) -host_op_rate 68492 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 140319695 # Simulator tick rate (ticks/s) -host_mem_usage 243588 # Number of bytes of host memory used -host_seconds 0.14 # Real time elapsed on the host +host_inst_rate 4162 # Simulator instruction rate (inst/s) +host_op_rate 7540 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 15448311 # Simulator tick rate (ticks/s) +host_mem_usage 248568 # Number of bytes of host memory used +host_seconds 1.29 # Real time elapsed on the host sim_insts 5380 # Number of instructions simulated sim_ops 9747 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 17472 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 9024 # Number of bytes read from this memory system.physmem.bytes_read::total 26496 # Number of bytes read from this memory @@ -212,6 +214,7 @@ system.membus.reqLayer0.occupancy 500500 # La system.membus.reqLayer0.utilization 2.5 # Layer utilization (%) system.membus.respLayer1.occupancy 3871500 # Layer occupancy (ticks) system.membus.respLayer1.utilization 19.4 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.branchPred.lookups 3084 # Number of BP lookups system.cpu.branchPred.condPredicted 3084 # Number of conditional branches predicted system.cpu.branchPred.condIncorrect 542 # Number of conditional branches incorrect @@ 
-221,6 +224,7 @@ system.cpu.branchPred.BTBCorrect 0 # Nu system.cpu.branchPred.BTBHitPct 31.800263 # BTB Hit Percentage system.cpu.branchPred.usedRAS 207 # Number of times the RAS was used to get a target. system.cpu.branchPred.RASInCorrect 74 # Number of incorrect RAS predictions. +system.cpu.apic_clk_domain.clock 8000 # Clock period in ticks system.cpu.workload.num_syscalls 11 # Number of system calls system.cpu.numCycles 39942 # number of cpu cycles simulated system.cpu.numWorkItemsStarted 0 # number of work items this cpu started @@ -509,6 +513,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 130.946729 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.063939 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.063939 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 274 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 150 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 124 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.133789 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 4234 # Number of tag accesses +system.cpu.icache.tags.data_accesses 4234 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 1609 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 1609 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 1609 # number of demand (read+write) hits @@ -595,6 +605,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 32.750233 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.003998 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.000999 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.004998 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 337 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 182 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 155 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.010284 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 3750 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 3750 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 1 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::cpu.data 1 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 2 # number of ReadReq hits @@ -721,6 +737,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 83.239431 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.020322 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.020322 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 142 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 51 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 91 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.034668 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 5234 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 5234 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 1479 # number of 
ReadReq hits system.cpu.dcache.ReadReq_hits::total 1479 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 858 # number of WriteReq hits diff --git a/tests/quick/se/00.hello/ref/x86/linux/simple-atomic/config.ini b/tests/quick/se/00.hello/ref/x86/linux/simple-atomic/config.ini index 6906721ce..eb1883caa 100644 --- a/tests/quick/se/00.hello/ref/x86/linux/simple-atomic/config.ini +++ b/tests/quick/se/00.hello/ref/x86/linux/simple-atomic/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 fastmem=false function_trace=false function_trace_start=0 @@ -76,16 +81,19 @@ icache_port=system.membus.slave[1] type=DerivedClockDomain clk_divider=16 clk_domain=system.cpu_clk_domain +eventq_index=0 [system.cpu.dtb] type=X86TLB children=walker +eventq_index=0 size=64 walker=system.cpu.dtb.walker [system.cpu.dtb.walker] type=X86PagetableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=4 system=system port=system.membus.slave[4] @@ -93,6 +101,7 @@ port=system.membus.slave[4] [system.cpu.interrupts] type=X86LocalApic clk_domain=system.cpu.apic_clk_domain +eventq_index=0 int_latency=1000 pio_addr=2305843009213693952 pio_latency=100000 @@ -103,22 +112,26 @@ pio=system.membus.master[1] [system.cpu.isa] type=X86ISA +eventq_index=0 [system.cpu.itb] type=X86TLB children=walker +eventq_index=0 size=64 walker=system.cpu.itb.walker [system.cpu.itb.walker] type=X86PagetableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=4 system=system port=system.membus.slave[3] [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -128,7 +141,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/x86/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/x86/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -142,11 +156,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -159,6 +175,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -168,5 +185,6 @@ port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/x86/linux/simple-atomic/simerr b/tests/quick/se/00.hello/ref/x86/linux/simple-atomic/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/x86/linux/simple-atomic/simerr +++ b/tests/quick/se/00.hello/ref/x86/linux/simple-atomic/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git 
a/tests/quick/se/00.hello/ref/x86/linux/simple-atomic/simout b/tests/quick/se/00.hello/ref/x86/linux/simple-atomic/simout index 551fc8a46..6330d042a 100755 --- a/tests/quick/se/00.hello/ref/x86/linux/simple-atomic/simout +++ b/tests/quick/se/00.hello/ref/x86/linux/simple-atomic/simout @@ -1,9 +1,9 @@ gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Oct 16 2013 01:35:57 -gem5 started Oct 16 2013 01:45:55 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:10:34 +gem5 started Jan 22 2014 17:30:08 +gem5 executing on u200540-lin command line: build/X86/gem5.opt -d build/X86/tests/opt/quick/se/00.hello/x86/linux/simple-atomic -re tests/run.py build/X86/tests/opt/quick/se/00.hello/x86/linux/simple-atomic Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/00.hello/ref/x86/linux/simple-atomic/stats.txt b/tests/quick/se/00.hello/ref/x86/linux/simple-atomic/stats.txt index 34f6daec3..f285016ae 100644 --- a/tests/quick/se/00.hello/ref/x86/linux/simple-atomic/stats.txt +++ b/tests/quick/se/00.hello/ref/x86/linux/simple-atomic/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000006 # Nu sim_ticks 5615000 # Number of ticks simulated final_tick 5615000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 57992 # Simulator instruction rate (inst/s) -host_op_rate 105036 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 60491180 # Simulator tick rate (ticks/s) -host_mem_usage 236648 # Number of bytes of host memory used +host_inst_rate 57117 # Simulator instruction rate (inst/s) +host_op_rate 103440 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 59566456 # Simulator tick rate (ticks/s) +host_mem_usage 237684 # Number of bytes of host memory used host_seconds 0.09 # Real time elapsed on the host sim_insts 5381 # Number of instructions simulated sim_ops 9748 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 54912 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 7066 # Number of bytes read from this memory system.physmem.bytes_read::total 61978 # Number of bytes read from this memory @@ -36,6 +38,8 @@ system.physmem.bw_total::total 12304541407 # To system.membus.throughput 12304541407 # Throughput (bytes/s) system.membus.data_through_bus 69090 # Total data (bytes) system.membus.snoop_data_through_bus 0 # Total snoop data (bytes) +system.cpu_clk_domain.clock 500 # Clock period in ticks +system.cpu.apic_clk_domain.clock 8000 # Clock period in ticks system.cpu.workload.num_syscalls 11 # Number of system calls system.cpu.numCycles 11231 # number of cpu cycles simulated system.cpu.numWorkItemsStarted 0 # number of work items this cpu started diff --git a/tests/quick/se/00.hello/ref/x86/linux/simple-timing-ruby/config.ini b/tests/quick/se/00.hello/ref/x86/linux/simple-timing-ruby/config.ini index 3bbe64bb8..d7786b69e 100644 --- a/tests/quick/se/00.hello/ref/x86/linux/simple-timing-ruby/config.ini +++ b/tests/quick/se/00.hello/ref/x86/linux/simple-timing-ruby/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,6 
+14,7 @@ children=clk_domain cpu physmem ruby sys_port_proxy voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -69,21 +74,25 @@ icache_port=system.ruby.l1_cntrl0.sequencer.slave[0] type=DerivedClockDomain clk_divider=16 clk_domain=system.cpu.clk_domain +eventq_index=0 [system.cpu.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu.dtb] type=X86TLB children=walker +eventq_index=0 size=64 walker=system.cpu.dtb.walker [system.cpu.dtb.walker] type=X86PagetableWalker clk_domain=system.cpu.clk_domain +eventq_index=0 num_squash_per_cycle=4 system=system port=system.ruby.l1_cntrl0.sequencer.slave[3] @@ -91,6 +100,7 @@ port=system.ruby.l1_cntrl0.sequencer.slave[3] [system.cpu.interrupts] type=X86LocalApic clk_domain=system.cpu.apic_clk_domain +eventq_index=0 int_latency=1 pio_addr=2305843009213693952 pio_latency=100 @@ -101,22 +111,26 @@ pio=system.ruby.l1_cntrl0.sequencer.master[0] [system.cpu.isa] type=X86ISA +eventq_index=0 [system.cpu.itb] type=X86TLB children=walker +eventq_index=0 size=64 walker=system.cpu.itb.walker [system.cpu.itb.walker] type=X86PagetableWalker clk_domain=system.cpu.clk_domain +eventq_index=0 num_squash_per_cycle=4 system=system port=system.ruby.l1_cntrl0.sequencer.slave[2] [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -126,7 +140,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/x86/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/x86/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -142,6 +157,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -150,18 +166,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=1 random_seed=1234 randomization=false -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -169,9 +189,10 @@ type=Directory_Controller children=directory memBuffer buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory directory_latency=12 +eventq_index=0 memBuffer=system.ruby.dir_cntrl0.memBuffer number_of_TBEs=256 peer=Null @@ -182,6 +203,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 map_levels=4 numa_high_bit=5 size=268435456 @@ -198,6 +220,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -217,7 +240,8 @@ buffer_size=0 cacheMemory=system.ruby.l1_cntrl0.cacheMemory 
cache_response_latency=12 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 +eventq_index=0 issue_latency=2 number_of_TBEs=256 peer=Null @@ -233,6 +257,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -248,6 +273,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.cacheMemory deadlock_threshold=500000 +eventq_index=0 icache=system.ruby.l1_cntrl0.cacheMemory max_outstanding_requests=16 ruby_system=system.ruby @@ -264,6 +290,7 @@ slave=system.cpu.icache_port system.cpu.dcache_port system.cpu.itb.walker.port s type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -273,6 +300,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 number_of_virtual_networks=10 @@ -283,6 +311,7 @@ topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 int_node=system.ruby.network.routers0 latency=1 @@ -292,6 +321,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers1 latency=1 @@ -301,6 +331,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=2 node_a=system.ruby.network.routers0 @@ -310,6 +341,7 @@ weight=1 [system.ruby.network.int_links1] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=3 node_a=system.ruby.network.routers1 @@ -319,32 +351,29 @@ weight=1 [system.ruby.network.routers0] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers1] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers2] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false -num_of_sequencers=1 -ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -356,5 +385,6 @@ slave=system.system_port [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/x86/linux/simple-timing-ruby/simerr b/tests/quick/se/00.hello/ref/x86/linux/simple-timing-ruby/simerr index bbc0c797e..86244d4bf 100755 --- a/tests/quick/se/00.hello/ref/x86/linux/simple-timing-ruby/simerr +++ b/tests/quick/se/00.hello/ref/x86/linux/simple-timing-ruby/simerr @@ -3,4 +3,3 @@ warn: rounding error > tolerance warn: rounding error > tolerance 0.072760 rounded to 0 warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/x86/linux/simple-timing-ruby/simout b/tests/quick/se/00.hello/ref/x86/linux/simple-timing-ruby/simout index 78a38ee4d..53e9ad058 100755 --- a/tests/quick/se/00.hello/ref/x86/linux/simple-timing-ruby/simout +++ b/tests/quick/se/00.hello/ref/x86/linux/simple-timing-ruby/simout @@ -1,9 +1,9 @@ gem5 Simulator System. 
http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Oct 16 2013 01:35:57 -gem5 started Oct 16 2013 01:54:46 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:10:34 +gem5 started Jan 22 2014 17:30:11 +gem5 executing on u200540-lin command line: build/X86/gem5.opt -d build/X86/tests/opt/quick/se/00.hello/x86/linux/simple-timing-ruby -re tests/run.py build/X86/tests/opt/quick/se/00.hello/x86/linux/simple-timing-ruby Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/00.hello/ref/x86/linux/simple-timing-ruby/stats.txt b/tests/quick/se/00.hello/ref/x86/linux/simple-timing-ruby/stats.txt index 1b1365419..9b8cf8013 100644 --- a/tests/quick/se/00.hello/ref/x86/linux/simple-timing-ruby/stats.txt +++ b/tests/quick/se/00.hello/ref/x86/linux/simple-timing-ruby/stats.txt @@ -4,13 +4,16 @@ sim_seconds 0.000122 # Nu sim_ticks 121759 # Number of ticks simulated final_tick 121759 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_inst_rate 9034 # Simulator instruction rate (inst/s) -host_op_rate 16364 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 204397 # Simulator tick rate (ticks/s) -host_mem_usage 189696 # Number of bytes of host memory used -host_seconds 0.60 # Real time elapsed on the host +host_inst_rate 33614 # Simulator instruction rate (inst/s) +host_op_rate 60888 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 760469 # Simulator tick rate (ticks/s) +host_mem_usage 144688 # Number of bytes of host memory used +host_seconds 0.16 # Real time elapsed on the host sim_insts 5381 # Number of instructions simulated sim_ops 9748 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.delayHist::bucket_size 1 # delay histogram for all message system.ruby.delayHist::max_bucket 9 # delay histogram for all message system.ruby.delayHist::samples 2750 # delay histogram for all message @@ -47,6 +50,7 @@ system.ruby.miss_latency_hist::stdev 6.315805 system.ruby.miss_latency_hist | 0 0.00% 0.00% | 0 0.00% 0.00% | 0 0.00% 0.00% | 329 23.89% 23.89% | 977 70.95% 94.84% | 69 5.01% 99.85% | 1 0.07% 99.93% | 1 0.07% 100.00% | 0 0.00% 100.00% | 0 0.00% 100.00% system.ruby.miss_latency_hist::total 1377 system.ruby.Directory.incomplete_times 1376 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.l1_cntrl0.cacheMemory.demand_hits 7475 # Number of cache demand hits system.ruby.l1_cntrl0.cacheMemory.demand_misses 1377 # Number of cache demand misses system.ruby.l1_cntrl0.cacheMemory.demand_accesses 8852 # Number of cache demand accesses @@ -99,6 +103,8 @@ system.ruby.network.msg_byte.Control 33048 system.ruby.network.msg_byte.Data 296568 system.ruby.network.msg_byte.Response_Data 297432 system.ruby.network.msg_byte.Writeback_Control 32952 +system.cpu.clk_domain.clock 1 # Clock period in ticks +system.cpu.apic_clk_domain.clock 16 # Clock period in ticks system.cpu.workload.num_syscalls 11 # Number of system calls system.cpu.numCycles 121759 # number of cpu cycles simulated system.cpu.numWorkItemsStarted 0 # number of work items this cpu started diff --git a/tests/quick/se/00.hello/ref/x86/linux/simple-timing/config.ini 
b/tests/quick/se/00.hello/ref/x86/linux/simple-timing/config.ini index 2a7188a36..b6193f8c7 100644 --- a/tests/quick/se/00.hello/ref/x86/linux/simple-timing/config.ini +++ b/tests/quick/se/00.hello/ref/x86/linux/simple-timing/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -69,6 +74,7 @@ icache_port=system.cpu.icache.cpu_side type=DerivedClockDomain clk_divider=16 clk_domain=system.cpu_clk_domain +eventq_index=0 [system.cpu.dcache] type=BaseCache @@ -76,6 +82,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -84,6 +91,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -98,18 +106,22 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] type=X86TLB children=walker +eventq_index=0 size=64 walker=system.cpu.dtb.walker [system.cpu.dtb.walker] type=X86PagetableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=4 system=system port=system.cpu.toL2Bus.slave[3] @@ -120,6 +132,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -128,6 +141,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -142,12 +156,15 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] type=X86LocalApic clk_domain=system.cpu.apic_clk_domain +eventq_index=0 int_latency=1000 pio_addr=2305843009213693952 pio_latency=100000 @@ -158,16 +175,19 @@ pio=system.membus.master[1] [system.cpu.isa] type=X86ISA +eventq_index=0 [system.cpu.itb] type=X86TLB children=walker +eventq_index=0 size=64 walker=system.cpu.itb.walker [system.cpu.itb.walker] type=X86PagetableWalker clk_domain=system.cpu_clk_domain +eventq_index=0 num_squash_per_cycle=4 system=system port=system.cpu.toL2Bus.slave[2] @@ -178,6 +198,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -186,6 +207,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -200,12 +222,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 
header_cycles=1 system=system use_default_range=false @@ -215,6 +240,7 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side system.cpu.itb.walke [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -224,7 +250,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/hello/bin/x86/linux/hello +eventq_index=0 +executable=/dist/test-progs/hello/bin/x86/linux/hello gid=100 input=cin max_stack_size=67108864 @@ -238,11 +265,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -255,6 +284,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -264,5 +294,6 @@ port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/00.hello/ref/x86/linux/simple-timing/simerr b/tests/quick/se/00.hello/ref/x86/linux/simple-timing/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/00.hello/ref/x86/linux/simple-timing/simerr +++ b/tests/quick/se/00.hello/ref/x86/linux/simple-timing/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/00.hello/ref/x86/linux/simple-timing/simout b/tests/quick/se/00.hello/ref/x86/linux/simple-timing/simout index c59dbb17e..bb364e541 100755 --- a/tests/quick/se/00.hello/ref/x86/linux/simple-timing/simout +++ b/tests/quick/se/00.hello/ref/x86/linux/simple-timing/simout @@ -1,9 +1,9 @@ gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Oct 16 2013 01:35:57 -gem5 started Oct 16 2013 01:41:56 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:10:34 +gem5 started Jan 22 2014 17:30:10 +gem5 executing on u200540-lin command line: build/X86/gem5.opt -d build/X86/tests/opt/quick/se/00.hello/x86/linux/simple-timing -re tests/run.py build/X86/tests/opt/quick/se/00.hello/x86/linux/simple-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
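
The stats.txt hunks in this patch add per-task-id cache occupancy counters (occ_task_id_blocks, age_task_id_blocks, occ_task_id_percent, plus tag_accesses/data_accesses) alongside the new clock-domain entries. As a minimal sketch, outside gem5 itself and not part of this commit, the snippet below shows one way such counters could be pulled back out of a stats.txt dump; the m5out/stats.txt path and the helper names are illustrative assumptions.

# Minimal sketch (illustrative only): extract the new per-task-id cache
# occupancy counters from a gem5 stats.txt file. Each stats line has the
# form "name   value   # description"; non-numeric lines are skipped.
import re
from collections import defaultdict

STAT_RE = re.compile(r'^(\S+)\s+([-+]?[0-9.]+)\s')

def read_stats(path):
    """Return {stat name: value} for single-valued numeric stats."""
    stats = {}
    with open(path) as f:
        for line in f:
            m = STAT_RE.match(line)
            if m:
                stats[m.group(1)] = float(m.group(2))
    return stats

def occupancy_by_task_id(stats):
    """Group occ_task_id_blocks::<task id> entries by cache tags object."""
    occ = defaultdict(dict)
    for name, value in stats.items():
        if '.occ_task_id_blocks::' in name:
            cache, task_id = name.split('.occ_task_id_blocks::')
            occ[cache][task_id] = value
    return occ

if __name__ == '__main__':
    stats = read_stats('m5out/stats.txt')   # assumed output location
    for cache, per_task in sorted(occupancy_by_task_id(stats).items()):
        print(cache, per_task)

On the clock-domain side, the new per-domain clock stats follow directly from the config.ini files in this patch: the APIC's DerivedClockDomain divides its parent clock by clk_divider=16, so a 500-tick cpu_clk_domain yields system.cpu.apic_clk_domain.clock of 500 * 16 = 8000 ticks, and the 1-tick Ruby run above correspondingly reports 16.
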
diff --git a/tests/quick/se/00.hello/ref/x86/linux/simple-timing/stats.txt b/tests/quick/se/00.hello/ref/x86/linux/simple-timing/stats.txt index ff37d4bfb..017ee7525 100644 --- a/tests/quick/se/00.hello/ref/x86/linux/simple-timing/stats.txt +++ b/tests/quick/se/00.hello/ref/x86/linux/simple-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000028 # Nu sim_ticks 28358000 # Number of ticks simulated final_tick 28358000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 41834 # Simulator instruction rate (inst/s) -host_op_rate 75775 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 220409714 # Simulator tick rate (ticks/s) -host_mem_usage 245252 # Number of bytes of host memory used -host_seconds 0.13 # Real time elapsed on the host +host_inst_rate 44998 # Simulator instruction rate (inst/s) +host_op_rate 81497 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 237030591 # Simulator tick rate (ticks/s) +host_mem_usage 247544 # Number of bytes of host memory used +host_seconds 0.12 # Real time elapsed on the host sim_insts 5381 # Number of instructions simulated sim_ops 9748 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 14528 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 8576 # Number of bytes read from this memory system.physmem.bytes_read::total 23104 # Number of bytes read from this memory @@ -44,6 +46,8 @@ system.membus.reqLayer0.occupancy 361000 # La system.membus.reqLayer0.utilization 1.3 # Layer utilization (%) system.membus.respLayer1.occupancy 3249000 # Layer occupancy (ticks) system.membus.respLayer1.utilization 11.5 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks +system.cpu.apic_clk_domain.clock 8000 # Clock period in ticks system.cpu.workload.num_syscalls 11 # Number of system calls system.cpu.numCycles 56716 # number of cpu cycles simulated system.cpu.numWorkItemsStarted 0 # number of work items this cpu started @@ -78,6 +82,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 105.550219 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.051538 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.051538 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 228 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 96 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 132 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.111328 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 13958 # Number of tag accesses +system.cpu.icache.tags.data_accesses 13958 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 6637 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 6637 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 6637 # number of demand (read+write) hits @@ -158,6 +168,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 28.475810 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.003221 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.000869 # Average percentage of cache occupancy 
system.cpu.l2cache.tags.occ_percent::total 0.004090 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 282 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 115 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 167 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.008606 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 3257 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 3257 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 1 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 1 # number of ReadReq hits system.cpu.l2cache.demand_hits::cpu.inst 1 # number of demand (read+write) hits @@ -281,6 +297,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 80.797237 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.019726 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.019726 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 134 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 33 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 101 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.032715 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 4110 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 4110 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 998 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 998 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 856 # number of WriteReq hits diff --git a/tests/quick/se/01.hello-2T-smt/ref/alpha/linux/o3-timing/config.ini b/tests/quick/se/01.hello-2T-smt/ref/alpha/linux/o3-timing/config.ini index 708085ca5..39d7de978 100644 --- a/tests/quick/se/01.hello-2T-smt/ref/alpha/linux/o3-timing/config.ini +++ b/tests/quick/se/01.hello-2T-smt/ref/alpha/linux/o3-timing/config.ini @@ -159,6 +159,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -175,6 +176,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] @@ -504,6 +506,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -520,6 +523,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] @@ -529,10 +533,12 @@ eventq_index=0 [system.cpu.isa0] type=AlphaISA eventq_index=0 +system=system [system.cpu.isa1] type=AlphaISA eventq_index=0 +system=system [system.cpu.itb] type=AlphaTLB @@ -554,6 +560,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -570,6 +577,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] @@ -596,7 +604,7 @@ env= errout=cerr euid=100 eventq_index=0 -executable=tests/test-progs/hello/bin/alpha/linux/hello +executable=/dist/test-progs/hello/bin/alpha/linux/hello gid=100 input=cin 
max_stack_size=67108864 @@ -616,7 +624,7 @@ env= errout=cerr euid=100 eventq_index=0 -executable=tests/test-progs/hello/bin/alpha/linux/hello +executable=/dist/test-progs/hello/bin/alpha/linux/hello gid=100 input=cin max_stack_size=67108864 diff --git a/tests/quick/se/01.hello-2T-smt/ref/alpha/linux/o3-timing/simerr b/tests/quick/se/01.hello-2T-smt/ref/alpha/linux/o3-timing/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/01.hello-2T-smt/ref/alpha/linux/o3-timing/simerr +++ b/tests/quick/se/01.hello-2T-smt/ref/alpha/linux/o3-timing/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/01.hello-2T-smt/ref/alpha/linux/o3-timing/simout b/tests/quick/se/01.hello-2T-smt/ref/alpha/linux/o3-timing/simout index d74926aee..262de0632 100755 --- a/tests/quick/se/01.hello-2T-smt/ref/alpha/linux/o3-timing/simout +++ b/tests/quick/se/01.hello-2T-smt/ref/alpha/linux/o3-timing/simout @@ -1,9 +1,9 @@ gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Oct 15 2013 18:24:51 -gem5 started Oct 16 2013 01:34:33 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:27:55 +gem5 started Jan 22 2014 17:24:31 +gem5 executing on u200540-lin command line: build/ALPHA/gem5.opt -d build/ALPHA/tests/opt/quick/se/01.hello-2T-smt/alpha/linux/o3-timing -re tests/run.py build/ALPHA/tests/opt/quick/se/01.hello-2T-smt/alpha/linux/o3-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... @@ -11,4 +11,4 @@ info: Increasing stack size by one page. info: Increasing stack size by one page. Hello world! Hello world! -Exiting @ tick 24404000 because target called exit() +Exiting @ tick 24229500 because target called exit() diff --git a/tests/quick/se/01.hello-2T-smt/ref/alpha/linux/o3-timing/stats.txt b/tests/quick/se/01.hello-2T-smt/ref/alpha/linux/o3-timing/stats.txt index b48213381..941a3afbf 100644 --- a/tests/quick/se/01.hello-2T-smt/ref/alpha/linux/o3-timing/stats.txt +++ b/tests/quick/se/01.hello-2T-smt/ref/alpha/linux/o3-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000024 # Nu sim_ticks 24229500 # Number of ticks simulated final_tick 24229500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 38113 # Simulator instruction rate (inst/s) -host_op_rate 38111 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 72448291 # Simulator tick rate (ticks/s) -host_mem_usage 273720 # Number of bytes of host memory used -host_seconds 0.33 # Real time elapsed on the host +host_inst_rate 46987 # Simulator instruction rate (inst/s) +host_op_rate 46985 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 89318295 # Simulator tick rate (ticks/s) +host_mem_usage 231368 # Number of bytes of host memory used +host_seconds 0.27 # Real time elapsed on the host sim_insts 12745 # Number of instructions simulated sim_ops 12745 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 39936 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 22464 # Number of bytes read from this memory system.physmem.bytes_read::total 62400 # Number of bytes read from this memory @@ -218,6 +220,7 @@ system.membus.reqLayer0.occupancy 
1237000 # La system.membus.reqLayer0.utilization 5.1 # Layer utilization (%) system.membus.respLayer1.occupancy 9059500 # Layer occupancy (ticks) system.membus.respLayer1.utilization 37.4 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.branchPred.lookups 6676 # Number of BP lookups system.cpu.branchPred.condPredicted 3772 # Number of conditional branches predicted system.cpu.branchPred.condIncorrect 1441 # Number of conditional branches incorrect @@ -656,6 +659,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 312.493120 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.152585 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.152585 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 620 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 263 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 357 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.302734 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 11364 # Number of tag accesses +system.cpu.icache.tags.data_accesses 11364 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 4320 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 4320 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 4320 # number of demand (read+write) hits @@ -744,6 +753,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 120.164328 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.009552 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.003667 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.013219 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 829 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 337 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 492 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.025299 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 8791 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 8791 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 2 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 2 # number of ReadReq hits system.cpu.l2cache.demand_hits::cpu.inst 2 # number of demand (read+write) hits @@ -869,6 +884,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 214.018929 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.052251 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.052251 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 351 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 97 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 254 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.085693 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 11365 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 11365 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 3448 # number of ReadReq hits 
system.cpu.dcache.ReadReq_hits::total 3448 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 1022 # number of WriteReq hits diff --git a/tests/quick/se/02.insttest/ref/sparc/linux/inorder-timing/config.ini b/tests/quick/se/02.insttest/ref/sparc/linux/inorder-timing/config.ini index 86810fed8..a5a69d897 100644 --- a/tests/quick/se/02.insttest/ref/sparc/linux/inorder-timing/config.ini +++ b/tests/quick/se/02.insttest/ref/sparc/linux/inorder-timing/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -56,6 +60,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 fetchBuffSize=4 function_trace=false function_trace_start=0 @@ -90,6 +95,7 @@ BTBTagSize=16 RASSize=16 choiceCtrBits=2 choicePredictorSize=8192 +eventq_index=0 globalCtrBits=2 globalPredictorSize=8192 instShiftAmt=2 @@ -105,6 +111,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -113,6 +120,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system tags=system.cpu.dcache.tags @@ -127,11 +135,14 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] type=SparcTLB +eventq_index=0 size=64 [system.cpu.icache] @@ -140,6 +151,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -148,6 +160,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -162,17 +175,22 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] type=SparcInterrupts +eventq_index=0 [system.cpu.isa] type=SparcISA +eventq_index=0 [system.cpu.itb] type=SparcTLB +eventq_index=0 size=64 [system.cpu.l2cache] @@ -181,6 +199,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -189,6 +208,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -203,12 +223,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -218,6 +241,7 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -227,7 +251,8 @@ egid=100 env= errout=cerr euid=100 
-executable=/dist/m5/regression/test-progs/insttest/bin/sparc/linux/insttest +eventq_index=0 +executable=/dist/test-progs/insttest/bin/sparc/linux/insttest gid=100 input=cin max_stack_size=67108864 @@ -241,11 +266,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -265,6 +292,7 @@ conf_table_reported=true device_bus_width=8 device_rowbuffer_size=1024 devices_per_rank=8 +eventq_index=0 in_addr_map=true mem_sched_policy=frfcfs null=false @@ -276,17 +304,21 @@ static_backend_latency=10000 static_frontend_latency=10000 tBURST=5000 tCL=13750 +tRAS=35000 tRCD=13750 tREFI=7800000 tRFC=300000 tRP=13750 +tRRD=6250 tWTR=7500 tXAW=40000 write_buffer_size=32 -write_thresh_perc=70 +write_high_thresh_perc=70 +write_low_thresh_perc=0 port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/02.insttest/ref/sparc/linux/inorder-timing/simerr b/tests/quick/se/02.insttest/ref/sparc/linux/inorder-timing/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/02.insttest/ref/sparc/linux/inorder-timing/simerr +++ b/tests/quick/se/02.insttest/ref/sparc/linux/inorder-timing/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/02.insttest/ref/sparc/linux/inorder-timing/simout b/tests/quick/se/02.insttest/ref/sparc/linux/inorder-timing/simout index 947073917..8b0aca80b 100755 --- a/tests/quick/se/02.insttest/ref/sparc/linux/inorder-timing/simout +++ b/tests/quick/se/02.insttest/ref/sparc/linux/inorder-timing/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/SPARC/tests/opt/quick/se/02.insttest/sparc/linux/inorder-timing/simout -Redirecting stderr to build/SPARC/tests/opt/quick/se/02.insttest/sparc/linux/inorder-timing/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 06:07:13 -gem5 started Sep 22 2013 06:11:33 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:04:27 +gem5 started Jan 22 2014 17:29:33 +gem5 executing on u200540-lin command line: build/SPARC/gem5.opt -d build/SPARC/tests/opt/quick/se/02.insttest/sparc/linux/inorder-timing -re tests/run.py build/SPARC/tests/opt/quick/se/02.insttest/sparc/linux/inorder-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
@@ -20,4 +18,4 @@ LDTX: Passed LDTW: Passed STTW: Passed Done -Exiting @ tick 27282000 because target called exit() +Exiting @ tick 27705000 because target called exit() diff --git a/tests/quick/se/02.insttest/ref/sparc/linux/inorder-timing/stats.txt b/tests/quick/se/02.insttest/ref/sparc/linux/inorder-timing/stats.txt index 9f174a09c..4c8817e23 100644 --- a/tests/quick/se/02.insttest/ref/sparc/linux/inorder-timing/stats.txt +++ b/tests/quick/se/02.insttest/ref/sparc/linux/inorder-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000028 # Nu sim_ticks 27705000 # Number of ticks simulated final_tick 27705000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 72386 # Simulator instruction rate (inst/s) -host_op_rate 72381 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 132251643 # Simulator tick rate (ticks/s) -host_mem_usage 260736 # Number of bytes of host memory used -host_seconds 0.21 # Real time elapsed on the host +host_inst_rate 23200 # Simulator instruction rate (inst/s) +host_op_rate 23199 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 42390050 # Simulator tick rate (ticks/s) +host_mem_usage 236824 # Number of bytes of host memory used +host_seconds 0.65 # Real time elapsed on the host sim_insts 15162 # Number of instructions simulated sim_ops 15162 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 19072 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 8832 # Number of bytes read from this memory system.physmem.bytes_read::total 27904 # Number of bytes read from this memory @@ -215,6 +217,7 @@ system.membus.reqLayer0.occupancy 519000 # La system.membus.reqLayer0.utilization 1.9 # Layer utilization (%) system.membus.respLayer1.occupancy 4048750 # Layer occupancy (ticks) system.membus.respLayer1.utilization 14.6 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.branchPred.lookups 5146 # Number of BP lookups system.cpu.branchPred.condPredicted 3529 # Number of conditional branches predicted system.cpu.branchPred.condIncorrect 2366 # Number of conditional branches incorrect @@ -294,6 +297,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 169.234439 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.082634 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.082634 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 299 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 78 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 221 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.145996 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 7069 # Number of tag accesses +system.cpu.icache.tags.data_accesses 7069 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 3004 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 3004 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 3004 # number of demand (read+write) hits @@ -399,6 +408,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 31.741320 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.005144 # 
Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.000969 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.006113 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 350 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 90 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 260 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.010681 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 3947 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 3947 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 2 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 2 # number of ReadReq hits system.cpu.l2cache.demand_hits::cpu.inst 2 # number of demand (read+write) hits @@ -522,6 +537,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 98.671839 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.024090 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.024090 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 138 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 16 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 122 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.033691 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 7484 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 7484 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 2167 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 2167 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 1020 # number of WriteReq hits diff --git a/tests/quick/se/02.insttest/ref/sparc/linux/o3-timing/config.ini b/tests/quick/se/02.insttest/ref/sparc/linux/o3-timing/config.ini index e50ecc67e..48563010b 100644 --- a/tests/quick/se/02.insttest/ref/sparc/linux/o3-timing/config.ini +++ b/tests/quick/se/02.insttest/ref/sparc/linux/o3-timing/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -64,6 +68,8 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 +fetchBufferSize=64 fetchToDecodeDelay=1 fetchTrapLatency=1 fetchWidth=8 @@ -128,6 +134,7 @@ BTBTagSize=16 RASSize=16 choiceCtrBits=2 choicePredictorSize=8192 +eventq_index=0 globalCtrBits=2 globalPredictorSize=8192 instShiftAmt=2 @@ -143,6 +150,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -151,6 +159,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 system=system 
tags=system.cpu.dcache.tags @@ -165,26 +174,32 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] type=SparcTLB +eventq_index=0 size=64 [system.cpu.fuPool] type=FUPool children=FUList0 FUList1 FUList2 FUList3 FUList4 FUList5 FUList6 FUList7 FUList8 FUList=system.cpu.fuPool.FUList0 system.cpu.fuPool.FUList1 system.cpu.fuPool.FUList2 system.cpu.fuPool.FUList3 system.cpu.fuPool.FUList4 system.cpu.fuPool.FUList5 system.cpu.fuPool.FUList6 system.cpu.fuPool.FUList7 system.cpu.fuPool.FUList8 +eventq_index=0 [system.cpu.fuPool.FUList0] type=FUDesc children=opList count=6 +eventq_index=0 opList=system.cpu.fuPool.FUList0.opList [system.cpu.fuPool.FUList0.opList] type=OpDesc +eventq_index=0 issueLat=1 opClass=IntAlu opLat=1 @@ -193,16 +208,19 @@ opLat=1 type=FUDesc children=opList0 opList1 count=2 +eventq_index=0 opList=system.cpu.fuPool.FUList1.opList0 system.cpu.fuPool.FUList1.opList1 [system.cpu.fuPool.FUList1.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=IntMult opLat=3 [system.cpu.fuPool.FUList1.opList1] type=OpDesc +eventq_index=0 issueLat=19 opClass=IntDiv opLat=20 @@ -211,22 +229,26 @@ opLat=20 type=FUDesc children=opList0 opList1 opList2 count=4 +eventq_index=0 opList=system.cpu.fuPool.FUList2.opList0 system.cpu.fuPool.FUList2.opList1 system.cpu.fuPool.FUList2.opList2 [system.cpu.fuPool.FUList2.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatAdd opLat=2 [system.cpu.fuPool.FUList2.opList1] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatCmp opLat=2 [system.cpu.fuPool.FUList2.opList2] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatCvt opLat=2 @@ -235,22 +257,26 @@ opLat=2 type=FUDesc children=opList0 opList1 opList2 count=2 +eventq_index=0 opList=system.cpu.fuPool.FUList3.opList0 system.cpu.fuPool.FUList3.opList1 system.cpu.fuPool.FUList3.opList2 [system.cpu.fuPool.FUList3.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=FloatMult opLat=4 [system.cpu.fuPool.FUList3.opList1] type=OpDesc +eventq_index=0 issueLat=12 opClass=FloatDiv opLat=12 [system.cpu.fuPool.FUList3.opList2] type=OpDesc +eventq_index=0 issueLat=24 opClass=FloatSqrt opLat=24 @@ -259,10 +285,12 @@ opLat=24 type=FUDesc children=opList count=0 +eventq_index=0 opList=system.cpu.fuPool.FUList4.opList [system.cpu.fuPool.FUList4.opList] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemRead opLat=1 @@ -271,124 +299,145 @@ opLat=1 type=FUDesc children=opList00 opList01 opList02 opList03 opList04 opList05 opList06 opList07 opList08 opList09 opList10 opList11 opList12 opList13 opList14 opList15 opList16 opList17 opList18 opList19 count=4 +eventq_index=0 opList=system.cpu.fuPool.FUList5.opList00 system.cpu.fuPool.FUList5.opList01 system.cpu.fuPool.FUList5.opList02 system.cpu.fuPool.FUList5.opList03 system.cpu.fuPool.FUList5.opList04 system.cpu.fuPool.FUList5.opList05 system.cpu.fuPool.FUList5.opList06 system.cpu.fuPool.FUList5.opList07 system.cpu.fuPool.FUList5.opList08 system.cpu.fuPool.FUList5.opList09 system.cpu.fuPool.FUList5.opList10 system.cpu.fuPool.FUList5.opList11 system.cpu.fuPool.FUList5.opList12 system.cpu.fuPool.FUList5.opList13 system.cpu.fuPool.FUList5.opList14 system.cpu.fuPool.FUList5.opList15 system.cpu.fuPool.FUList5.opList16 system.cpu.fuPool.FUList5.opList17 system.cpu.fuPool.FUList5.opList18 system.cpu.fuPool.FUList5.opList19 [system.cpu.fuPool.FUList5.opList00] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdAdd opLat=1 [system.cpu.fuPool.FUList5.opList01] type=OpDesc 
+eventq_index=0 issueLat=1 opClass=SimdAddAcc opLat=1 [system.cpu.fuPool.FUList5.opList02] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdAlu opLat=1 [system.cpu.fuPool.FUList5.opList03] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdCmp opLat=1 [system.cpu.fuPool.FUList5.opList04] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdCvt opLat=1 [system.cpu.fuPool.FUList5.opList05] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMisc opLat=1 [system.cpu.fuPool.FUList5.opList06] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMult opLat=1 [system.cpu.fuPool.FUList5.opList07] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdMultAcc opLat=1 [system.cpu.fuPool.FUList5.opList08] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdShift opLat=1 [system.cpu.fuPool.FUList5.opList09] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdShiftAcc opLat=1 [system.cpu.fuPool.FUList5.opList10] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdSqrt opLat=1 [system.cpu.fuPool.FUList5.opList11] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatAdd opLat=1 [system.cpu.fuPool.FUList5.opList12] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatAlu opLat=1 [system.cpu.fuPool.FUList5.opList13] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatCmp opLat=1 [system.cpu.fuPool.FUList5.opList14] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatCvt opLat=1 [system.cpu.fuPool.FUList5.opList15] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatDiv opLat=1 [system.cpu.fuPool.FUList5.opList16] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMisc opLat=1 [system.cpu.fuPool.FUList5.opList17] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMult opLat=1 [system.cpu.fuPool.FUList5.opList18] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatMultAcc opLat=1 [system.cpu.fuPool.FUList5.opList19] type=OpDesc +eventq_index=0 issueLat=1 opClass=SimdFloatSqrt opLat=1 @@ -397,10 +446,12 @@ opLat=1 type=FUDesc children=opList count=0 +eventq_index=0 opList=system.cpu.fuPool.FUList6.opList [system.cpu.fuPool.FUList6.opList] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemWrite opLat=1 @@ -409,16 +460,19 @@ opLat=1 type=FUDesc children=opList0 opList1 count=4 +eventq_index=0 opList=system.cpu.fuPool.FUList7.opList0 system.cpu.fuPool.FUList7.opList1 [system.cpu.fuPool.FUList7.opList0] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemRead opLat=1 [system.cpu.fuPool.FUList7.opList1] type=OpDesc +eventq_index=0 issueLat=1 opClass=MemWrite opLat=1 @@ -427,10 +481,12 @@ opLat=1 type=FUDesc children=opList count=1 +eventq_index=0 opList=system.cpu.fuPool.FUList8.opList [system.cpu.fuPool.FUList8.opList] type=OpDesc +eventq_index=0 issueLat=3 opClass=IprAccess opLat=3 @@ -441,6 +497,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -449,6 +506,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -463,17 +521,22 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] type=SparcInterrupts +eventq_index=0 [system.cpu.isa] type=SparcISA +eventq_index=0 [system.cpu.itb] type=SparcTLB +eventq_index=0 size=64 [system.cpu.l2cache] @@ -482,6 +545,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 
forward_snoops=true hit_latency=20 is_top_level=false @@ -490,6 +554,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -504,12 +569,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -519,6 +587,7 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -528,7 +597,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/insttest/bin/sparc/linux/insttest +eventq_index=0 +executable=/dist/test-progs/insttest/bin/sparc/linux/insttest gid=100 input=cin max_stack_size=67108864 @@ -542,11 +612,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -566,6 +638,7 @@ conf_table_reported=true device_bus_width=8 device_rowbuffer_size=1024 devices_per_rank=8 +eventq_index=0 in_addr_map=true mem_sched_policy=frfcfs null=false @@ -577,17 +650,21 @@ static_backend_latency=10000 static_frontend_latency=10000 tBURST=5000 tCL=13750 +tRAS=35000 tRCD=13750 tREFI=7800000 tRFC=300000 tRP=13750 +tRRD=6250 tWTR=7500 tXAW=40000 write_buffer_size=32 -write_thresh_perc=70 +write_high_thresh_perc=70 +write_low_thresh_perc=0 port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/02.insttest/ref/sparc/linux/o3-timing/simerr b/tests/quick/se/02.insttest/ref/sparc/linux/o3-timing/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/02.insttest/ref/sparc/linux/o3-timing/simerr +++ b/tests/quick/se/02.insttest/ref/sparc/linux/o3-timing/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/02.insttest/ref/sparc/linux/o3-timing/simout b/tests/quick/se/02.insttest/ref/sparc/linux/o3-timing/simout index 3384dd19c..9f4e08c11 100755 --- a/tests/quick/se/02.insttest/ref/sparc/linux/o3-timing/simout +++ b/tests/quick/se/02.insttest/ref/sparc/linux/o3-timing/simout @@ -1,9 +1,9 @@ gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Oct 16 2013 01:31:26 -gem5 started Oct 16 2013 01:35:23 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:04:27 +gem5 started Jan 22 2014 17:29:34 +gem5 executing on u200540-lin command line: build/SPARC/gem5.opt -d build/SPARC/tests/opt/quick/se/02.insttest/sparc/linux/o3-timing -re tests/run.py build/SPARC/tests/opt/quick/se/02.insttest/sparc/linux/o3-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
@@ -18,4 +18,4 @@ LDTX: Passed LDTW: Passed STTW: Passed Done -Exiting @ tick 26524500 because target called exit() +Exiting @ tick 26616500 because target called exit() diff --git a/tests/quick/se/02.insttest/ref/sparc/linux/o3-timing/stats.txt b/tests/quick/se/02.insttest/ref/sparc/linux/o3-timing/stats.txt index dcf709c59..7bcabaaf6 100644 --- a/tests/quick/se/02.insttest/ref/sparc/linux/o3-timing/stats.txt +++ b/tests/quick/se/02.insttest/ref/sparc/linux/o3-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000027 # Nu sim_ticks 26616500 # Number of ticks simulated final_tick 26616500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 75478 # Simulator instruction rate (inst/s) -host_op_rate 75473 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 139143595 # Simulator tick rate (ticks/s) -host_mem_usage 260732 # Number of bytes of host memory used -host_seconds 0.19 # Real time elapsed on the host +host_inst_rate 19079 # Simulator instruction rate (inst/s) +host_op_rate 19079 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 35176168 # Simulator tick rate (ticks/s) +host_mem_usage 237844 # Number of bytes of host memory used +host_seconds 0.76 # Real time elapsed on the host sim_insts 14436 # Number of instructions simulated sim_ops 14436 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 21440 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 9408 # Number of bytes read from this memory system.physmem.bytes_read::total 30848 # Number of bytes read from this memory @@ -215,6 +217,7 @@ system.membus.reqLayer0.occupancy 610000 # La system.membus.reqLayer0.utilization 2.3 # Layer utilization (%) system.membus.respLayer1.occupancy 4495750 # Layer occupancy (ticks) system.membus.respLayer1.utilization 16.9 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.branchPred.lookups 6713 # Number of BP lookups system.cpu.branchPred.condPredicted 4454 # Number of conditional branches predicted system.cpu.branchPred.condIncorrect 1076 # Number of conditional branches incorrect @@ -506,6 +509,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 187.514405 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.091560 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.091560 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 337 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 92 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 245 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.164551 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 11095 # Number of tag accesses +system.cpu.icache.tags.data_accesses 11095 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 4872 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 4872 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 4872 # number of demand (read+write) hits @@ -592,6 +601,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 34.456006 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.005704 # Average percentage 
of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.001052 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.006755 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 399 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 111 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 288 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.012177 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 4354 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 4354 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 2 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 2 # number of ReadReq hits system.cpu.l2cache.demand_hits::cpu.inst 2 # number of demand (read+write) hits @@ -715,6 +730,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 99.106073 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.024196 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.024196 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 147 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 23 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 124 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.035889 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 9219 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 9219 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 2962 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 2962 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 1033 # number of WriteReq hits diff --git a/tests/quick/se/02.insttest/ref/sparc/linux/simple-atomic/config.ini b/tests/quick/se/02.insttest/ref/sparc/linux/simple-atomic/config.ini index 72cf29eda..4f177ecd0 100644 --- a/tests/quick/se/02.insttest/ref/sparc/linux/simple-atomic/config.ini +++ b/tests/quick/se/02.insttest/ref/sparc/linux/simple-atomic/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 fastmem=false function_trace=false function_trace_start=0 @@ -74,20 +79,25 @@ icache_port=system.membus.slave[1] [system.cpu.dtb] type=SparcTLB +eventq_index=0 size=64 [system.cpu.interrupts] type=SparcInterrupts +eventq_index=0 [system.cpu.isa] type=SparcISA +eventq_index=0 [system.cpu.itb] type=SparcTLB +eventq_index=0 size=64 [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -97,7 +107,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/insttest/bin/sparc/linux/insttest 
+eventq_index=0 +executable=/dist/test-progs/insttest/bin/sparc/linux/insttest gid=100 input=cin max_stack_size=67108864 @@ -111,11 +122,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -128,6 +141,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -137,5 +151,6 @@ port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/02.insttest/ref/sparc/linux/simple-atomic/simerr b/tests/quick/se/02.insttest/ref/sparc/linux/simple-atomic/simerr index 7edd901b2..1a4f96712 100755 --- a/tests/quick/se/02.insttest/ref/sparc/linux/simple-atomic/simerr +++ b/tests/quick/se/02.insttest/ref/sparc/linux/simple-atomic/simerr @@ -1,3 +1 @@ -warn: CoherentBus system.membus has no snooping ports attached! warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/02.insttest/ref/sparc/linux/simple-atomic/simout b/tests/quick/se/02.insttest/ref/sparc/linux/simple-atomic/simout index 24f0721ea..13e87da70 100755 --- a/tests/quick/se/02.insttest/ref/sparc/linux/simple-atomic/simout +++ b/tests/quick/se/02.insttest/ref/sparc/linux/simple-atomic/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/SPARC/tests/opt/quick/se/02.insttest/sparc/linux/simple-atomic/simout -Redirecting stderr to build/SPARC/tests/opt/quick/se/02.insttest/sparc/linux/simple-atomic/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 06:07:13 -gem5 started Sep 22 2013 06:11:45 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:04:27 +gem5 started Jan 22 2014 17:29:44 +gem5 executing on u200540-lin command line: build/SPARC/gem5.opt -d build/SPARC/tests/opt/quick/se/02.insttest/sparc/linux/simple-atomic -re tests/run.py build/SPARC/tests/opt/quick/se/02.insttest/sparc/linux/simple-atomic Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
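The simple-atomic config.ini above now dumps the clock and voltage domains explicitly (system.clk_domain with clock=1000 ticks, system.cpu_clk_domain with clock=500 ticks, and system.voltage_domain with voltage=1.000000), and the corresponding stats.txt entries below report these values as statistics. For orientation, a minimal sketch of how such a domain hierarchy is typically built in a gem5 run script is shown here; it assumes a gem5 source tree where m5.objects exposes System, SrcClockDomain and VoltageDomain, which the type= fields in the dump above indicate.

```python
# Minimal sketch of the clock/voltage-domain setup reflected in the
# config.ini dumps: at sim_freq = 10^12 ticks/s, a 1000-tick period is
# 1 GHz (system domain) and a 500-tick period is 2 GHz (CPU domain).
from m5.objects import System, SrcClockDomain, VoltageDomain

system = System()

# One voltage domain shared by both clock domains, as in the dump
# (voltage=1.000000 V).
system.voltage_domain = VoltageDomain(voltage='1V')

# System-wide clock domain: 1000-tick period -> 1 GHz.
system.clk_domain = SrcClockDomain(clock='1GHz',
                                   voltage_domain=system.voltage_domain)

# Faster CPU-side clock domain: 500-tick period -> 2 GHz.
system.cpu_clk_domain = SrcClockDomain(clock='2GHz',
                                       voltage_domain=system.voltage_domain)
```

With this structure in place, the per-domain clock periods and the domain voltage are what now show up in stats.txt as system.clk_domain.clock, system.cpu_clk_domain.clock and system.voltage_domain.voltage.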
diff --git a/tests/quick/se/02.insttest/ref/sparc/linux/simple-atomic/stats.txt b/tests/quick/se/02.insttest/ref/sparc/linux/simple-atomic/stats.txt index 082962efb..9bfbb56dc 100644 --- a/tests/quick/se/02.insttest/ref/sparc/linux/simple-atomic/stats.txt +++ b/tests/quick/se/02.insttest/ref/sparc/linux/simple-atomic/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000008 # Nu sim_ticks 7612000 # Number of ticks simulated final_tick 7612000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 451796 # Simulator instruction rate (inst/s) -host_op_rate 451441 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 226479305 # Simulator tick rate (ticks/s) -host_mem_usage 222832 # Number of bytes of host memory used -host_seconds 0.03 # Real time elapsed on the host +host_inst_rate 25833 # Simulator instruction rate (inst/s) +host_op_rate 25832 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 12968653 # Simulator tick rate (ticks/s) +host_mem_usage 227056 # Number of bytes of host memory used +host_seconds 0.59 # Real time elapsed on the host sim_insts 15162 # Number of instructions simulated sim_ops 15162 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 60828 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 11342 # Number of bytes read from this memory system.physmem.bytes_read::total 72170 # Number of bytes read from this memory @@ -38,6 +40,7 @@ system.physmem.bw_total::total 10668943773 # To system.membus.throughput 10676563321 # Throughput (bytes/s) system.membus.data_through_bus 81270 # Total data (bytes) system.membus.snoop_data_through_bus 0 # Total snoop data (bytes) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.workload.num_syscalls 18 # Number of system calls system.cpu.numCycles 15225 # number of cpu cycles simulated system.cpu.numWorkItemsStarted 0 # number of work items this cpu started diff --git a/tests/quick/se/02.insttest/ref/sparc/linux/simple-timing/config.ini b/tests/quick/se/02.insttest/ref/sparc/linux/simple-timing/config.ini index 77bbda99d..f1f91f6d4 100644 --- a/tests/quick/se/02.insttest/ref/sparc/linux/simple-timing/config.ini +++ b/tests/quick/se/02.insttest/ref/sparc/linux/simple-timing/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu cpu_clk_domain membus physmem voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu.dtb +eventq_index=0 function_trace=false function_trace_start=0 interrupts=system.cpu.interrupts @@ -71,6 +76,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -79,6 +85,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=262144 
system=system tags=system.cpu.dcache.tags @@ -93,11 +100,14 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=262144 [system.cpu.dtb] type=SparcTLB +eventq_index=0 size=64 [system.cpu.icache] @@ -106,6 +116,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=2 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -114,6 +125,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=131072 system=system tags=system.cpu.icache.tags @@ -128,17 +140,22 @@ type=LRU assoc=2 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=131072 [system.cpu.interrupts] type=SparcInterrupts +eventq_index=0 [system.cpu.isa] type=SparcISA +eventq_index=0 [system.cpu.itb] type=SparcTLB +eventq_index=0 size=64 [system.cpu.l2cache] @@ -147,6 +164,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -155,6 +173,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=2097152 system=system tags=system.cpu.l2cache.tags @@ -169,12 +188,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=2097152 [system.cpu.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -184,6 +206,7 @@ slave=system.cpu.icache.mem_side system.cpu.dcache.mem_side [system.cpu.tracer] type=ExeTracer +eventq_index=0 [system.cpu.workload] type=LiveProcess @@ -193,7 +216,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/insttest/bin/sparc/linux/insttest +eventq_index=0 +executable=/dist/test-progs/insttest/bin/sparc/linux/insttest gid=100 input=cin max_stack_size=67108864 @@ -207,11 +231,13 @@ uid=100 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -224,6 +250,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -233,5 +260,6 @@ port=system.membus.master[0] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/02.insttest/ref/sparc/linux/simple-timing/simerr b/tests/quick/se/02.insttest/ref/sparc/linux/simple-timing/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/02.insttest/ref/sparc/linux/simple-timing/simerr +++ b/tests/quick/se/02.insttest/ref/sparc/linux/simple-timing/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/02.insttest/ref/sparc/linux/simple-timing/simout b/tests/quick/se/02.insttest/ref/sparc/linux/simple-timing/simout index de66adf5c..543b5de56 100755 --- a/tests/quick/se/02.insttest/ref/sparc/linux/simple-timing/simout +++ b/tests/quick/se/02.insttest/ref/sparc/linux/simple-timing/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/SPARC/tests/opt/quick/se/02.insttest/sparc/linux/simple-timing/simout -Redirecting stderr to build/SPARC/tests/opt/quick/se/02.insttest/sparc/linux/simple-timing/simerr 
gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 06:07:13 -gem5 started Sep 22 2013 06:07:35 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:04:27 +gem5 started Jan 22 2014 17:29:44 +gem5 executing on u200540-lin command line: build/SPARC/gem5.opt -d build/SPARC/tests/opt/quick/se/02.insttest/sparc/linux/simple-timing -re tests/run.py build/SPARC/tests/opt/quick/se/02.insttest/sparc/linux/simple-timing Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/02.insttest/ref/sparc/linux/simple-timing/stats.txt b/tests/quick/se/02.insttest/ref/sparc/linux/simple-timing/stats.txt index 45bd7d946..6f76b1103 100644 --- a/tests/quick/se/02.insttest/ref/sparc/linux/simple-timing/stats.txt +++ b/tests/quick/se/02.insttest/ref/sparc/linux/simple-timing/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000041 # Nu sim_ticks 41368000 # Number of ticks simulated final_tick 41368000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 554996 # Simulator instruction rate (inst/s) -host_op_rate 554737 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 1512828488 # Simulator tick rate (ticks/s) -host_mem_usage 230824 # Number of bytes of host memory used -host_seconds 0.03 # Real time elapsed on the host +host_inst_rate 30355 # Simulator instruction rate (inst/s) +host_op_rate 30353 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 82811452 # Simulator tick rate (ticks/s) +host_mem_usage 236788 # Number of bytes of host memory used +host_seconds 0.50 # Real time elapsed on the host sim_insts 15162 # Number of instructions simulated sim_ops 15162 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu.inst 17792 # Number of bytes read from this memory system.physmem.bytes_read::cpu.data 8832 # Number of bytes read from this memory system.physmem.bytes_read::total 26624 # Number of bytes read from this memory @@ -42,6 +44,7 @@ system.membus.reqLayer0.occupancy 416000 # La system.membus.reqLayer0.utilization 1.0 # Layer utilization (%) system.membus.respLayer1.occupancy 3744000 # Layer occupancy (ticks) system.membus.respLayer1.utilization 9.1 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.cpu.workload.num_syscalls 18 # Number of system calls system.cpu.numCycles 82736 # number of cpu cycles simulated system.cpu.numWorkItemsStarted 0 # number of work items this cpu started @@ -74,6 +77,12 @@ system.cpu.icache.tags.warmup_cycle 0 # Cy system.cpu.icache.tags.occ_blocks::cpu.inst 153.782734 # Average occupied blocks per requestor system.cpu.icache.tags.occ_percent::cpu.inst 0.075089 # Average percentage of cache occupancy system.cpu.icache.tags.occ_percent::total 0.075089 # Average percentage of cache occupancy +system.cpu.icache.tags.occ_task_id_blocks::1024 280 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::0 46 # Occupied blocks per task id +system.cpu.icache.tags.age_task_id_blocks_1024::1 234 # Occupied blocks per task id +system.cpu.icache.tags.occ_task_id_percent::1024 0.136719 # Percentage of cache occupancy per task id +system.cpu.icache.tags.tag_accesses 30696 # Number of tag accesses 
+system.cpu.icache.tags.data_accesses 30696 # Number of data accesses system.cpu.icache.ReadReq_hits::cpu.inst 14928 # number of ReadReq hits system.cpu.icache.ReadReq_hits::total 14928 # number of ReadReq hits system.cpu.icache.demand_hits::cpu.inst 14928 # number of demand (read+write) hits @@ -154,6 +163,12 @@ system.cpu.l2cache.tags.occ_blocks::cpu.data 31.521152 system.cpu.l2cache.tags.occ_percent::cpu.inst 0.004673 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::cpu.data 0.000962 # Average percentage of cache occupancy system.cpu.l2cache.tags.occ_percent::total 0.005635 # Average percentage of cache occupancy +system.cpu.l2cache.tags.occ_task_id_blocks::1024 331 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::0 56 # Occupied blocks per task id +system.cpu.l2cache.tags.age_task_id_blocks_1024::1 275 # Occupied blocks per task id +system.cpu.l2cache.tags.occ_task_id_percent::1024 0.010101 # Percentage of cache occupancy per task id +system.cpu.l2cache.tags.tag_accesses 3760 # Number of tag accesses +system.cpu.l2cache.tags.data_accesses 3760 # Number of data accesses system.cpu.l2cache.ReadReq_hits::cpu.inst 2 # number of ReadReq hits system.cpu.l2cache.ReadReq_hits::total 2 # number of ReadReq hits system.cpu.l2cache.demand_hits::cpu.inst 2 # number of demand (read+write) hits @@ -277,6 +292,12 @@ system.cpu.dcache.tags.warmup_cycle 0 # Cy system.cpu.dcache.tags.occ_blocks::cpu.data 97.994344 # Average occupied blocks per requestor system.cpu.dcache.tags.occ_percent::cpu.data 0.023924 # Average percentage of cache occupancy system.cpu.dcache.tags.occ_percent::total 0.023924 # Average percentage of cache occupancy +system.cpu.dcache.tags.occ_task_id_blocks::1024 138 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::0 11 # Occupied blocks per task id +system.cpu.dcache.tags.age_task_id_blocks_1024::1 127 # Occupied blocks per task id +system.cpu.dcache.tags.occ_task_id_percent::1024 0.033691 # Percentage of cache occupancy per task id +system.cpu.dcache.tags.tag_accesses 7484 # Number of tag accesses +system.cpu.dcache.tags.data_accesses 7484 # Number of data accesses system.cpu.dcache.ReadReq_hits::cpu.data 2172 # number of ReadReq hits system.cpu.dcache.ReadReq_hits::total 2172 # number of ReadReq hits system.cpu.dcache.WriteReq_hits::cpu.data 1357 # number of WriteReq hits diff --git a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/o3-timing-mp/config.ini b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/o3-timing-mp/config.ini index de3e77970..1b54fd806 100644 --- a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/o3-timing-mp/config.ini +++ b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/o3-timing-mp/config.ini @@ -159,6 +159,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu0.dcache.tags @@ -175,6 +176,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu0.dtb] @@ -504,6 +506,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu0.icache.tags @@ -520,6 +523,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu0.interrupts] @@ -548,7 +552,7 @@ env= errout=cerr euid=100 eventq_index=0 
-executable=tests/test-progs/m5threads/bin/sparc/linux/test_atomic +executable=/dist/test-progs/m5threads/bin/sparc/linux/test_atomic gid=100 input=cin max_stack_size=67108864 @@ -679,6 +683,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu1.dcache.tags @@ -695,6 +700,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu1.dtb] @@ -1024,6 +1030,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu1.icache.tags @@ -1040,6 +1047,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu1.interrupts] @@ -1179,6 +1187,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu2.dcache.tags @@ -1195,6 +1204,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu2.dtb] @@ -1524,6 +1534,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu2.icache.tags @@ -1540,6 +1551,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu2.interrupts] @@ -1679,6 +1691,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu3.dcache.tags @@ -1695,6 +1708,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu3.dtb] @@ -2024,6 +2038,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu3.icache.tags @@ -2040,6 +2055,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu3.interrupts] @@ -2080,6 +2096,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=4194304 system=system tags=system.l2c.tags @@ -2096,6 +2113,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=20 +sequential_access=false size=4194304 [system.membus] diff --git a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/o3-timing-mp/simerr b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/o3-timing-mp/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/o3-timing-mp/simerr +++ b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/o3-timing-mp/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/o3-timing-mp/simout b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/o3-timing-mp/simout index 0b0b9c7cf..26a87e082 100755 --- a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/o3-timing-mp/simout +++ b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/o3-timing-mp/simout @@ -1,26 +1,26 @@ gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. 
-gem5 compiled Oct 16 2013 01:31:26 -gem5 started Oct 16 2013 01:35:27 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:04:27 +gem5 started Jan 22 2014 17:29:46 +gem5 executing on u200540-lin command line: build/SPARC/gem5.opt -d build/SPARC/tests/opt/quick/se/40.m5threads-test-atomic/sparc/linux/o3-timing-mp -re tests/run.py build/SPARC/tests/opt/quick/se/40.m5threads-test-atomic/sparc/linux/o3-timing-mp Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... Init done [Iteration 1, Thread 2] Got lock [Iteration 1, Thread 2] Critical section done, previously next=0, now next=2 -[Iteration 1, Thread 1] Got lock -[Iteration 1, Thread 1] Critical section done, previously next=2, now next=1 [Iteration 1, Thread 3] Got lock -[Iteration 1, Thread 3] Critical section done, previously next=1, now next=3 +[Iteration 1, Thread 3] Critical section done, previously next=2, now next=3 +[Iteration 1, Thread 1] Got lock +[Iteration 1, Thread 1] Critical section done, previously next=3, now next=1 Iteration 1 completed -[Iteration 2, Thread 3] Got lock -[Iteration 2, Thread 3] Critical section done, previously next=0, now next=3 -[Iteration 2, Thread 2] Got lock -[Iteration 2, Thread 2] Critical section done, previously next=3, now next=2 [Iteration 2, Thread 1] Got lock -[Iteration 2, Thread 1] Critical section done, previously next=2, now next=1 +[Iteration 2, Thread 1] Critical section done, previously next=0, now next=1 +[Iteration 2, Thread 2] Got lock +[Iteration 2, Thread 2] Critical section done, previously next=1, now next=2 +[Iteration 2, Thread 3] Got lock +[Iteration 2, Thread 3] Critical section done, previously next=2, now next=3 Iteration 2 completed [Iteration 3, Thread 3] Got lock [Iteration 3, Thread 3] Critical section done, previously next=0, now next=3 @@ -29,47 +29,47 @@ Iteration 2 completed [Iteration 3, Thread 2] Got lock [Iteration 3, Thread 2] Critical section done, previously next=1, now next=2 Iteration 3 completed -[Iteration 4, Thread 1] Got lock -[Iteration 4, Thread 1] Critical section done, previously next=0, now next=1 [Iteration 4, Thread 3] Got lock -[Iteration 4, Thread 3] Critical section done, previously next=1, now next=3 +[Iteration 4, Thread 3] Critical section done, previously next=0, now next=3 [Iteration 4, Thread 2] Got lock [Iteration 4, Thread 2] Critical section done, previously next=3, now next=2 +[Iteration 4, Thread 1] Got lock +[Iteration 4, Thread 1] Critical section done, previously next=2, now next=1 Iteration 4 completed -[Iteration 5, Thread 3] Got lock -[Iteration 5, Thread 3] Critical section done, previously next=0, now next=3 [Iteration 5, Thread 2] Got lock -[Iteration 5, Thread 2] Critical section done, previously next=3, now next=2 +[Iteration 5, Thread 2] Critical section done, previously next=0, now next=2 [Iteration 5, Thread 1] Got lock [Iteration 5, Thread 1] Critical section done, previously next=2, now next=1 +[Iteration 5, Thread 3] Got lock +[Iteration 5, Thread 3] Critical section done, previously next=1, now next=3 Iteration 5 completed +[Iteration 6, Thread 3] Got lock +[Iteration 6, Thread 3] Critical section done, previously next=0, now next=3 [Iteration 6, Thread 1] Got lock -[Iteration 6, Thread 1] Critical section done, previously next=0, now next=1 +[Iteration 6, Thread 1] Critical section done, previously next=3, now next=1 [Iteration 6, Thread 2] Got lock [Iteration 6, Thread 2] Critical section done, previously next=1, now next=2 -[Iteration 6, Thread 3] Got 
lock -[Iteration 6, Thread 3] Critical section done, previously next=2, now next=3 Iteration 6 completed -[Iteration 7, Thread 3] Got lock -[Iteration 7, Thread 3] Critical section done, previously next=0, now next=3 [Iteration 7, Thread 2] Got lock -[Iteration 7, Thread 2] Critical section done, previously next=3, now next=2 +[Iteration 7, Thread 2] Critical section done, previously next=0, now next=2 [Iteration 7, Thread 1] Got lock [Iteration 7, Thread 1] Critical section done, previously next=2, now next=1 +[Iteration 7, Thread 3] Got lock +[Iteration 7, Thread 3] Critical section done, previously next=1, now next=3 Iteration 7 completed -[Iteration 8, Thread 2] Got lock -[Iteration 8, Thread 2] Critical section done, previously next=0, now next=2 [Iteration 8, Thread 3] Got lock -[Iteration 8, Thread 3] Critical section done, previously next=2, now next=3 +[Iteration 8, Thread 3] Critical section done, previously next=0, now next=3 [Iteration 8, Thread 1] Got lock [Iteration 8, Thread 1] Critical section done, previously next=3, now next=1 +[Iteration 8, Thread 2] Got lock +[Iteration 8, Thread 2] Critical section done, previously next=1, now next=2 Iteration 8 completed -[Iteration 9, Thread 3] Got lock -[Iteration 9, Thread 3] Critical section done, previously next=0, now next=3 -[Iteration 9, Thread 2] Got lock -[Iteration 9, Thread 2] Critical section done, previously next=3, now next=2 [Iteration 9, Thread 1] Got lock -[Iteration 9, Thread 1] Critical section done, previously next=2, now next=1 +[Iteration 9, Thread 1] Critical section done, previously next=0, now next=1 +[Iteration 9, Thread 2] Got lock +[Iteration 9, Thread 2] Critical section done, previously next=1, now next=2 +[Iteration 9, Thread 3] Got lock +[Iteration 9, Thread 3] Critical section done, previously next=2, now next=3 Iteration 9 completed [Iteration 10, Thread 3] Got lock [Iteration 10, Thread 3] Critical section done, previously next=0, now next=3 @@ -79,4 +79,4 @@ Iteration 9 completed [Iteration 10, Thread 1] Critical section done, previously next=2, now next=1 Iteration 10 completed PASSED :-) -Exiting @ tick 110804500 because target called exit() +Exiting @ tick 111025500 because target called exit() diff --git a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/o3-timing-mp/stats.txt b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/o3-timing-mp/stats.txt index 34d426284..db4434e5e 100644 --- a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/o3-timing-mp/stats.txt +++ b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/o3-timing-mp/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000111 # Nu sim_ticks 111025500 # Number of ticks simulated final_tick 111025500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 77886 # Simulator instruction rate (inst/s) -host_op_rate 77886 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 8289137 # Simulator tick rate (ticks/s) -host_mem_usage 295244 # Number of bytes of host memory used -host_seconds 13.39 # Real time elapsed on the host +host_inst_rate 93081 # Simulator instruction rate (inst/s) +host_op_rate 93081 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 9906240 # Simulator tick rate (ticks/s) +host_mem_usage 253180 # Number of bytes of host memory used +host_seconds 11.21 # Real time elapsed on the host sim_insts 1043212 # Number of instructions simulated sim_ops 1043212 # Number of ops (including 
micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu0.inst 22784 # Number of bytes read from this memory system.physmem.bytes_read::cpu0.data 10752 # Number of bytes read from this memory system.physmem.bytes_read::cpu1.inst 640 # Number of bytes read from this memory @@ -246,6 +248,7 @@ system.membus.reqLayer0.occupancy 932000 # La system.membus.reqLayer0.utilization 0.8 # Layer utilization (%) system.membus.respLayer1.occupancy 6290425 # Layer occupancy (ticks) system.membus.respLayer1.utilization 5.7 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.l2c.tags.replacements 0 # number of replacements system.l2c.tags.tagsinuse 417.163639 # Cycle average of tags in use system.l2c.tags.total_refs 1442 # Total number of references to valid blocks. @@ -271,6 +274,13 @@ system.l2c.tags.occ_percent::cpu2.data 0.000083 # Av system.l2c.tags.occ_percent::cpu3.inst 0.000047 # Average percentage of cache occupancy system.l2c.tags.occ_percent::cpu3.data 0.000011 # Average percentage of cache occupancy system.l2c.tags.occ_percent::total 0.006365 # Average percentage of cache occupancy +system.l2c.tags.occ_task_id_blocks::1024 526 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::0 51 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::1 293 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::2 182 # Occupied blocks per task id +system.l2c.tags.occ_task_id_percent::1024 0.008026 # Percentage of cache occupancy per task id +system.l2c.tags.tag_accesses 18228 # Number of tag accesses +system.l2c.tags.data_accesses 18228 # Number of data accesses system.l2c.ReadReq_hits::cpu0.inst 229 # number of ReadReq hits system.l2c.ReadReq_hits::cpu0.data 5 # number of ReadReq hits system.l2c.ReadReq_hits::cpu1.inst 412 # number of ReadReq hits @@ -970,6 +980,13 @@ system.cpu0.icache.tags.warmup_cycle 0 # Cy system.cpu0.icache.tags.occ_blocks::cpu0.inst 241.312438 # Average occupied blocks per requestor system.cpu0.icache.tags.occ_percent::cpu0.inst 0.471313 # Average percentage of cache occupancy system.cpu0.icache.tags.occ_percent::total 0.471313 # Average percentage of cache occupancy +system.cpu0.icache.tags.occ_task_id_blocks::1024 290 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::0 60 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::1 145 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::2 85 # Occupied blocks per task id +system.cpu0.icache.tags.occ_task_id_percent::1024 0.566406 # Percentage of cache occupancy per task id +system.cpu0.icache.tags.tag_accesses 6456 # Number of tag accesses +system.cpu0.icache.tags.data_accesses 6456 # Number of data accesses system.cpu0.icache.ReadReq_hits::cpu0.inst 5113 # number of ReadReq hits system.cpu0.icache.ReadReq_hits::total 5113 # number of ReadReq hits system.cpu0.icache.demand_hits::cpu0.inst 5113 # number of demand (read+write) hits @@ -1054,6 +1071,13 @@ system.cpu0.dcache.tags.warmup_cycle 0 # Cy system.cpu0.dcache.tags.occ_blocks::cpu0.data 142.026071 # Average occupied blocks per requestor system.cpu0.dcache.tags.occ_percent::cpu0.data 0.277395 # Average percentage of cache occupancy system.cpu0.dcache.tags.occ_percent::total 0.277395 # Average percentage of cache occupancy +system.cpu0.dcache.tags.occ_task_id_blocks::1024 168 # Occupied blocks per task id 
+system.cpu0.dcache.tags.age_task_id_blocks_1024::0 17 # Occupied blocks per task id +system.cpu0.dcache.tags.age_task_id_blocks_1024::1 51 # Occupied blocks per task id +system.cpu0.dcache.tags.age_task_id_blocks_1024::2 100 # Occupied blocks per task id +system.cpu0.dcache.tags.occ_task_id_percent::1024 0.328125 # Percentage of cache occupancy per task id +system.cpu0.dcache.tags.tag_accesses 627950 # Number of tag accesses +system.cpu0.dcache.tags.data_accesses 627950 # Number of data accesses system.cpu0.dcache.ReadReq_hits::cpu0.data 79085 # number of ReadReq hits system.cpu0.dcache.ReadReq_hits::total 79085 # number of ReadReq hits system.cpu0.dcache.WriteReq_hits::cpu0.data 76817 # number of WriteReq hits @@ -1447,6 +1471,12 @@ system.cpu1.icache.tags.warmup_cycle 0 # Cy system.cpu1.icache.tags.occ_blocks::cpu1.inst 76.730517 # Average occupied blocks per requestor system.cpu1.icache.tags.occ_percent::cpu1.inst 0.149864 # Average percentage of cache occupancy system.cpu1.icache.tags.occ_percent::total 0.149864 # Average percentage of cache occupancy +system.cpu1.icache.tags.occ_task_id_blocks::1024 110 # Occupied blocks per task id +system.cpu1.icache.tags.age_task_id_blocks_1024::0 11 # Occupied blocks per task id +system.cpu1.icache.tags.age_task_id_blocks_1024::1 99 # Occupied blocks per task id +system.cpu1.icache.tags.occ_task_id_percent::1024 0.214844 # Percentage of cache occupancy per task id +system.cpu1.icache.tags.tag_accesses 23807 # Number of tag accesses +system.cpu1.icache.tags.data_accesses 23807 # Number of data accesses system.cpu1.icache.ReadReq_hits::cpu1.inst 22903 # number of ReadReq hits system.cpu1.icache.ReadReq_hits::total 22903 # number of ReadReq hits system.cpu1.icache.demand_hits::cpu1.inst 22903 # number of demand (read+write) hits @@ -1531,6 +1561,12 @@ system.cpu1.dcache.tags.warmup_cycle 0 # Cy system.cpu1.dcache.tags.occ_blocks::cpu1.data 23.664777 # Average occupied blocks per requestor system.cpu1.dcache.tags.occ_percent::cpu1.data 0.046220 # Average percentage of cache occupancy system.cpu1.dcache.tags.occ_percent::total 0.046220 # Average percentage of cache occupancy +system.cpu1.dcache.tags.occ_task_id_blocks::1024 29 # Occupied blocks per task id +system.cpu1.dcache.tags.age_task_id_blocks_1024::0 1 # Occupied blocks per task id +system.cpu1.dcache.tags.age_task_id_blocks_1024::1 28 # Occupied blocks per task id +system.cpu1.dcache.tags.occ_task_id_percent::1024 0.056641 # Percentage of cache occupancy per task id +system.cpu1.dcache.tags.tag_accesses 290684 # Number of tag accesses +system.cpu1.dcache.tags.data_accesses 290684 # Number of data accesses system.cpu1.dcache.ReadReq_hits::cpu1.data 41736 # number of ReadReq hits system.cpu1.dcache.ReadReq_hits::total 41736 # number of ReadReq hits system.cpu1.dcache.WriteReq_hits::cpu1.data 30310 # number of WriteReq hits @@ -1921,6 +1957,12 @@ system.cpu2.icache.tags.warmup_cycle 0 # Cy system.cpu2.icache.tags.occ_blocks::cpu2.inst 82.236554 # Average occupied blocks per requestor system.cpu2.icache.tags.occ_percent::cpu2.inst 0.160618 # Average percentage of cache occupancy system.cpu2.icache.tags.occ_percent::total 0.160618 # Average percentage of cache occupancy +system.cpu2.icache.tags.occ_task_id_blocks::1024 108 # Occupied blocks per task id +system.cpu2.icache.tags.age_task_id_blocks_1024::0 11 # Occupied blocks per task id +system.cpu2.icache.tags.age_task_id_blocks_1024::1 97 # Occupied blocks per task id +system.cpu2.icache.tags.occ_task_id_percent::1024 0.210938 # Percentage of 
cache occupancy per task id +system.cpu2.icache.tags.tag_accesses 20176 # Number of tag accesses +system.cpu2.icache.tags.data_accesses 20176 # Number of data accesses system.cpu2.icache.ReadReq_hits::cpu2.inst 19258 # number of ReadReq hits system.cpu2.icache.ReadReq_hits::total 19258 # number of ReadReq hits system.cpu2.icache.demand_hits::cpu2.inst 19258 # number of demand (read+write) hits @@ -2005,6 +2047,11 @@ system.cpu2.dcache.tags.warmup_cycle 0 # Cy system.cpu2.dcache.tags.occ_blocks::cpu2.data 26.142591 # Average occupied blocks per requestor system.cpu2.dcache.tags.occ_percent::cpu2.data 0.051060 # Average percentage of cache occupancy system.cpu2.dcache.tags.occ_percent::total 0.051060 # Average percentage of cache occupancy +system.cpu2.dcache.tags.occ_task_id_blocks::1024 28 # Occupied blocks per task id +system.cpu2.dcache.tags.age_task_id_blocks_1024::1 28 # Occupied blocks per task id +system.cpu2.dcache.tags.occ_task_id_percent::1024 0.054688 # Percentage of cache occupancy per task id +system.cpu2.dcache.tags.tag_accesses 328789 # Number of tag accesses +system.cpu2.dcache.tags.data_accesses 328789 # Number of data accesses system.cpu2.dcache.ReadReq_hits::cpu2.data 45613 # number of ReadReq hits system.cpu2.dcache.ReadReq_hits::total 45613 # number of ReadReq hits system.cpu2.dcache.WriteReq_hits::cpu2.data 35966 # number of WriteReq hits @@ -2396,6 +2443,12 @@ system.cpu3.icache.tags.warmup_cycle 0 # Cy system.cpu3.icache.tags.occ_blocks::cpu3.inst 79.942822 # Average occupied blocks per requestor system.cpu3.icache.tags.occ_percent::cpu3.inst 0.156138 # Average percentage of cache occupancy system.cpu3.icache.tags.occ_percent::total 0.156138 # Average percentage of cache occupancy +system.cpu3.icache.tags.occ_task_id_blocks::1024 110 # Occupied blocks per task id +system.cpu3.icache.tags.age_task_id_blocks_1024::0 10 # Occupied blocks per task id +system.cpu3.icache.tags.age_task_id_blocks_1024::1 100 # Occupied blocks per task id +system.cpu3.icache.tags.occ_task_id_percent::1024 0.214844 # Percentage of cache occupancy per task id +system.cpu3.icache.tags.tag_accesses 20994 # Number of tag accesses +system.cpu3.icache.tags.data_accesses 20994 # Number of data accesses system.cpu3.icache.ReadReq_hits::cpu3.inst 20090 # number of ReadReq hits system.cpu3.icache.ReadReq_hits::total 20090 # number of ReadReq hits system.cpu3.icache.demand_hits::cpu3.inst 20090 # number of demand (read+write) hits @@ -2480,6 +2533,11 @@ system.cpu3.dcache.tags.warmup_cycle 0 # Cy system.cpu3.dcache.tags.occ_blocks::cpu3.data 24.692248 # Average occupied blocks per requestor system.cpu3.dcache.tags.occ_percent::cpu3.data 0.048227 # Average percentage of cache occupancy system.cpu3.dcache.tags.occ_percent::total 0.048227 # Average percentage of cache occupancy +system.cpu3.dcache.tags.occ_task_id_blocks::1024 28 # Occupied blocks per task id +system.cpu3.dcache.tags.age_task_id_blocks_1024::1 28 # Occupied blocks per task id +system.cpu3.dcache.tags.occ_task_id_percent::1024 0.054688 # Percentage of cache occupancy per task id +system.cpu3.dcache.tags.tag_accesses 335202 # Number of tag accesses +system.cpu3.dcache.tags.data_accesses 335202 # Number of data accesses system.cpu3.dcache.ReadReq_hits::cpu3.data 46656 # number of ReadReq hits system.cpu3.dcache.ReadReq_hits::total 46656 # number of ReadReq hits system.cpu3.dcache.WriteReq_hits::cpu3.data 36553 # number of WriteReq hits diff --git a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-atomic-mp/config.ini 
b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-atomic-mp/config.ini index aa7fc3405..aa003cad3 100644 --- a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-atomic-mp/config.ini +++ b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-atomic-mp/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,6 +14,7 @@ children=clk_domain cpu0 cpu1 cpu2 cpu3 cpu_clk_domain l2c membus physmem toL2Bu boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.membus.slave[0] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu0] @@ -45,6 +49,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu0.dtb +eventq_index=0 fastmem=false function_trace=false function_trace_start=0 @@ -78,6 +83,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=4 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -86,6 +92,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu0.dcache.tags @@ -100,11 +107,14 @@ type=LRU assoc=4 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu0.dtb] type=SparcTLB +eventq_index=0 size=64 [system.cpu0.icache] @@ -113,6 +123,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=1 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -121,6 +132,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu0.icache.tags @@ -135,21 +147,27 @@ type=LRU assoc=1 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu0.interrupts] type=SparcInterrupts +eventq_index=0 [system.cpu0.isa] type=SparcISA +eventq_index=0 [system.cpu0.itb] type=SparcTLB +eventq_index=0 size=64 [system.cpu0.tracer] type=ExeTracer +eventq_index=0 [system.cpu0.workload] type=LiveProcess @@ -159,7 +177,8 @@ egid=100 env= errout=cerr euid=100 -executable=/dist/m5/regression/test-progs/m5threads/bin/sparc/linux/test_atomic +eventq_index=0 +executable=/dist/test-progs/m5threads/bin/sparc/linux/test_atomic gid=100 input=cin max_stack_size=67108864 @@ -180,6 +199,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu1.dtb +eventq_index=0 fastmem=false function_trace=false function_trace_start=0 @@ -213,6 +233,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=4 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -221,6 +242,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu1.dcache.tags @@ -235,11 +257,14 @@ type=LRU assoc=4 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu1.dtb] type=SparcTLB +eventq_index=0 size=64 [system.cpu1.icache] @@ -248,6 +273,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=1 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true 
hit_latency=2 is_top_level=true @@ -256,6 +282,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu1.icache.tags @@ -270,21 +297,27 @@ type=LRU assoc=1 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu1.interrupts] type=SparcInterrupts +eventq_index=0 [system.cpu1.isa] type=SparcISA +eventq_index=0 [system.cpu1.itb] type=SparcTLB +eventq_index=0 size=64 [system.cpu1.tracer] type=ExeTracer +eventq_index=0 [system.cpu2] type=AtomicSimpleCPU @@ -296,6 +329,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu2.dtb +eventq_index=0 fastmem=false function_trace=false function_trace_start=0 @@ -329,6 +363,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=4 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -337,6 +372,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu2.dcache.tags @@ -351,11 +387,14 @@ type=LRU assoc=4 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu2.dtb] type=SparcTLB +eventq_index=0 size=64 [system.cpu2.icache] @@ -364,6 +403,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=1 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -372,6 +412,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu2.icache.tags @@ -386,21 +427,27 @@ type=LRU assoc=1 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu2.interrupts] type=SparcInterrupts +eventq_index=0 [system.cpu2.isa] type=SparcISA +eventq_index=0 [system.cpu2.itb] type=SparcTLB +eventq_index=0 size=64 [system.cpu2.tracer] type=ExeTracer +eventq_index=0 [system.cpu3] type=AtomicSimpleCPU @@ -412,6 +459,7 @@ do_checkpoint_insts=true do_quiesce=true do_statistics_insts=true dtb=system.cpu3.dtb +eventq_index=0 fastmem=false function_trace=false function_trace_start=0 @@ -445,6 +493,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=4 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -453,6 +502,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu3.dcache.tags @@ -467,11 +517,14 @@ type=LRU assoc=4 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu3.dtb] type=SparcTLB +eventq_index=0 size=64 [system.cpu3.icache] @@ -480,6 +533,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=1 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -488,6 +542,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu3.icache.tags @@ -502,25 +557,32 @@ type=LRU assoc=1 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu3.interrupts] type=SparcInterrupts +eventq_index=0 [system.cpu3.isa] type=SparcISA +eventq_index=0 [system.cpu3.itb] type=SparcTLB +eventq_index=0 size=64 [system.cpu3.tracer] 
type=ExeTracer +eventq_index=0 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.l2c] @@ -529,6 +591,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -537,6 +600,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=4194304 system=system tags=system.l2c.tags @@ -551,12 +615,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=4194304 [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -569,6 +636,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -579,6 +647,7 @@ port=system.membus.master[0] [system.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -588,5 +657,6 @@ slave=system.cpu0.icache.mem_side system.cpu0.dcache.mem_side system.cpu1.icache [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-atomic-mp/simerr b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-atomic-mp/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-atomic-mp/simerr +++ b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-atomic-mp/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-atomic-mp/simout b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-atomic-mp/simout index a3bbfbbb8..79edf9761 100755 --- a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-atomic-mp/simout +++ b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-atomic-mp/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/SPARC/tests/opt/quick/se/40.m5threads-test-atomic/sparc/linux/simple-atomic-mp/simout -Redirecting stderr to build/SPARC/tests/opt/quick/se/40.m5threads-test-atomic/sparc/linux/simple-atomic-mp/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 06:07:13 -gem5 started Sep 22 2013 06:09:34 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:04:27 +gem5 started Jan 22 2014 17:29:52 +gem5 executing on u200540-lin command line: build/SPARC/gem5.opt -d build/SPARC/tests/opt/quick/se/40.m5threads-test-atomic/sparc/linux/simple-atomic-mp -re tests/run.py build/SPARC/tests/opt/quick/se/40.m5threads-test-atomic/sparc/linux/simple-atomic-mp Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
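The simple-atomic-mp config.ini above records, for every L1 and L2 cache, the new sequential_access flag alongside the existing size, assoc, hit_latency, response_latency and mshrs fields. A minimal sketch of a cache class carrying these parameters in a gem5 config script follows; it assumes a gem5 tree of this era where BaseCache is importable from m5.objects and sequential_access is a BaseCache boolean parameter, as the dumps suggest. Values mirror the cpu0 dcache section above.

```python
# Minimal sketch of the per-cache parameters dumped in the
# simple-atomic-mp config.ini (sequential_access is the newly dumped
# field; the rest were already present).
from m5.objects import BaseCache

class L1DCache(BaseCache):
    size = '32kB'              # size=32768 in the dump
    assoc = 4                  # assoc=4 for the dcaches
    hit_latency = 2            # cycles
    response_latency = 2       # cycles
    mshrs = 4
    is_top_level = True
    # Newly dumped flag: when False, tag and data arrays are probed in
    # parallel; when True, data is accessed only after a tag match.
    sequential_access = False

# Any remaining required BaseCache parameters would still need values
# before an L1DCache instance is actually elaborated in a full script.
```

The same flag appears on the shared l2c section (with size=4194304 and assoc=8), so the L2 class in a real script would differ only in those capacity and latency values.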
diff --git a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-atomic-mp/stats.txt b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-atomic-mp/stats.txt index 8179c99d9..6a51ab3a8 100644 --- a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-atomic-mp/stats.txt +++ b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-atomic-mp/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000088 # Nu sim_ticks 87707000 # Number of ticks simulated final_tick 87707000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 170274 # Simulator instruction rate (inst/s) -host_op_rate 170274 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 22048637 # Simulator tick rate (ticks/s) -host_mem_usage 246052 # Number of bytes of host memory used -host_seconds 3.98 # Real time elapsed on the host +host_inst_rate 84570 # Simulator instruction rate (inst/s) +host_op_rate 84570 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 10950892 # Simulator tick rate (ticks/s) +host_mem_usage 249088 # Number of bytes of host memory used +host_seconds 8.01 # Real time elapsed on the host sim_insts 677327 # Number of instructions simulated sim_ops 677327 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu0.inst 18048 # Number of bytes read from this memory system.physmem.bytes_read::cpu0.data 10560 # Number of bytes read from this memory system.physmem.bytes_read::cpu1.inst 3968 # Number of bytes read from this memory @@ -60,6 +62,7 @@ system.physmem.bw_total::total 407903588 # To system.membus.throughput 407903588 # Throughput (bytes/s) system.membus.data_through_bus 35776 # Total data (bytes) system.membus.snoop_data_through_bus 0 # Total snoop data (bytes) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.l2c.tags.replacements 0 # number of replacements system.l2c.tags.tagsinuse 366.582542 # Cycle average of tags in use system.l2c.tags.total_refs 1220 # Total number of references to valid blocks. 
@@ -85,6 +88,12 @@ system.l2c.tags.occ_percent::cpu2.data 0.000014 # Av system.l2c.tags.occ_percent::cpu3.inst 0.000015 # Average percentage of cache occupancy system.l2c.tags.occ_percent::cpu3.data 0.000014 # Average percentage of cache occupancy system.l2c.tags.occ_percent::total 0.005594 # Average percentage of cache occupancy +system.l2c.tags.occ_task_id_blocks::1024 421 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::0 48 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::1 373 # Occupied blocks per task id +system.l2c.tags.occ_task_id_percent::1024 0.006424 # Percentage of cache occupancy per task id +system.l2c.tags.tag_accesses 15488 # Number of tag accesses +system.l2c.tags.data_accesses 15488 # Number of data accesses system.l2c.ReadReq_hits::cpu0.inst 185 # number of ReadReq hits system.l2c.ReadReq_hits::cpu0.data 5 # number of ReadReq hits system.l2c.ReadReq_hits::cpu1.inst 296 # number of ReadReq hits @@ -273,6 +282,12 @@ system.cpu0.icache.tags.warmup_cycle 0 # Cy system.cpu0.icache.tags.occ_blocks::cpu0.inst 222.772698 # Average occupied blocks per requestor system.cpu0.icache.tags.occ_percent::cpu0.inst 0.435103 # Average percentage of cache occupancy system.cpu0.icache.tags.occ_percent::total 0.435103 # Average percentage of cache occupancy +system.cpu0.icache.tags.occ_task_id_blocks::1024 252 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::0 53 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::1 199 # Occupied blocks per task id +system.cpu0.icache.tags.occ_task_id_percent::1024 0.492188 # Percentage of cache occupancy per task id +system.cpu0.icache.tags.tag_accesses 175855 # Number of tag accesses +system.cpu0.icache.tags.data_accesses 175855 # Number of data accesses system.cpu0.icache.ReadReq_hits::cpu0.inst 174921 # number of ReadReq hits system.cpu0.icache.ReadReq_hits::total 174921 # number of ReadReq hits system.cpu0.icache.demand_hits::cpu0.inst 174921 # number of demand (read+write) hits @@ -315,6 +330,12 @@ system.cpu0.dcache.tags.warmup_cycle 0 # Cy system.cpu0.dcache.tags.occ_blocks::cpu0.data 150.745494 # Average occupied blocks per requestor system.cpu0.dcache.tags.occ_percent::cpu0.data 0.294425 # Average percentage of cache occupancy system.cpu0.dcache.tags.occ_percent::total 0.294425 # Average percentage of cache occupancy +system.cpu0.dcache.tags.occ_task_id_blocks::1024 165 # Occupied blocks per task id +system.cpu0.dcache.tags.age_task_id_blocks_1024::0 16 # Occupied blocks per task id +system.cpu0.dcache.tags.age_task_id_blocks_1024::1 149 # Occupied blocks per task id +system.cpu0.dcache.tags.occ_task_id_percent::1024 0.322266 # Percentage of cache occupancy per task id +system.cpu0.dcache.tags.tag_accesses 329803 # Number of tag accesses +system.cpu0.dcache.tags.data_accesses 329803 # Number of data accesses system.cpu0.dcache.ReadReq_hits::cpu0.data 54430 # number of ReadReq hits system.cpu0.dcache.ReadReq_hits::total 54430 # number of ReadReq hits system.cpu0.dcache.WriteReq_hits::cpu0.data 27578 # number of WriteReq hits @@ -397,6 +418,12 @@ system.cpu1.icache.tags.warmup_cycle 0 # Cy system.cpu1.icache.tags.occ_blocks::cpu1.inst 76.751702 # Average occupied blocks per requestor system.cpu1.icache.tags.occ_percent::cpu1.inst 0.149906 # Average percentage of cache occupancy system.cpu1.icache.tags.occ_percent::total 0.149906 # Average percentage of cache occupancy +system.cpu1.icache.tags.occ_task_id_blocks::1024 80 # Occupied blocks per 
task id +system.cpu1.icache.tags.age_task_id_blocks_1024::0 9 # Occupied blocks per task id +system.cpu1.icache.tags.age_task_id_blocks_1024::1 71 # Occupied blocks per task id +system.cpu1.icache.tags.occ_task_id_percent::1024 0.156250 # Percentage of cache occupancy per task id +system.cpu1.icache.tags.tag_accesses 167788 # Number of tag accesses +system.cpu1.icache.tags.data_accesses 167788 # Number of data accesses system.cpu1.icache.ReadReq_hits::cpu1.inst 167072 # number of ReadReq hits system.cpu1.icache.ReadReq_hits::total 167072 # number of ReadReq hits system.cpu1.icache.demand_hits::cpu1.inst 167072 # number of demand (read+write) hits @@ -439,6 +466,11 @@ system.cpu1.dcache.tags.warmup_cycle 0 # Cy system.cpu1.dcache.tags.occ_blocks::cpu1.data 30.316999 # Average occupied blocks per requestor system.cpu1.dcache.tags.occ_percent::cpu1.data 0.059213 # Average percentage of cache occupancy system.cpu1.dcache.tags.occ_percent::total 0.059213 # Average percentage of cache occupancy +system.cpu1.dcache.tags.occ_task_id_blocks::1024 26 # Occupied blocks per task id +system.cpu1.dcache.tags.age_task_id_blocks_1024::1 26 # Occupied blocks per task id +system.cpu1.dcache.tags.occ_task_id_percent::1024 0.050781 # Percentage of cache occupancy per task id +system.cpu1.dcache.tags.tag_accesses 213800 # Number of tag accesses +system.cpu1.dcache.tags.data_accesses 213800 # Number of data accesses system.cpu1.dcache.ReadReq_hits::cpu1.data 40470 # number of ReadReq hits system.cpu1.dcache.ReadReq_hits::total 40470 # number of ReadReq hits system.cpu1.dcache.WriteReq_hits::cpu1.data 12563 # number of WriteReq hits @@ -519,6 +551,12 @@ system.cpu2.icache.tags.warmup_cycle 0 # Cy system.cpu2.icache.tags.occ_blocks::cpu2.inst 74.781015 # Average occupied blocks per requestor system.cpu2.icache.tags.occ_percent::cpu2.inst 0.146057 # Average percentage of cache occupancy system.cpu2.icache.tags.occ_percent::total 0.146057 # Average percentage of cache occupancy +system.cpu2.icache.tags.occ_task_id_blocks::1024 80 # Occupied blocks per task id +system.cpu2.icache.tags.age_task_id_blocks_1024::0 9 # Occupied blocks per task id +system.cpu2.icache.tags.age_task_id_blocks_1024::1 71 # Occupied blocks per task id +system.cpu2.icache.tags.occ_task_id_percent::1024 0.156250 # Percentage of cache occupancy per task id +system.cpu2.icache.tags.tag_accesses 167724 # Number of tag accesses +system.cpu2.icache.tags.data_accesses 167724 # Number of data accesses system.cpu2.icache.ReadReq_hits::cpu2.inst 167008 # number of ReadReq hits system.cpu2.icache.ReadReq_hits::total 167008 # number of ReadReq hits system.cpu2.icache.demand_hits::cpu2.inst 167008 # number of demand (read+write) hits @@ -561,6 +599,11 @@ system.cpu2.dcache.tags.warmup_cycle 0 # Cy system.cpu2.dcache.tags.occ_blocks::cpu2.data 29.605505 # Average occupied blocks per requestor system.cpu2.dcache.tags.occ_percent::cpu2.data 0.057823 # Average percentage of cache occupancy system.cpu2.dcache.tags.occ_percent::total 0.057823 # Average percentage of cache occupancy +system.cpu2.dcache.tags.occ_task_id_blocks::1024 26 # Occupied blocks per task id +system.cpu2.dcache.tags.age_task_id_blocks_1024::1 26 # Occupied blocks per task id +system.cpu2.dcache.tags.occ_task_id_percent::1024 0.050781 # Percentage of cache occupancy per task id +system.cpu2.dcache.tags.tag_accesses 234360 # Number of tag accesses +system.cpu2.dcache.tags.data_accesses 234360 # Number of data accesses system.cpu2.dcache.ReadReq_hits::cpu2.data 42194 # number of ReadReq hits 
system.cpu2.dcache.ReadReq_hits::total 42194 # number of ReadReq hits system.cpu2.dcache.WriteReq_hits::cpu2.data 15998 # number of WriteReq hits @@ -641,6 +684,12 @@ system.cpu3.icache.tags.warmup_cycle 0 # Cy system.cpu3.icache.tags.occ_blocks::cpu3.inst 72.874497 # Average occupied blocks per requestor system.cpu3.icache.tags.occ_percent::cpu3.inst 0.142333 # Average percentage of cache occupancy system.cpu3.icache.tags.occ_percent::total 0.142333 # Average percentage of cache occupancy +system.cpu3.icache.tags.occ_task_id_blocks::1024 80 # Occupied blocks per task id +system.cpu3.icache.tags.age_task_id_blocks_1024::0 9 # Occupied blocks per task id +system.cpu3.icache.tags.age_task_id_blocks_1024::1 71 # Occupied blocks per task id +system.cpu3.icache.tags.occ_task_id_percent::1024 0.156250 # Percentage of cache occupancy per task id +system.cpu3.icache.tags.tag_accesses 167660 # Number of tag accesses +system.cpu3.icache.tags.data_accesses 167660 # Number of data accesses system.cpu3.icache.ReadReq_hits::cpu3.inst 166942 # number of ReadReq hits system.cpu3.icache.ReadReq_hits::total 166942 # number of ReadReq hits system.cpu3.icache.demand_hits::cpu3.inst 166942 # number of demand (read+write) hits @@ -683,6 +732,12 @@ system.cpu3.dcache.tags.warmup_cycle 0 # Cy system.cpu3.dcache.tags.occ_blocks::cpu3.data 28.795404 # Average occupied blocks per requestor system.cpu3.dcache.tags.occ_percent::cpu3.data 0.056241 # Average percentage of cache occupancy system.cpu3.dcache.tags.occ_percent::total 0.056241 # Average percentage of cache occupancy +system.cpu3.dcache.tags.occ_task_id_blocks::1024 27 # Occupied blocks per task id +system.cpu3.dcache.tags.age_task_id_blocks_1024::0 1 # Occupied blocks per task id +system.cpu3.dcache.tags.age_task_id_blocks_1024::1 26 # Occupied blocks per task id +system.cpu3.dcache.tags.occ_task_id_percent::1024 0.052734 # Percentage of cache occupancy per task id +system.cpu3.dcache.tags.tag_accesses 223805 # Number of tag accesses +system.cpu3.dcache.tags.data_accesses 223805 # Number of data accesses system.cpu3.dcache.ReadReq_hits::cpu3.data 41301 # number of ReadReq hits system.cpu3.dcache.ReadReq_hits::total 41301 # number of ReadReq hits system.cpu3.dcache.WriteReq_hits::cpu3.data 14260 # number of WriteReq hits diff --git a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-timing-mp/config.ini b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-timing-mp/config.ini index fc54ba8f2..f42d1a52f 100644 --- a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-timing-mp/config.ini +++ b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-timing-mp/config.ini @@ -85,6 +85,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu0.dcache.tags @@ -101,6 +102,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu0.dtb] @@ -123,6 +125,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu0.icache.tags @@ -139,6 +142,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu0.interrupts] @@ -167,7 +171,7 @@ env= errout=cerr euid=100 eventq_index=0 -executable=tests/test-progs/m5threads/bin/sparc/linux/test_atomic +executable=/dist/test-progs/m5threads/bin/sparc/linux/test_atomic gid=100 input=cin 
max_stack_size=67108864 @@ -224,6 +228,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu1.dcache.tags @@ -240,6 +245,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu1.dtb] @@ -262,6 +268,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu1.icache.tags @@ -278,6 +285,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu1.interrupts] @@ -343,6 +351,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu2.dcache.tags @@ -359,6 +368,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu2.dtb] @@ -381,6 +391,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu2.icache.tags @@ -397,6 +408,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu2.interrupts] @@ -462,6 +474,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu3.dcache.tags @@ -478,6 +491,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu3.dtb] @@ -500,6 +514,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu3.icache.tags @@ -516,6 +531,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu3.interrupts] @@ -556,6 +572,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=4194304 system=system tags=system.l2c.tags @@ -572,6 +589,7 @@ block_size=64 clk_domain=system.cpu_clk_domain eventq_index=0 hit_latency=20 +sequential_access=false size=4194304 [system.membus] diff --git a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-timing-mp/simerr b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-timing-mp/simerr index e45cd058f..1a4f96712 100755 --- a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-timing-mp/simerr +++ b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-timing-mp/simerr @@ -1,2 +1 @@ warn: Sockets disabled, not accepting gdb connections -hack: be nice to actually delete the event here diff --git a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-timing-mp/simout b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-timing-mp/simout index 7a29b18d1..f70079816 100755 --- a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-timing-mp/simout +++ b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-timing-mp/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/SPARC/tests/opt/quick/se/40.m5threads-test-atomic/sparc/linux/simple-timing-mp/simout -Redirecting stderr to build/SPARC/tests/opt/quick/se/40.m5threads-test-atomic/sparc/linux/simple-timing-mp/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. 
-gem5 compiled Sep 22 2013 06:07:13 -gem5 started Sep 22 2013 06:10:12 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 17:04:27 +gem5 started Jan 22 2014 17:29:55 +gem5 executing on u200540-lin command line: build/SPARC/gem5.opt -d build/SPARC/tests/opt/quick/se/40.m5threads-test-atomic/sparc/linux/simple-timing-mp -re tests/run.py build/SPARC/tests/opt/quick/se/40.m5threads-test-atomic/sparc/linux/simple-timing-mp Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-timing-mp/stats.txt b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-timing-mp/stats.txt index 8d5cb3498..bea653a5a 100644 --- a/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-timing-mp/stats.txt +++ b/tests/quick/se/40.m5threads-test-atomic/ref/sparc/linux/simple-timing-mp/stats.txt @@ -4,13 +4,15 @@ sim_seconds 0.000263 # Nu sim_ticks 262794500 # Number of ticks simulated final_tick 262794500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_inst_rate 200508 # Simulator instruction rate (inst/s) -host_op_rate 200507 # Simulator op (including micro ops) rate (op/s) -host_tick_rate 79406810 # Simulator tick rate (ticks/s) -host_mem_usage 291148 # Number of bytes of host memory used -host_seconds 3.31 # Real time elapsed on the host +host_inst_rate 87015 # Simulator instruction rate (inst/s) +host_op_rate 87015 # Simulator op (including micro ops) rate (op/s) +host_tick_rate 34460789 # Simulator tick rate (ticks/s) +host_mem_usage 249056 # Number of bytes of host memory used +host_seconds 7.63 # Real time elapsed on the host sim_insts 663567 # Number of instructions simulated sim_ops 663567 # Number of ops (including micro ops) simulated +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu0.inst 18240 # Number of bytes read from this memory system.physmem.bytes_read::cpu0.data 10560 # Number of bytes read from this memory system.physmem.bytes_read::cpu1.inst 3776 # Number of bytes read from this memory @@ -74,6 +76,7 @@ system.membus.reqLayer0.occupancy 852296 # La system.membus.reqLayer0.utilization 0.3 # Layer utilization (%) system.membus.respLayer1.occupancy 5420500 # Layer occupancy (ticks) system.membus.respLayer1.utilization 2.1 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.l2c.tags.replacements 0 # number of replacements system.l2c.tags.tagsinuse 349.046072 # Cycle average of tags in use system.l2c.tags.total_refs 1220 # Total number of references to valid blocks. 
@@ -99,6 +102,12 @@ system.l2c.tags.occ_percent::cpu2.data 0.000013 # Av system.l2c.tags.occ_percent::cpu3.inst 0.000016 # Average percentage of cache occupancy system.l2c.tags.occ_percent::cpu3.data 0.000013 # Average percentage of cache occupancy system.l2c.tags.occ_percent::total 0.005326 # Average percentage of cache occupancy +system.l2c.tags.occ_task_id_blocks::1024 429 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::0 55 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::2 374 # Occupied blocks per task id +system.l2c.tags.occ_task_id_percent::1024 0.006546 # Percentage of cache occupancy per task id +system.l2c.tags.tag_accesses 15709 # Number of tag accesses +system.l2c.tags.data_accesses 15709 # Number of data accesses system.l2c.ReadReq_hits::cpu0.inst 182 # number of ReadReq hits system.l2c.ReadReq_hits::cpu0.data 5 # number of ReadReq hits system.l2c.ReadReq_hits::cpu1.inst 300 # number of ReadReq hits @@ -560,6 +569,12 @@ system.cpu0.icache.tags.warmup_cycle 0 # Cy system.cpu0.icache.tags.occ_blocks::cpu0.inst 212.401822 # Average occupied blocks per requestor system.cpu0.icache.tags.occ_percent::cpu0.inst 0.414847 # Average percentage of cache occupancy system.cpu0.icache.tags.occ_percent::total 0.414847 # Average percentage of cache occupancy +system.cpu0.icache.tags.occ_task_id_blocks::1024 252 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::0 53 # Occupied blocks per task id +system.cpu0.icache.tags.age_task_id_blocks_1024::2 199 # Occupied blocks per task id +system.cpu0.icache.tags.occ_task_id_percent::1024 0.492188 # Percentage of cache occupancy per task id +system.cpu0.icache.tags.tag_accesses 159104 # Number of tag accesses +system.cpu0.icache.tags.data_accesses 159104 # Number of data accesses system.cpu0.icache.ReadReq_hits::cpu0.inst 158170 # number of ReadReq hits system.cpu0.icache.ReadReq_hits::total 158170 # number of ReadReq hits system.cpu0.icache.demand_hits::cpu0.inst 158170 # number of demand (read+write) hits @@ -638,6 +653,12 @@ system.cpu0.dcache.tags.warmup_cycle 0 # Cy system.cpu0.dcache.tags.occ_blocks::cpu0.data 145.571924 # Average occupied blocks per requestor system.cpu0.dcache.tags.occ_percent::cpu0.data 0.284320 # Average percentage of cache occupancy system.cpu0.dcache.tags.occ_percent::total 0.284320 # Average percentage of cache occupancy +system.cpu0.dcache.tags.occ_task_id_blocks::1024 165 # Occupied blocks per task id +system.cpu0.dcache.tags.age_task_id_blocks_1024::0 16 # Occupied blocks per task id +system.cpu0.dcache.tags.age_task_id_blocks_1024::2 149 # Occupied blocks per task id +system.cpu0.dcache.tags.occ_task_id_percent::1024 0.322266 # Percentage of cache occupancy per task id +system.cpu0.dcache.tags.tag_accesses 296317 # Number of tag accesses +system.cpu0.dcache.tags.data_accesses 296317 # Number of data accesses system.cpu0.dcache.ReadReq_hits::cpu0.data 48827 # number of ReadReq hits system.cpu0.dcache.ReadReq_hits::total 48827 # number of ReadReq hits system.cpu0.dcache.WriteReq_hits::cpu0.data 24780 # number of WriteReq hits @@ -780,6 +801,13 @@ system.cpu1.icache.tags.warmup_cycle 0 # Cy system.cpu1.icache.tags.occ_blocks::cpu1.inst 70.017504 # Average occupied blocks per requestor system.cpu1.icache.tags.occ_percent::cpu1.inst 0.136753 # Average percentage of cache occupancy system.cpu1.icache.tags.occ_percent::total 0.136753 # Average percentage of cache occupancy +system.cpu1.icache.tags.occ_task_id_blocks::1024 86 # Occupied blocks per 
task id +system.cpu1.icache.tags.age_task_id_blocks_1024::0 16 # Occupied blocks per task id +system.cpu1.icache.tags.age_task_id_blocks_1024::1 1 # Occupied blocks per task id +system.cpu1.icache.tags.age_task_id_blocks_1024::2 69 # Occupied blocks per task id +system.cpu1.icache.tags.occ_task_id_percent::1024 0.167969 # Percentage of cache occupancy per task id +system.cpu1.icache.tags.tag_accesses 163870 # Number of tag accesses +system.cpu1.icache.tags.data_accesses 163870 # Number of data accesses system.cpu1.icache.ReadReq_hits::cpu1.inst 163138 # number of ReadReq hits system.cpu1.icache.ReadReq_hits::total 163138 # number of ReadReq hits system.cpu1.icache.demand_hits::cpu1.inst 163138 # number of demand (read+write) hits @@ -858,6 +886,12 @@ system.cpu1.dcache.tags.warmup_cycle 0 # Cy system.cpu1.dcache.tags.occ_blocks::cpu1.data 27.720196 # Average occupied blocks per requestor system.cpu1.dcache.tags.occ_percent::cpu1.data 0.054141 # Average percentage of cache occupancy system.cpu1.dcache.tags.occ_percent::total 0.054141 # Average percentage of cache occupancy +system.cpu1.dcache.tags.occ_task_id_blocks::1024 30 # Occupied blocks per task id +system.cpu1.dcache.tags.age_task_id_blocks_1024::0 4 # Occupied blocks per task id +system.cpu1.dcache.tags.age_task_id_blocks_1024::2 26 # Occupied blocks per task id +system.cpu1.dcache.tags.occ_task_id_percent::1024 0.058594 # Percentage of cache occupancy per task id +system.cpu1.dcache.tags.tag_accesses 232288 # Number of tag accesses +system.cpu1.dcache.tags.data_accesses 232288 # Number of data accesses system.cpu1.dcache.ReadReq_hits::cpu1.data 41378 # number of ReadReq hits system.cpu1.dcache.ReadReq_hits::total 41378 # number of ReadReq hits system.cpu1.dcache.WriteReq_hits::cpu1.data 16307 # number of WriteReq hits @@ -998,6 +1032,13 @@ system.cpu2.icache.tags.warmup_cycle 0 # Cy system.cpu2.icache.tags.occ_blocks::cpu2.inst 67.624960 # Average occupied blocks per requestor system.cpu2.icache.tags.occ_percent::cpu2.inst 0.132080 # Average percentage of cache occupancy system.cpu2.icache.tags.occ_percent::total 0.132080 # Average percentage of cache occupancy +system.cpu2.icache.tags.occ_task_id_blocks::1024 86 # Occupied blocks per task id +system.cpu2.icache.tags.age_task_id_blocks_1024::0 9 # Occupied blocks per task id +system.cpu2.icache.tags.age_task_id_blocks_1024::1 8 # Occupied blocks per task id +system.cpu2.icache.tags.age_task_id_blocks_1024::2 69 # Occupied blocks per task id +system.cpu2.icache.tags.occ_task_id_percent::1024 0.167969 # Percentage of cache occupancy per task id +system.cpu2.icache.tags.tag_accesses 165265 # Number of tag accesses +system.cpu2.icache.tags.data_accesses 165265 # Number of data accesses system.cpu2.icache.ReadReq_hits::cpu2.inst 164533 # number of ReadReq hits system.cpu2.icache.ReadReq_hits::total 164533 # number of ReadReq hits system.cpu2.icache.demand_hits::cpu2.inst 164533 # number of demand (read+write) hits @@ -1076,6 +1117,12 @@ system.cpu2.dcache.tags.warmup_cycle 0 # Cy system.cpu2.dcache.tags.occ_blocks::cpu2.data 26.763890 # Average occupied blocks per requestor system.cpu2.dcache.tags.occ_percent::cpu2.data 0.052273 # Average percentage of cache occupancy system.cpu2.dcache.tags.occ_percent::total 0.052273 # Average percentage of cache occupancy +system.cpu2.dcache.tags.occ_task_id_blocks::1024 29 # Occupied blocks per task id +system.cpu2.dcache.tags.age_task_id_blocks_1024::0 3 # Occupied blocks per task id +system.cpu2.dcache.tags.age_task_id_blocks_1024::2 26 # Occupied 
blocks per task id +system.cpu2.dcache.tags.occ_task_id_percent::1024 0.056641 # Percentage of cache occupancy per task id +system.cpu2.dcache.tags.tag_accesses 237038 # Number of tag accesses +system.cpu2.dcache.tags.data_accesses 237038 # Number of data accesses system.cpu2.dcache.ReadReq_hits::cpu2.data 42011 # number of ReadReq hits system.cpu2.dcache.ReadReq_hits::total 42011 # number of ReadReq hits system.cpu2.dcache.WriteReq_hits::cpu2.data 16865 # number of WriteReq hits @@ -1216,6 +1263,13 @@ system.cpu3.icache.tags.warmup_cycle 0 # Cy system.cpu3.icache.tags.occ_blocks::cpu3.inst 65.598437 # Average occupied blocks per requestor system.cpu3.icache.tags.occ_percent::cpu3.inst 0.128122 # Average percentage of cache occupancy system.cpu3.icache.tags.occ_percent::total 0.128122 # Average percentage of cache occupancy +system.cpu3.icache.tags.occ_task_id_blocks::1024 86 # Occupied blocks per task id +system.cpu3.icache.tags.age_task_id_blocks_1024::0 9 # Occupied blocks per task id +system.cpu3.icache.tags.age_task_id_blocks_1024::1 8 # Occupied blocks per task id +system.cpu3.icache.tags.age_task_id_blocks_1024::2 69 # Occupied blocks per task id +system.cpu3.icache.tags.occ_task_id_percent::1024 0.167969 # Percentage of cache occupancy per task id +system.cpu3.icache.tags.tag_accesses 177056 # Number of tag accesses +system.cpu3.icache.tags.data_accesses 177056 # Number of data accesses system.cpu3.icache.ReadReq_hits::cpu3.inst 176322 # number of ReadReq hits system.cpu3.icache.ReadReq_hits::total 176322 # number of ReadReq hits system.cpu3.icache.demand_hits::cpu3.inst 176322 # number of demand (read+write) hits @@ -1294,6 +1348,12 @@ system.cpu3.dcache.tags.warmup_cycle 0 # Cy system.cpu3.dcache.tags.occ_blocks::cpu3.data 25.915086 # Average occupied blocks per requestor system.cpu3.dcache.tags.occ_percent::cpu3.data 0.050615 # Average percentage of cache occupancy system.cpu3.dcache.tags.occ_percent::total 0.050615 # Average percentage of cache occupancy +system.cpu3.dcache.tags.occ_task_id_blocks::1024 29 # Occupied blocks per task id +system.cpu3.dcache.tags.age_task_id_blocks_1024::0 3 # Occupied blocks per task id +system.cpu3.dcache.tags.age_task_id_blocks_1024::2 26 # Occupied blocks per task id +system.cpu3.dcache.tags.occ_task_id_percent::1024 0.056641 # Percentage of cache occupancy per task id +system.cpu3.dcache.tags.tag_accesses 184905 # Number of tag accesses +system.cpu3.dcache.tags.data_accesses 184905 # Number of data accesses system.cpu3.dcache.ReadReq_hits::cpu3.data 39563 # number of ReadReq hits system.cpu3.dcache.ReadReq_hits::total 39563 # number of ReadReq hits system.cpu3.dcache.WriteReq_hits::cpu3.data 6216 # number of WriteReq hits diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MESI_Two_Level/simerr b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MESI_Two_Level/simerr index d0c477d0e..6c3b6657b 100755 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MESI_Two_Level/simerr +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MESI_Two_Level/simerr @@ -79,4 +79,3 @@ system.cpu4: completed 90000 read, 48685 write accesses @6602186 system.cpu5: completed 90000 read, 48384 write accesses @6637212 system.cpu3: completed 90000 read, 48869 write accesses @6654178 system.cpu6: completed 100000 read, 53414 write accesses @7257449 -hack: be nice to actually delete the event here diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MESI_Two_Level/simout 
b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MESI_Two_Level/simout index 53312cb70..d0305734f 100755 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MESI_Two_Level/simout +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MESI_Two_Level/simout @@ -1,12 +1,10 @@ -Redirecting stdout to build/ALPHA_MESI_CMP_directory/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MESI_CMP_directory/simout -Redirecting stderr to build/ALPHA_MESI_CMP_directory/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MESI_CMP_directory/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:27:02 -gem5 started Sep 22 2013 05:27:23 -gem5 executing on zizzer -command line: build/ALPHA_MESI_CMP_directory/gem5.opt -d build/ALPHA_MESI_CMP_directory/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MESI_CMP_directory -re tests/run.py build/ALPHA_MESI_CMP_directory/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MESI_CMP_directory +gem5 compiled Jan 22 2014 16:37:52 +gem5 started Jan 22 2014 17:26:10 +gem5 executing on u200540-lin +command line: build/ALPHA_MESI_Two_Level/gem5.opt -d build/ALPHA_MESI_Two_Level/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MESI_Two_Level -re tests/run.py build/ALPHA_MESI_Two_Level/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MESI_Two_Level Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... Exiting @ tick 7257449 because maximum number of loads reached diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MESI_Two_Level/stats.txt b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MESI_Two_Level/stats.txt index 173706474..6ebaf5618 100644 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MESI_Two_Level/stats.txt +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MESI_Two_Level/stats.txt @@ -4,9 +4,12 @@ sim_seconds 0.007257 # Nu sim_ticks 7257449 # Number of ticks simulated final_tick 7257449 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_tick_rate 68193 # Simulator tick rate (ticks/s) -host_mem_usage 302480 # Number of bytes of host memory used -host_seconds 106.43 # Real time elapsed on the host +host_tick_rate 104409 # Simulator tick rate (ticks/s) +host_mem_usage 258924 # Number of bytes of host memory used +host_seconds 69.51 # Real time elapsed on the host +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.delayHist::bucket_size 128 # delay histogram for all message system.ruby.delayHist::max_bucket 1279 # delay histogram for all message system.ruby.delayHist::samples 4856797 # delay histogram for all message @@ -322,6 +325,7 @@ system.ruby.network.routers08.msg_bytes.Response_Control::2 4842768 system.ruby.network.routers08.msg_bytes.Writeback_Data::0 8120304 system.ruby.network.routers08.msg_bytes.Writeback_Data::1 28664064 system.ruby.network.routers08.msg_bytes.Writeback_Control::0 1656240 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.dir_cntrl0.memBuffer.memReq 817953 # Total number of memory requests system.ruby.dir_cntrl0.memBuffer.memRead 604997 # Number of memory reads system.ruby.dir_cntrl0.memBuffer.memWrite 212953 # Number of memory writes @@ -377,6 +381,7 @@ 
system.ruby.network.msg_byte.Writeback_Data 110353176 system.ruby.network.msg_byte.Writeback_Control 4968736 system.funcbus.throughput 0 # Throughput (bytes/s) system.funcbus.data_through_bus 0 # Total data (bytes) +system.cpu_clk_domain.clock 1 # Clock period in ticks system.cpu0.num_reads 99060 # number of read accesses completed system.cpu0.num_writes 53442 # number of write accesses completed system.cpu0.num_copies 0 # number of copy accesses completed diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_directory/config.ini b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_directory/config.ini index ba91b18e2..53be052ed 100644 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_directory/config.ini +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_directory/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,12 +14,13 @@ children=clk_domain cpu0 cpu1 cpu2 cpu3 cpu4 cpu5 cpu6 cpu7 cpu_clk_domain funcb boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 mem_mode=timing mem_ranges=0:268435455 -memories=system.funcmem system.physmem +memories=system.physmem system.funcmem num_work_ids=16 readfile= symbolfile= @@ -33,12 +36,14 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -58,6 +63,7 @@ test=system.ruby.l1_cntrl0.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -77,6 +83,7 @@ test=system.ruby.l1_cntrl1.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -96,6 +103,7 @@ test=system.ruby.l1_cntrl2.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -115,6 +123,7 @@ test=system.ruby.l1_cntrl3.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -134,6 +143,7 @@ test=system.ruby.l1_cntrl4.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -153,6 +163,7 @@ test=system.ruby.l1_cntrl5.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -172,6 +183,7 @@ test=system.ruby.l1_cntrl6.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -190,11 +202,13 @@ test=system.ruby.l1_cntrl7.sequencer.slave[0] [system.cpu_clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.funcbus] type=NoncoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 use_default_range=false width=8 @@ -206,6 +220,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=false latency=30 
latency_var=0 @@ -218,6 +233,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -226,18 +242,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 l1_cntrl1 l1_cntrl2 l1_cntrl3 l1_cntrl4 l1_cntrl5 l1_cntrl6 l1_cntrl7 l2_cntrl0 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 l1_cntrl1 l1_cntrl2 l1_cntrl3 l1_cntrl4 l1_cntrl5 l1_cntrl6 l1_cntrl7 l2_cntrl0 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=8 random_seed=1234 randomization=false -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -245,9 +265,10 @@ type=Directory_Controller children=directory memBuffer buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=9 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory directory_latency=6 +eventq_index=0 memBuffer=system.ruby.dir_cntrl0.memBuffer number_of_TBEs=256 peer=Null @@ -258,6 +279,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 map_levels=4 numa_high_bit=5 size=268435456 @@ -274,6 +296,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -293,7 +316,8 @@ L1Dcache=system.ruby.l1_cntrl0.L1Dcache L1Icache=system.ruby.l1_cntrl0.L1Icache buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 +eventq_index=0 l2_select_num_bits=0 number_of_TBEs=256 peer=Null @@ -311,6 +335,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -325,6 +350,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=3 replacement_policy=PSEUDO_LRU @@ -340,6 +366,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl0.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -358,7 +385,8 @@ L1Dcache=system.ruby.l1_cntrl1.L1Dcache L1Icache=system.ruby.l1_cntrl1.L1Icache buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 +eventq_index=0 l2_select_num_bits=0 number_of_TBEs=256 peer=Null @@ -376,6 +404,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -390,6 +419,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=3 replacement_policy=PSEUDO_LRU @@ -405,6 +435,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl1.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl1.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -423,7 +454,8 @@ L1Dcache=system.ruby.l1_cntrl2.L1Dcache L1Icache=system.ruby.l1_cntrl2.L1Icache buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=2 +cluster_id=0 +eventq_index=0 l2_select_num_bits=0 number_of_TBEs=256 peer=Null @@ -441,6 +473,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -455,6 
+488,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=3 replacement_policy=PSEUDO_LRU @@ -470,6 +504,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl2.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl2.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -488,7 +523,8 @@ L1Dcache=system.ruby.l1_cntrl3.L1Dcache L1Icache=system.ruby.l1_cntrl3.L1Icache buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=3 +cluster_id=0 +eventq_index=0 l2_select_num_bits=0 number_of_TBEs=256 peer=Null @@ -506,6 +542,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -520,6 +557,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=3 replacement_policy=PSEUDO_LRU @@ -535,6 +573,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl3.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl3.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -553,7 +592,8 @@ L1Dcache=system.ruby.l1_cntrl4.L1Dcache L1Icache=system.ruby.l1_cntrl4.L1Icache buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=4 +cluster_id=0 +eventq_index=0 l2_select_num_bits=0 number_of_TBEs=256 peer=Null @@ -571,6 +611,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -585,6 +626,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=3 replacement_policy=PSEUDO_LRU @@ -600,6 +642,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl4.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl4.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -618,7 +661,8 @@ L1Dcache=system.ruby.l1_cntrl5.L1Dcache L1Icache=system.ruby.l1_cntrl5.L1Icache buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=5 +cluster_id=0 +eventq_index=0 l2_select_num_bits=0 number_of_TBEs=256 peer=Null @@ -636,6 +680,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -650,6 +695,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=3 replacement_policy=PSEUDO_LRU @@ -665,6 +711,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl5.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl5.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -683,7 +730,8 @@ L1Dcache=system.ruby.l1_cntrl6.L1Dcache L1Icache=system.ruby.l1_cntrl6.L1Icache buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=6 +cluster_id=0 +eventq_index=0 l2_select_num_bits=0 number_of_TBEs=256 peer=Null @@ -701,6 +749,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -715,6 +764,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=3 replacement_policy=PSEUDO_LRU @@ -730,6 +780,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl6.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl6.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ 
-748,7 +799,8 @@ L1Dcache=system.ruby.l1_cntrl7.L1Dcache L1Icache=system.ruby.l1_cntrl7.L1Icache buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=7 +cluster_id=0 +eventq_index=0 l2_select_num_bits=0 number_of_TBEs=256 peer=Null @@ -766,6 +818,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -780,6 +833,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=3 replacement_policy=PSEUDO_LRU @@ -795,6 +849,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl7.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl7.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -812,7 +867,8 @@ children=L2cache L2cache=system.ruby.l2_cntrl0.L2cache buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=8 +cluster_id=0 +eventq_index=0 number_of_TBEs=256 peer=Null recycle_latency=10 @@ -827,6 +883,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=15 replacement_policy=PSEUDO_LRU @@ -840,6 +897,7 @@ tagArrayBanks=1 type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -849,6 +907,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 system.ruby.network.ext_links2 system.ruby.network.ext_links3 system.ruby.network.ext_links4 system.ruby.network.ext_links5 system.ruby.network.ext_links6 system.ruby.network.ext_links7 system.ruby.network.ext_links8 system.ruby.network.ext_links9 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 system.ruby.network.int_links2 system.ruby.network.int_links3 system.ruby.network.int_links4 system.ruby.network.int_links5 system.ruby.network.int_links6 system.ruby.network.int_links7 system.ruby.network.int_links8 system.ruby.network.int_links9 number_of_virtual_networks=10 @@ -859,6 +918,7 @@ topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 int_node=system.ruby.network.routers00 latency=1 @@ -868,6 +928,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl1 int_node=system.ruby.network.routers01 latency=1 @@ -877,6 +938,7 @@ weight=1 [system.ruby.network.ext_links2] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl2 int_node=system.ruby.network.routers02 latency=1 @@ -886,6 +948,7 @@ weight=1 [system.ruby.network.ext_links3] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl3 int_node=system.ruby.network.routers03 latency=1 @@ -895,6 +958,7 @@ weight=1 [system.ruby.network.ext_links4] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl4 int_node=system.ruby.network.routers04 latency=1 @@ -904,6 +968,7 @@ weight=1 [system.ruby.network.ext_links5] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl5 int_node=system.ruby.network.routers05 latency=1 @@ -913,6 +978,7 @@ weight=1 [system.ruby.network.ext_links6] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl6 int_node=system.ruby.network.routers06 latency=1 @@ -922,6 +988,7 @@ weight=1 
[system.ruby.network.ext_links7] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl7 int_node=system.ruby.network.routers07 latency=1 @@ -931,6 +998,7 @@ weight=1 [system.ruby.network.ext_links8] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l2_cntrl0 int_node=system.ruby.network.routers08 latency=1 @@ -940,6 +1008,7 @@ weight=1 [system.ruby.network.ext_links9] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers09 latency=1 @@ -949,6 +1018,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=10 node_a=system.ruby.network.routers00 @@ -958,6 +1028,7 @@ weight=1 [system.ruby.network.int_links1] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=11 node_a=system.ruby.network.routers01 @@ -967,6 +1038,7 @@ weight=1 [system.ruby.network.int_links2] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=12 node_a=system.ruby.network.routers02 @@ -976,6 +1048,7 @@ weight=1 [system.ruby.network.int_links3] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=13 node_a=system.ruby.network.routers03 @@ -985,6 +1058,7 @@ weight=1 [system.ruby.network.int_links4] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=14 node_a=system.ruby.network.routers04 @@ -994,6 +1068,7 @@ weight=1 [system.ruby.network.int_links5] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=15 node_a=system.ruby.network.routers05 @@ -1003,6 +1078,7 @@ weight=1 [system.ruby.network.int_links6] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=16 node_a=system.ruby.network.routers06 @@ -1012,6 +1088,7 @@ weight=1 [system.ruby.network.int_links7] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=17 node_a=system.ruby.network.routers07 @@ -1021,6 +1098,7 @@ weight=1 [system.ruby.network.int_links8] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=18 node_a=system.ruby.network.routers08 @@ -1030,6 +1108,7 @@ weight=1 [system.ruby.network.int_links9] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=19 node_a=system.ruby.network.routers09 @@ -1039,80 +1118,85 @@ weight=1 [system.ruby.network.routers00] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers01] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers02] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 [system.ruby.network.routers03] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=3 virt_nets=10 [system.ruby.network.routers04] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=4 virt_nets=10 [system.ruby.network.routers05] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=5 virt_nets=10 [system.ruby.network.routers06] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=6 virt_nets=10 [system.ruby.network.routers07] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=7 virt_nets=10 [system.ruby.network.routers08] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=8 virt_nets=10 [system.ruby.network.routers09] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=9 virt_nets=10 
[system.ruby.network.routers10] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=10 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false -num_of_sequencers=8 -ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -1124,5 +1208,6 @@ slave=system.system_port [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_directory/simerr b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_directory/simerr index d888f2c0a..96061ea38 100755 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_directory/simerr +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_directory/simerr @@ -79,4 +79,3 @@ system.cpu3: completed 90000 read, 49090 write accesses @6783540 system.cpu7: completed 90000 read, 48766 write accesses @6785808 system.cpu2: completed 90000 read, 49113 write accesses @6821790 system.cpu6: completed 100000 read, 54332 write accesses @7481441 -hack: be nice to actually delete the event here diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_directory/simout b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_directory/simout index 802dd1a62..45c67b8ac 100755 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_directory/simout +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_directory/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA_MOESI_CMP_directory/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MOESI_CMP_directory/simout -Redirecting stderr to build/ALPHA_MOESI_CMP_directory/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MOESI_CMP_directory/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:36:12 -gem5 started Sep 22 2013 05:36:22 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:42:56 +gem5 started Jan 22 2014 17:26:44 +gem5 executing on u200540-lin command line: build/ALPHA_MOESI_CMP_directory/gem5.opt -d build/ALPHA_MOESI_CMP_directory/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MOESI_CMP_directory -re tests/run.py build/ALPHA_MOESI_CMP_directory/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MOESI_CMP_directory Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
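The config.ini hunks above reflect the same regeneration: the simulation objects now record an eventq_index, the Ruby controllers carry cluster_id in place of the old cntrl_id, and the former [system.ruby.profiler] section is gone, with all_instructions, hot_lines and num_of_sequencers folded into [system.ruby]. Since config.ini is ordinary INI text, a quick check of a regenerated reference can be done with Python's configparser; the snippet below is only an illustrative sketch (the file name and the checks are assumptions, not part of the gem5 test harness).

    # Illustrative sanity check, not part of the gem5 tests: confirm that a
    # regenerated config.ini carries eventq_index on its sections and no
    # longer uses the removed cntrl_id parameter.
    import configparser

    config = configparser.ConfigParser(interpolation=None)
    config.read('config.ini')

    missing_eventq = [s for s in config.sections()
                      if 'eventq_index' not in config[s]]
    stale_cntrl_id = [s for s in config.sections()
                      if 'cntrl_id' in config[s]]

    print('sections without eventq_index:', missing_eventq or 'none')
    print('sections still using cntrl_id:', stale_cntrl_id or 'none')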
diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_directory/stats.txt b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_directory/stats.txt index a1c353735..9ceb39be3 100644 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_directory/stats.txt +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_directory/stats.txt @@ -4,9 +4,12 @@ sim_seconds 0.007481 # Nu sim_ticks 7481441 # Number of ticks simulated final_tick 7481441 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_tick_rate 40706 # Simulator tick rate (ticks/s) -host_mem_usage 305724 # Number of bytes of host memory used -host_seconds 183.79 # Real time elapsed on the host +host_tick_rate 57492 # Simulator tick rate (ticks/s) +host_mem_usage 261156 # Number of bytes of host memory used +host_seconds 130.13 # Real time elapsed on the host +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.outstanding_req_hist::bucket_size 2 system.ruby.outstanding_req_hist::max_bucket 19 system.ruby.outstanding_req_hist::samples 619788 @@ -280,6 +283,7 @@ system.ruby.network.routers08.msg_bytes.Writeback_Control::2 3115216 system.ruby.network.routers08.msg_bytes.Forwarded_Control::0 67368 system.ruby.network.routers08.msg_bytes.Invalidate_Control::0 152 system.ruby.network.routers08.msg_bytes.Unblock_Control::2 9858144 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.dir_cntrl0.memBuffer.memReq 820394 # Total number of memory requests system.ruby.dir_cntrl0.memBuffer.memRead 605143 # Number of memory reads system.ruby.dir_cntrl0.memBuffer.memWrite 215243 # Number of memory writes @@ -359,6 +363,7 @@ system.ruby.network.msg_byte.Invalidate_Control 456 system.ruby.network.msg_byte.Unblock_Control 29574432 system.funcbus.throughput 0 # Throughput (bytes/s) system.funcbus.data_through_bus 0 # Total data (bytes) +system.cpu_clk_domain.clock 1 # Clock period in ticks system.cpu0.num_reads 99553 # number of read accesses completed system.cpu0.num_writes 54274 # number of write accesses completed system.cpu0.num_copies 0 # number of copy accesses completed diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_token/config.ini b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_token/config.ini index a202baa14..4e078b123 100644 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_token/config.ini +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_token/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,6 +14,7 @@ children=clk_domain cpu0 cpu1 cpu2 cpu3 cpu4 cpu5 cpu6 cpu7 cpu_clk_domain funcb boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,12 +36,14 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -58,6 +63,7 @@ test=system.ruby.l1_cntrl0.sequencer.slave[0] type=MemTest atomic=false 
clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -77,6 +83,7 @@ test=system.ruby.l1_cntrl1.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -96,6 +103,7 @@ test=system.ruby.l1_cntrl2.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -115,6 +123,7 @@ test=system.ruby.l1_cntrl3.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -134,6 +143,7 @@ test=system.ruby.l1_cntrl4.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -153,6 +163,7 @@ test=system.ruby.l1_cntrl5.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -172,6 +183,7 @@ test=system.ruby.l1_cntrl6.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -190,11 +202,13 @@ test=system.ruby.l1_cntrl7.sequencer.slave[0] [system.cpu_clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.funcbus] type=NoncoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 use_default_range=false width=8 @@ -206,6 +220,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=false latency=30 latency_var=0 @@ -218,6 +233,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -226,18 +242,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 l1_cntrl1 l1_cntrl2 l1_cntrl3 l1_cntrl4 l1_cntrl5 l1_cntrl6 l1_cntrl7 l2_cntrl0 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 l1_cntrl1 l1_cntrl2 l1_cntrl3 l1_cntrl4 l1_cntrl5 l1_cntrl6 l1_cntrl7 l2_cntrl0 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=8 random_seed=1234 randomization=false -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -245,10 +265,11 @@ type=Directory_Controller children=directory memBuffer buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=9 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory directory_latency=5 distributed_persistent=true +eventq_index=0 fixed_timeout_latency=100 l2_select_num_bits=0 memBuffer=system.ruby.dir_cntrl0.memBuffer @@ -262,6 +283,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 map_levels=4 numa_high_bit=5 size=268435456 @@ -278,6 +300,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -298,8 +321,9 @@ L1Icache=system.ruby.l1_cntrl0.L1Icache N_tokens=9 buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 dynamic_timeout_enabled=true +eventq_index=0 fixed_timeout_latency=300 
l1_request_latency=2 l1_response_latency=2 @@ -322,6 +346,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -336,6 +361,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -351,6 +377,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl0.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -370,8 +397,9 @@ L1Icache=system.ruby.l1_cntrl1.L1Icache N_tokens=9 buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 dynamic_timeout_enabled=true +eventq_index=0 fixed_timeout_latency=300 l1_request_latency=2 l1_response_latency=2 @@ -394,6 +422,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -408,6 +437,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -423,6 +453,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl1.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl1.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -442,8 +473,9 @@ L1Icache=system.ruby.l1_cntrl2.L1Icache N_tokens=9 buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=2 +cluster_id=0 dynamic_timeout_enabled=true +eventq_index=0 fixed_timeout_latency=300 l1_request_latency=2 l1_response_latency=2 @@ -466,6 +498,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -480,6 +513,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -495,6 +529,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl2.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl2.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -514,8 +549,9 @@ L1Icache=system.ruby.l1_cntrl3.L1Icache N_tokens=9 buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=3 +cluster_id=0 dynamic_timeout_enabled=true +eventq_index=0 fixed_timeout_latency=300 l1_request_latency=2 l1_response_latency=2 @@ -538,6 +574,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -552,6 +589,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -567,6 +605,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl3.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl3.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -586,8 +625,9 @@ L1Icache=system.ruby.l1_cntrl4.L1Icache N_tokens=9 buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=4 +cluster_id=0 dynamic_timeout_enabled=true +eventq_index=0 fixed_timeout_latency=300 l1_request_latency=2 l1_response_latency=2 @@ -610,6 +650,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -624,6 +665,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false 
latency=2 replacement_policy=PSEUDO_LRU @@ -639,6 +681,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl4.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl4.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -658,8 +701,9 @@ L1Icache=system.ruby.l1_cntrl5.L1Icache N_tokens=9 buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=5 +cluster_id=0 dynamic_timeout_enabled=true +eventq_index=0 fixed_timeout_latency=300 l1_request_latency=2 l1_response_latency=2 @@ -682,6 +726,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -696,6 +741,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -711,6 +757,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl5.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl5.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -730,8 +777,9 @@ L1Icache=system.ruby.l1_cntrl6.L1Icache N_tokens=9 buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=6 +cluster_id=0 dynamic_timeout_enabled=true +eventq_index=0 fixed_timeout_latency=300 l1_request_latency=2 l1_response_latency=2 @@ -754,6 +802,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -768,6 +817,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -783,6 +833,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl6.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl6.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -802,8 +853,9 @@ L1Icache=system.ruby.l1_cntrl7.L1Icache N_tokens=9 buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=7 +cluster_id=0 dynamic_timeout_enabled=true +eventq_index=0 fixed_timeout_latency=300 l1_request_latency=2 l1_response_latency=2 @@ -826,6 +878,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -840,6 +893,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -855,6 +909,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl7.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl7.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -873,7 +928,8 @@ L2cache=system.ruby.l2_cntrl0.L2cache N_tokens=9 buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=8 +cluster_id=0 +eventq_index=0 filtering_enabled=true l2_request_latency=5 l2_response_latency=5 @@ -889,6 +945,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=10 replacement_policy=PSEUDO_LRU @@ -902,6 +959,7 @@ tagArrayBanks=1 type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -911,6 +969,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 system.ruby.network.ext_links2 system.ruby.network.ext_links3 system.ruby.network.ext_links4 
system.ruby.network.ext_links5 system.ruby.network.ext_links6 system.ruby.network.ext_links7 system.ruby.network.ext_links8 system.ruby.network.ext_links9 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 system.ruby.network.int_links2 system.ruby.network.int_links3 system.ruby.network.int_links4 system.ruby.network.int_links5 system.ruby.network.int_links6 system.ruby.network.int_links7 system.ruby.network.int_links8 system.ruby.network.int_links9 number_of_virtual_networks=10 @@ -921,6 +980,7 @@ topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 int_node=system.ruby.network.routers00 latency=1 @@ -930,6 +990,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl1 int_node=system.ruby.network.routers01 latency=1 @@ -939,6 +1000,7 @@ weight=1 [system.ruby.network.ext_links2] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl2 int_node=system.ruby.network.routers02 latency=1 @@ -948,6 +1010,7 @@ weight=1 [system.ruby.network.ext_links3] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl3 int_node=system.ruby.network.routers03 latency=1 @@ -957,6 +1020,7 @@ weight=1 [system.ruby.network.ext_links4] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl4 int_node=system.ruby.network.routers04 latency=1 @@ -966,6 +1030,7 @@ weight=1 [system.ruby.network.ext_links5] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl5 int_node=system.ruby.network.routers05 latency=1 @@ -975,6 +1040,7 @@ weight=1 [system.ruby.network.ext_links6] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl6 int_node=system.ruby.network.routers06 latency=1 @@ -984,6 +1050,7 @@ weight=1 [system.ruby.network.ext_links7] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl7 int_node=system.ruby.network.routers07 latency=1 @@ -993,6 +1060,7 @@ weight=1 [system.ruby.network.ext_links8] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l2_cntrl0 int_node=system.ruby.network.routers08 latency=1 @@ -1002,6 +1070,7 @@ weight=1 [system.ruby.network.ext_links9] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers09 latency=1 @@ -1011,6 +1080,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=10 node_a=system.ruby.network.routers00 @@ -1020,6 +1090,7 @@ weight=1 [system.ruby.network.int_links1] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=11 node_a=system.ruby.network.routers01 @@ -1029,6 +1100,7 @@ weight=1 [system.ruby.network.int_links2] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=12 node_a=system.ruby.network.routers02 @@ -1038,6 +1110,7 @@ weight=1 [system.ruby.network.int_links3] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=13 node_a=system.ruby.network.routers03 @@ -1047,6 +1120,7 @@ weight=1 [system.ruby.network.int_links4] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=14 node_a=system.ruby.network.routers04 @@ -1056,6 +1130,7 @@ weight=1 [system.ruby.network.int_links5] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=15 
node_a=system.ruby.network.routers05 @@ -1065,6 +1140,7 @@ weight=1 [system.ruby.network.int_links6] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=16 node_a=system.ruby.network.routers06 @@ -1074,6 +1150,7 @@ weight=1 [system.ruby.network.int_links7] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=17 node_a=system.ruby.network.routers07 @@ -1083,6 +1160,7 @@ weight=1 [system.ruby.network.int_links8] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=18 node_a=system.ruby.network.routers08 @@ -1092,6 +1170,7 @@ weight=1 [system.ruby.network.int_links9] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=19 node_a=system.ruby.network.routers09 @@ -1101,80 +1180,85 @@ weight=1 [system.ruby.network.routers00] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers01] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers02] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 [system.ruby.network.routers03] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=3 virt_nets=10 [system.ruby.network.routers04] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=4 virt_nets=10 [system.ruby.network.routers05] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=5 virt_nets=10 [system.ruby.network.routers06] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=6 virt_nets=10 [system.ruby.network.routers07] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=7 virt_nets=10 [system.ruby.network.routers08] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=8 virt_nets=10 [system.ruby.network.routers09] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=9 virt_nets=10 [system.ruby.network.routers10] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=10 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false -num_of_sequencers=8 -ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -1186,5 +1270,6 @@ slave=system.system_port [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_token/simerr b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_token/simerr index 03befb105..78259ab68 100755 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_token/simerr +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_token/simerr @@ -79,4 +79,3 @@ system.cpu7: completed 90000 read, 48273 write accesses @5561660 system.cpu4: completed 90000 read, 48720 write accesses @5589754 system.cpu6: completed 90000 read, 49134 write accesses @5592253 system.cpu0: completed 100000 read, 54250 write accesses @6151475 -hack: be nice to actually delete the event here diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_token/simout b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_token/simout index 9d06307c2..b764ed210 100755 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_token/simout +++ 
b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_token/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA_MOESI_CMP_token/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MOESI_CMP_token/simout -Redirecting stderr to build/ALPHA_MOESI_CMP_token/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MOESI_CMP_token/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:44:48 -gem5 started Sep 22 2013 05:44:59 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:47:59 +gem5 started Jan 22 2014 17:27:37 +gem5 executing on u200540-lin command line: build/ALPHA_MOESI_CMP_token/gem5.opt -d build/ALPHA_MOESI_CMP_token/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MOESI_CMP_token -re tests/run.py build/ALPHA_MOESI_CMP_token/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MOESI_CMP_token Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_token/stats.txt b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_token/stats.txt index 83fd02236..7d7ea198b 100644 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_token/stats.txt +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_CMP_token/stats.txt @@ -4,9 +4,12 @@ sim_seconds 0.006151 # Nu sim_ticks 6151475 # Number of ticks simulated final_tick 6151475 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_tick_rate 45886 # Simulator tick rate (ticks/s) -host_mem_usage 303616 # Number of bytes of host memory used -host_seconds 134.06 # Real time elapsed on the host +host_tick_rate 74061 # Simulator tick rate (ticks/s) +host_mem_usage 259044 # Number of bytes of host memory used +host_seconds 83.06 # Real time elapsed on the host +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.outstanding_req_hist::bucket_size 2 system.ruby.outstanding_req_hist::max_bucket 19 system.ruby.outstanding_req_hist::samples 617095 @@ -245,6 +248,7 @@ system.ruby.network.routers08.msg_bytes.Response_Control::4 11184 system.ruby.network.routers08.msg_bytes.Writeback_Data::4 61234056 system.ruby.network.routers08.msg_bytes.Writeback_Control::4 3020648 system.ruby.network.routers08.msg_bytes.Persistent_Control::3 2077536 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.dir_cntrl0.memBuffer.memReq 844944 # Total number of memory requests system.ruby.dir_cntrl0.memBuffer.memRead 610587 # Number of memory reads system.ruby.dir_cntrl0.memBuffer.memWrite 234338 # Number of memory writes @@ -318,6 +322,7 @@ system.ruby.network.msg_byte.Broadcast_Control 74018760 system.ruby.network.msg_byte.Persistent_Control 41550720 system.funcbus.throughput 0 # Throughput (bytes/s) system.funcbus.data_through_bus 0 # Total data (bytes) +system.cpu_clk_domain.clock 1 # Clock period in ticks system.cpu0.num_reads 100000 # number of read accesses completed system.cpu0.num_writes 54250 # number of write accesses completed system.cpu0.num_copies 0 # number of copy accesses completed diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_hammer/config.ini 
b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_hammer/config.ini index 490bbc6b4..bb19b17fd 100644 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_hammer/config.ini +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_hammer/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,12 +14,13 @@ children=clk_domain cpu0 cpu1 cpu2 cpu3 cpu4 cpu5 cpu6 cpu7 cpu_clk_domain funcb boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 mem_mode=timing mem_ranges=0:268435455 -memories=system.funcmem system.physmem +memories=system.physmem system.funcmem num_work_ids=16 readfile= symbolfile= @@ -33,12 +36,14 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -58,6 +63,7 @@ test=system.ruby.l1_cntrl0.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -77,6 +83,7 @@ test=system.ruby.l1_cntrl1.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -96,6 +103,7 @@ test=system.ruby.l1_cntrl2.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -115,6 +123,7 @@ test=system.ruby.l1_cntrl3.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -134,6 +143,7 @@ test=system.ruby.l1_cntrl4.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -153,6 +163,7 @@ test=system.ruby.l1_cntrl5.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -172,6 +183,7 @@ test=system.ruby.l1_cntrl6.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -190,11 +202,13 @@ test=system.ruby.l1_cntrl7.sequencer.slave[0] [system.cpu_clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.funcbus] type=NoncoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 use_default_range=false width=8 @@ -206,6 +220,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=false latency=30 latency_var=0 @@ -218,6 +233,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -226,18 +242,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 l1_cntrl1 l1_cntrl2 l1_cntrl3 l1_cntrl4 l1_cntrl5 l1_cntrl6 l1_cntrl7 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 l1_cntrl1 l1_cntrl2 l1_cntrl3 l1_cntrl4 l1_cntrl5 l1_cntrl6 l1_cntrl7 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 
clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=8 random_seed=1234 randomization=false -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -245,8 +265,9 @@ type=Directory_Controller children=directory memBuffer probeFilter buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=8 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory +eventq_index=0 full_bit_dir_enabled=false memBuffer=system.ruby.dir_cntrl0.memBuffer memory_controller_latency=2 @@ -261,6 +282,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 map_levels=4 numa_high_bit=5 size=268435456 @@ -277,6 +299,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -294,6 +317,7 @@ type=RubyCache assoc=4 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=1 replacement_policy=PSEUDO_LRU @@ -312,7 +336,8 @@ L2cache=system.ruby.l1_cntrl0.L2cache buffer_size=0 cache_response_latency=10 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 +eventq_index=0 issue_latency=2 l2_cache_hit_latency=10 no_mig_atomic=true @@ -330,6 +355,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -344,6 +370,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=2 replacement_policy=PSEUDO_LRU @@ -358,6 +385,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=10 replacement_policy=PSEUDO_LRU @@ -373,6 +401,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl0.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -393,7 +422,8 @@ L2cache=system.ruby.l1_cntrl1.L2cache buffer_size=0 cache_response_latency=10 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 +eventq_index=0 issue_latency=2 l2_cache_hit_latency=10 no_mig_atomic=true @@ -411,6 +441,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -425,6 +456,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=2 replacement_policy=PSEUDO_LRU @@ -439,6 +471,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=10 replacement_policy=PSEUDO_LRU @@ -454,6 +487,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl1.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl1.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -474,7 +508,8 @@ L2cache=system.ruby.l1_cntrl2.L2cache buffer_size=0 cache_response_latency=10 clk_domain=system.ruby.clk_domain -cntrl_id=2 +cluster_id=0 +eventq_index=0 issue_latency=2 l2_cache_hit_latency=10 no_mig_atomic=true @@ -492,6 +527,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -506,6 +542,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=2 replacement_policy=PSEUDO_LRU @@ -520,6 
+557,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=10 replacement_policy=PSEUDO_LRU @@ -535,6 +573,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl2.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl2.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -555,7 +594,8 @@ L2cache=system.ruby.l1_cntrl3.L2cache buffer_size=0 cache_response_latency=10 clk_domain=system.ruby.clk_domain -cntrl_id=3 +cluster_id=0 +eventq_index=0 issue_latency=2 l2_cache_hit_latency=10 no_mig_atomic=true @@ -573,6 +613,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -587,6 +628,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=2 replacement_policy=PSEUDO_LRU @@ -601,6 +643,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=10 replacement_policy=PSEUDO_LRU @@ -616,6 +659,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl3.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl3.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -636,7 +680,8 @@ L2cache=system.ruby.l1_cntrl4.L2cache buffer_size=0 cache_response_latency=10 clk_domain=system.ruby.clk_domain -cntrl_id=4 +cluster_id=0 +eventq_index=0 issue_latency=2 l2_cache_hit_latency=10 no_mig_atomic=true @@ -654,6 +699,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -668,6 +714,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=2 replacement_policy=PSEUDO_LRU @@ -682,6 +729,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=10 replacement_policy=PSEUDO_LRU @@ -697,6 +745,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl4.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl4.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -717,7 +766,8 @@ L2cache=system.ruby.l1_cntrl5.L2cache buffer_size=0 cache_response_latency=10 clk_domain=system.ruby.clk_domain -cntrl_id=5 +cluster_id=0 +eventq_index=0 issue_latency=2 l2_cache_hit_latency=10 no_mig_atomic=true @@ -735,6 +785,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -749,6 +800,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=2 replacement_policy=PSEUDO_LRU @@ -763,6 +815,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=10 replacement_policy=PSEUDO_LRU @@ -778,6 +831,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl5.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl5.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -798,7 +852,8 @@ L2cache=system.ruby.l1_cntrl6.L2cache buffer_size=0 cache_response_latency=10 clk_domain=system.ruby.clk_domain -cntrl_id=6 +cluster_id=0 +eventq_index=0 issue_latency=2 l2_cache_hit_latency=10 no_mig_atomic=true @@ -816,6 +871,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 
replacement_policy=PSEUDO_LRU @@ -830,6 +886,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=2 replacement_policy=PSEUDO_LRU @@ -844,6 +901,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=10 replacement_policy=PSEUDO_LRU @@ -859,6 +917,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl6.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl6.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -879,7 +938,8 @@ L2cache=system.ruby.l1_cntrl7.L2cache buffer_size=0 cache_response_latency=10 clk_domain=system.ruby.clk_domain -cntrl_id=7 +cluster_id=0 +eventq_index=0 issue_latency=2 l2_cache_hit_latency=10 no_mig_atomic=true @@ -897,6 +957,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -911,6 +972,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=2 replacement_policy=PSEUDO_LRU @@ -925,6 +987,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=10 replacement_policy=PSEUDO_LRU @@ -940,6 +1003,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl7.L1Dcache deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl7.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -955,6 +1019,7 @@ slave=system.cpu7.test type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -964,6 +1029,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 system.ruby.network.ext_links2 system.ruby.network.ext_links3 system.ruby.network.ext_links4 system.ruby.network.ext_links5 system.ruby.network.ext_links6 system.ruby.network.ext_links7 system.ruby.network.ext_links8 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 system.ruby.network.int_links2 system.ruby.network.int_links3 system.ruby.network.int_links4 system.ruby.network.int_links5 system.ruby.network.int_links6 system.ruby.network.int_links7 system.ruby.network.int_links8 number_of_virtual_networks=10 @@ -974,6 +1040,7 @@ topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 int_node=system.ruby.network.routers0 latency=1 @@ -983,6 +1050,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl1 int_node=system.ruby.network.routers1 latency=1 @@ -992,6 +1060,7 @@ weight=1 [system.ruby.network.ext_links2] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl2 int_node=system.ruby.network.routers2 latency=1 @@ -1001,6 +1070,7 @@ weight=1 [system.ruby.network.ext_links3] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl3 int_node=system.ruby.network.routers3 latency=1 @@ -1010,6 +1080,7 @@ weight=1 [system.ruby.network.ext_links4] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl4 int_node=system.ruby.network.routers4 latency=1 @@ -1019,6 +1090,7 @@ weight=1 [system.ruby.network.ext_links5] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 
ext_node=system.ruby.l1_cntrl5 int_node=system.ruby.network.routers5 latency=1 @@ -1028,6 +1100,7 @@ weight=1 [system.ruby.network.ext_links6] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl6 int_node=system.ruby.network.routers6 latency=1 @@ -1037,6 +1110,7 @@ weight=1 [system.ruby.network.ext_links7] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl7 int_node=system.ruby.network.routers7 latency=1 @@ -1046,6 +1120,7 @@ weight=1 [system.ruby.network.ext_links8] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers8 latency=1 @@ -1055,6 +1130,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=9 node_a=system.ruby.network.routers0 @@ -1064,6 +1140,7 @@ weight=1 [system.ruby.network.int_links1] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=10 node_a=system.ruby.network.routers1 @@ -1073,6 +1150,7 @@ weight=1 [system.ruby.network.int_links2] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=11 node_a=system.ruby.network.routers2 @@ -1082,6 +1160,7 @@ weight=1 [system.ruby.network.int_links3] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=12 node_a=system.ruby.network.routers3 @@ -1091,6 +1170,7 @@ weight=1 [system.ruby.network.int_links4] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=13 node_a=system.ruby.network.routers4 @@ -1100,6 +1180,7 @@ weight=1 [system.ruby.network.int_links5] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=14 node_a=system.ruby.network.routers5 @@ -1109,6 +1190,7 @@ weight=1 [system.ruby.network.int_links6] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=15 node_a=system.ruby.network.routers6 @@ -1118,6 +1200,7 @@ weight=1 [system.ruby.network.int_links7] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=16 node_a=system.ruby.network.routers7 @@ -1127,6 +1210,7 @@ weight=1 [system.ruby.network.int_links8] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=17 node_a=system.ruby.network.routers8 @@ -1136,74 +1220,78 @@ weight=1 [system.ruby.network.routers0] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers1] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers2] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 [system.ruby.network.routers3] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=3 virt_nets=10 [system.ruby.network.routers4] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=4 virt_nets=10 [system.ruby.network.routers5] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=5 virt_nets=10 [system.ruby.network.routers6] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=6 virt_nets=10 [system.ruby.network.routers7] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=7 virt_nets=10 [system.ruby.network.routers8] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=8 virt_nets=10 [system.ruby.network.routers9] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=9 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false 
-num_of_sequencers=8 -ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -1215,5 +1303,6 @@ slave=system.system_port [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_hammer/simerr b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_hammer/simerr index a5b70c1a7..f2f8ae71c 100755 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_hammer/simerr +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_hammer/simerr @@ -79,4 +79,3 @@ system.cpu0: completed 90000 read, 48602 write accesses @5246963 system.cpu4: completed 90000 read, 48456 write accesses @5248509 system.cpu7: completed 90000 read, 48936 write accesses @5260982 system.cpu2: completed 100000 read, 54294 write accesses @5795833 -hack: be nice to actually delete the event here diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_hammer/simout b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_hammer/simout index 5f6410683..4c8b54d81 100755 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_hammer/simout +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_hammer/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA_MOESI_hammer/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MOESI_hammer/simout -Redirecting stderr to build/ALPHA_MOESI_hammer/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MOESI_hammer/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:17:28 -gem5 started Sep 22 2013 05:17:37 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:32:54 +gem5 started Jan 22 2014 17:25:37 +gem5 executing on u200540-lin command line: build/ALPHA_MOESI_hammer/gem5.opt -d build/ALPHA_MOESI_hammer/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MOESI_hammer -re tests/run.py build/ALPHA_MOESI_hammer/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby-MOESI_hammer Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_hammer/stats.txt b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_hammer/stats.txt index dccaff688..c7c9aeb58 100644 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_hammer/stats.txt +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby-MOESI_hammer/stats.txt @@ -4,9 +4,12 @@ sim_seconds 0.005796 # Nu sim_ticks 5795833 # Number of ticks simulated final_tick 5795833 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_tick_rate 40133 # Simulator tick rate (ticks/s) -host_mem_usage 303548 # Number of bytes of host memory used -host_seconds 144.41 # Real time elapsed on the host +host_tick_rate 66984 # Simulator tick rate (ticks/s) +host_mem_usage 260008 # Number of bytes of host memory used +host_seconds 86.53 # Real time elapsed on the host +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.outstanding_req_hist::bucket_size 2 system.ruby.outstanding_req_hist::max_bucket 19 system.ruby.outstanding_req_hist::samples 618244 @@ -280,6 +283,7 @@ system.ruby.network.routers7.msg_bytes.Writeback_Control::3 578720 system.ruby.network.routers7.msg_bytes.Writeback_Control::5 365344 system.ruby.network.routers7.msg_bytes.Broadcast_Control::3 4321008 system.ruby.network.routers7.msg_bytes.Unblock_Control::5 615240 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.dir_cntrl0.memBuffer.memReq 811546 # Total number of memory requests system.ruby.dir_cntrl0.memBuffer.memRead 597507 # Number of memory reads system.ruby.dir_cntrl0.memBuffer.memWrite 214013 # Number of memory writes @@ -356,6 +360,7 @@ system.ruby.network.msg_byte.Broadcast_Control 74035320 system.ruby.network.msg_byte.Unblock_Control 14822312 system.funcbus.throughput 0 # Throughput (bytes/s) system.funcbus.data_through_bus 0 # Total data (bytes) +system.cpu_clk_domain.clock 1 # Clock period in ticks system.cpu0.num_reads 99395 # number of read accesses completed system.cpu0.num_writes 53721 # number of write accesses completed system.cpu0.num_copies 0 # number of copy accesses completed diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby/config.ini b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby/config.ini index cd6eb6e26..717e12af4 100644 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby/config.ini +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,12 +14,13 @@ children=clk_domain cpu0 cpu1 cpu2 cpu3 cpu4 cpu5 cpu6 cpu7 cpu_clk_domain funcb boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 mem_mode=timing mem_ranges=0:268435455 -memories=system.funcmem system.physmem +memories=system.physmem system.funcmem num_work_ids=16 readfile= symbolfile= @@ -33,12 +36,14 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -58,6 +63,7 @@ 
test=system.ruby.l1_cntrl0.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -77,6 +83,7 @@ test=system.ruby.l1_cntrl1.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -96,6 +103,7 @@ test=system.ruby.l1_cntrl2.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -115,6 +123,7 @@ test=system.ruby.l1_cntrl3.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -134,6 +143,7 @@ test=system.ruby.l1_cntrl4.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -153,6 +163,7 @@ test=system.ruby.l1_cntrl5.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -172,6 +183,7 @@ test=system.ruby.l1_cntrl6.sequencer.slave[0] type=MemTest atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -190,11 +202,13 @@ test=system.ruby.l1_cntrl7.sequencer.slave[0] [system.cpu_clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.funcbus] type=NoncoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 use_default_range=false width=8 @@ -206,6 +220,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=false latency=30 latency_var=0 @@ -218,6 +233,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -226,18 +242,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 l1_cntrl1 l1_cntrl2 l1_cntrl3 l1_cntrl4 l1_cntrl5 l1_cntrl6 l1_cntrl7 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 l1_cntrl1 l1_cntrl2 l1_cntrl3 l1_cntrl4 l1_cntrl5 l1_cntrl6 l1_cntrl7 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=8 random_seed=1234 randomization=false -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -245,9 +265,10 @@ type=Directory_Controller children=directory memBuffer buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=8 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory directory_latency=12 +eventq_index=0 memBuffer=system.ruby.dir_cntrl0.memBuffer number_of_TBEs=256 peer=Null @@ -258,6 +279,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 map_levels=4 numa_high_bit=5 size=268435456 @@ -274,6 +296,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -293,7 +316,8 @@ buffer_size=0 cacheMemory=system.ruby.l1_cntrl0.cacheMemory cache_response_latency=12 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 +eventq_index=0 issue_latency=2 number_of_TBEs=256 
peer=Null @@ -309,6 +333,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -324,6 +349,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.cacheMemory deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl0.cacheMemory max_outstanding_requests=16 ruby_system=system.ruby @@ -342,7 +368,8 @@ buffer_size=0 cacheMemory=system.ruby.l1_cntrl1.cacheMemory cache_response_latency=12 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 +eventq_index=0 issue_latency=2 number_of_TBEs=256 peer=Null @@ -358,6 +385,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -373,6 +401,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl1.cacheMemory deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl1.cacheMemory max_outstanding_requests=16 ruby_system=system.ruby @@ -391,7 +420,8 @@ buffer_size=0 cacheMemory=system.ruby.l1_cntrl2.cacheMemory cache_response_latency=12 clk_domain=system.ruby.clk_domain -cntrl_id=2 +cluster_id=0 +eventq_index=0 issue_latency=2 number_of_TBEs=256 peer=Null @@ -407,6 +437,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -422,6 +453,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl2.cacheMemory deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl2.cacheMemory max_outstanding_requests=16 ruby_system=system.ruby @@ -440,7 +472,8 @@ buffer_size=0 cacheMemory=system.ruby.l1_cntrl3.cacheMemory cache_response_latency=12 clk_domain=system.ruby.clk_domain -cntrl_id=3 +cluster_id=0 +eventq_index=0 issue_latency=2 number_of_TBEs=256 peer=Null @@ -456,6 +489,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -471,6 +505,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl3.cacheMemory deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl3.cacheMemory max_outstanding_requests=16 ruby_system=system.ruby @@ -489,7 +524,8 @@ buffer_size=0 cacheMemory=system.ruby.l1_cntrl4.cacheMemory cache_response_latency=12 clk_domain=system.ruby.clk_domain -cntrl_id=4 +cluster_id=0 +eventq_index=0 issue_latency=2 number_of_TBEs=256 peer=Null @@ -505,6 +541,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -520,6 +557,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl4.cacheMemory deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl4.cacheMemory max_outstanding_requests=16 ruby_system=system.ruby @@ -538,7 +576,8 @@ buffer_size=0 cacheMemory=system.ruby.l1_cntrl5.cacheMemory cache_response_latency=12 clk_domain=system.ruby.clk_domain -cntrl_id=5 +cluster_id=0 +eventq_index=0 issue_latency=2 number_of_TBEs=256 peer=Null @@ -554,6 +593,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -569,6 +609,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl5.cacheMemory deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl5.cacheMemory max_outstanding_requests=16 
ruby_system=system.ruby @@ -587,7 +628,8 @@ buffer_size=0 cacheMemory=system.ruby.l1_cntrl6.cacheMemory cache_response_latency=12 clk_domain=system.ruby.clk_domain -cntrl_id=6 +cluster_id=0 +eventq_index=0 issue_latency=2 number_of_TBEs=256 peer=Null @@ -603,6 +645,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -618,6 +661,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl6.cacheMemory deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl6.cacheMemory max_outstanding_requests=16 ruby_system=system.ruby @@ -636,7 +680,8 @@ buffer_size=0 cacheMemory=system.ruby.l1_cntrl7.cacheMemory cache_response_latency=12 clk_domain=system.ruby.clk_domain -cntrl_id=7 +cluster_id=0 +eventq_index=0 issue_latency=2 number_of_TBEs=256 peer=Null @@ -652,6 +697,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -667,6 +713,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl7.cacheMemory deadlock_threshold=1000000 +eventq_index=0 icache=system.ruby.l1_cntrl7.cacheMemory max_outstanding_requests=16 ruby_system=system.ruby @@ -682,6 +729,7 @@ slave=system.cpu7.test type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -691,6 +739,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 system.ruby.network.ext_links2 system.ruby.network.ext_links3 system.ruby.network.ext_links4 system.ruby.network.ext_links5 system.ruby.network.ext_links6 system.ruby.network.ext_links7 system.ruby.network.ext_links8 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 system.ruby.network.int_links2 system.ruby.network.int_links3 system.ruby.network.int_links4 system.ruby.network.int_links5 system.ruby.network.int_links6 system.ruby.network.int_links7 system.ruby.network.int_links8 number_of_virtual_networks=10 @@ -701,6 +750,7 @@ topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 int_node=system.ruby.network.routers0 latency=1 @@ -710,6 +760,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl1 int_node=system.ruby.network.routers1 latency=1 @@ -719,6 +770,7 @@ weight=1 [system.ruby.network.ext_links2] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl2 int_node=system.ruby.network.routers2 latency=1 @@ -728,6 +780,7 @@ weight=1 [system.ruby.network.ext_links3] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl3 int_node=system.ruby.network.routers3 latency=1 @@ -737,6 +790,7 @@ weight=1 [system.ruby.network.ext_links4] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl4 int_node=system.ruby.network.routers4 latency=1 @@ -746,6 +800,7 @@ weight=1 [system.ruby.network.ext_links5] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl5 int_node=system.ruby.network.routers5 latency=1 @@ -755,6 +810,7 @@ weight=1 [system.ruby.network.ext_links6] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl6 
int_node=system.ruby.network.routers6 latency=1 @@ -764,6 +820,7 @@ weight=1 [system.ruby.network.ext_links7] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl7 int_node=system.ruby.network.routers7 latency=1 @@ -773,6 +830,7 @@ weight=1 [system.ruby.network.ext_links8] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers8 latency=1 @@ -782,6 +840,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=9 node_a=system.ruby.network.routers0 @@ -791,6 +850,7 @@ weight=1 [system.ruby.network.int_links1] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=10 node_a=system.ruby.network.routers1 @@ -800,6 +860,7 @@ weight=1 [system.ruby.network.int_links2] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=11 node_a=system.ruby.network.routers2 @@ -809,6 +870,7 @@ weight=1 [system.ruby.network.int_links3] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=12 node_a=system.ruby.network.routers3 @@ -818,6 +880,7 @@ weight=1 [system.ruby.network.int_links4] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=13 node_a=system.ruby.network.routers4 @@ -827,6 +890,7 @@ weight=1 [system.ruby.network.int_links5] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=14 node_a=system.ruby.network.routers5 @@ -836,6 +900,7 @@ weight=1 [system.ruby.network.int_links6] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=15 node_a=system.ruby.network.routers6 @@ -845,6 +910,7 @@ weight=1 [system.ruby.network.int_links7] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=16 node_a=system.ruby.network.routers7 @@ -854,6 +920,7 @@ weight=1 [system.ruby.network.int_links8] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=17 node_a=system.ruby.network.routers8 @@ -863,74 +930,78 @@ weight=1 [system.ruby.network.routers0] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers1] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers2] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 [system.ruby.network.routers3] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=3 virt_nets=10 [system.ruby.network.routers4] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=4 virt_nets=10 [system.ruby.network.routers5] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=5 virt_nets=10 [system.ruby.network.routers6] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=6 virt_nets=10 [system.ruby.network.routers7] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=7 virt_nets=10 [system.ruby.network.routers8] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=8 virt_nets=10 [system.ruby.network.routers9] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=9 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false -num_of_sequencers=8 -ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -942,5 +1013,6 @@ 
slave=system.system_port [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby/simerr b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby/simerr index 082530ace..5a7e36bf9 100755 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby/simerr +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby/simerr @@ -79,4 +79,3 @@ system.cpu3: completed 90000 read, 49017 write accesses @7850422 system.cpu5: completed 90000 read, 49075 write accesses @7851926 system.cpu4: completed 90000 read, 49432 write accesses @7874435 system.cpu7: completed 100000 read, 53796 write accesses @8664886 -hack: be nice to actually delete the event here diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby/simout b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby/simout index 1c746b09a..1137a773d 100755 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby/simout +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby/simout -Redirecting stderr to build/ALPHA/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 24 2013 03:08:53 -gem5 started Sep 28 2013 03:05:49 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:27:55 +gem5 started Jan 22 2014 17:24:32 +gem5 executing on u200540-lin command line: build/ALPHA/gem5.opt -d build/ALPHA/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby -re tests/run.py build/ALPHA/tests/opt/quick/se/50.memtest/alpha/linux/memtest-ruby Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
diff --git a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby/stats.txt b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby/stats.txt index d75cefcef..ab5af9666 100644 --- a/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby/stats.txt +++ b/tests/quick/se/50.memtest/ref/alpha/linux/memtest-ruby/stats.txt @@ -4,9 +4,12 @@ sim_seconds 0.008665 # Nu sim_ticks 8664886 # Number of ticks simulated final_tick 8664886 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_tick_rate 151755 # Simulator tick rate (ticks/s) -host_mem_usage 301056 # Number of bytes of host memory used -host_seconds 57.10 # Real time elapsed on the host +host_tick_rate 261960 # Simulator tick rate (ticks/s) +host_mem_usage 257512 # Number of bytes of host memory used +host_seconds 33.08 # Real time elapsed on the host +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.delayHist::bucket_size 4 # delay histogram for all message system.ruby.delayHist::max_bucket 39 # delay histogram for all message system.ruby.delayHist::samples 1237687 # delay histogram for all message @@ -136,6 +139,7 @@ system.ruby.network.routers7.msg_bytes.Control::2 618216 system.ruby.network.routers7.msg_bytes.Data::2 5512896 system.ruby.network.routers7.msg_bytes.Response_Data::4 5637312 system.ruby.network.routers7.msg_bytes.Writeback_Control::3 620680 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.dir_cntrl0.memBuffer.memReq 1218678 # Total number of memory requests system.ruby.dir_cntrl0.memBuffer.memRead 609346 # Number of memory reads system.ruby.dir_cntrl0.memBuffer.memWrite 609308 # Number of memory writes @@ -181,6 +185,7 @@ system.ruby.network.msg_byte.Response_Data 133391376 system.ruby.network.msg_byte.Writeback_Control 14883240 system.funcbus.throughput 0 # Throughput (bytes/s) system.funcbus.data_through_bus 0 # Total data (bytes) +system.cpu_clk_domain.clock 1 # Clock period in ticks system.cpu0.num_reads 99885 # number of read accesses completed system.cpu0.num_writes 54375 # number of write accesses completed system.cpu0.num_copies 0 # number of copy accesses completed diff --git a/tests/quick/se/50.memtest/ref/null/none/memtest/config.ini b/tests/quick/se/50.memtest/ref/null/none/memtest/config.ini index 0b50bed4c..dd37e6b1e 100644 --- a/tests/quick/se/50.memtest/ref/null/none/memtest/config.ini +++ b/tests/quick/se/50.memtest/ref/null/none/memtest/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000000 time_sync_spin_threshold=100000000 @@ -12,12 +14,13 @@ children=clk_domain cpu0 cpu1 cpu2 cpu3 cpu4 cpu5 cpu6 cpu7 cpu_clk_domain funcb boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 mem_mode=timing mem_ranges= -memories=system.physmem system.funcmem +memories=system.funcmem system.physmem num_work_ids=16 readfile= symbolfile= @@ -33,6 +36,7 @@ system_port=system.membus.slave[1] [system.clk_domain] type=SrcClockDomain clock=1000 +eventq_index=0 voltage_domain=system.voltage_domain [system.cpu0] @@ -40,6 +44,7 @@ type=MemTest children=l1c atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -61,6 +66,7 @@ children=tags 
addr_ranges=0:18446744073709551615 assoc=4 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -69,6 +75,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu0.l1c.tags @@ -83,7 +90,9 @@ type=LRU assoc=4 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu1] @@ -91,6 +100,7 @@ type=MemTest children=l1c atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -112,6 +122,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=4 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -120,6 +131,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu1.l1c.tags @@ -134,7 +146,9 @@ type=LRU assoc=4 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu2] @@ -142,6 +156,7 @@ type=MemTest children=l1c atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -163,6 +178,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=4 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -171,6 +187,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu2.l1c.tags @@ -185,7 +202,9 @@ type=LRU assoc=4 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu3] @@ -193,6 +212,7 @@ type=MemTest children=l1c atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -214,6 +234,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=4 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -222,6 +243,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu3.l1c.tags @@ -236,7 +258,9 @@ type=LRU assoc=4 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu4] @@ -244,6 +268,7 @@ type=MemTest children=l1c atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -265,6 +290,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=4 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -273,6 +299,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu4.l1c.tags @@ -287,7 +314,9 @@ type=LRU assoc=4 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu5] @@ -295,6 +324,7 @@ type=MemTest children=l1c atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -316,6 +346,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=4 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -324,6 +355,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null 
response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu5.l1c.tags @@ -338,7 +370,9 @@ type=LRU assoc=4 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu6] @@ -346,6 +380,7 @@ type=MemTest children=l1c atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -367,6 +402,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=4 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -375,6 +411,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu6.l1c.tags @@ -389,7 +426,9 @@ type=LRU assoc=4 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu7] @@ -397,6 +436,7 @@ type=MemTest children=l1c atomic=false clk_domain=system.cpu_clk_domain +eventq_index=0 issue_dmas=false max_loads=100000 memory_size=65536 @@ -418,6 +458,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=4 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=2 is_top_level=true @@ -426,6 +467,7 @@ mshrs=4 prefetch_on_access=false prefetcher=Null response_latency=2 +sequential_access=false size=32768 system=system tags=system.cpu7.l1c.tags @@ -440,17 +482,21 @@ type=LRU assoc=4 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=2 +sequential_access=false size=32768 [system.cpu_clk_domain] type=SrcClockDomain clock=500 +eventq_index=0 voltage_domain=system.voltage_domain [system.funcbus] type=NoncoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 use_default_range=false width=8 @@ -462,6 +508,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=false latency=30000 latency_var=0 @@ -475,6 +522,7 @@ children=tags addr_ranges=0:18446744073709551615 assoc=8 clk_domain=system.cpu_clk_domain +eventq_index=0 forward_snoops=true hit_latency=20 is_top_level=false @@ -483,6 +531,7 @@ mshrs=20 prefetch_on_access=false prefetcher=Null response_latency=20 +sequential_access=false size=65536 system=system tags=system.l2c.tags @@ -497,12 +546,15 @@ type=LRU assoc=8 block_size=64 clk_domain=system.cpu_clk_domain +eventq_index=0 hit_latency=20 +sequential_access=false size=65536 [system.membus] type=CoherentBus clk_domain=system.clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -515,6 +567,7 @@ type=SimpleMemory bandwidth=73.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30000 latency_var=0 @@ -525,6 +578,7 @@ port=system.membus.master[0] [system.toL2Bus] type=CoherentBus clk_domain=system.cpu_clk_domain +eventq_index=0 header_cycles=1 system=system use_default_range=false @@ -534,5 +588,6 @@ slave=system.cpu0.l1c.mem_side system.cpu1.l1c.mem_side system.cpu2.l1c.mem_side [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/50.memtest/ref/null/none/memtest/simerr b/tests/quick/se/50.memtest/ref/null/none/memtest/simerr index ad8539d90..084f6f6ab 100755 --- a/tests/quick/se/50.memtest/ref/null/none/memtest/simerr +++ b/tests/quick/se/50.memtest/ref/null/none/memtest/simerr @@ -71,4 +71,3 @@ system.cpu2: completed 90000 read, 48395 write accesses @591584000 system.cpu7: completed 90000 read, 
48496 write accesses @592485000 system.cpu0: completed 90000 read, 48680 write accesses @594831500 system.cpu3: completed 100000 read, 53536 write accesses @652606500 -hack: be nice to actually delete the event here diff --git a/tests/quick/se/50.memtest/ref/null/none/memtest/simout b/tests/quick/se/50.memtest/ref/null/none/memtest/simout index de32ac2d8..831211e6c 100755 --- a/tests/quick/se/50.memtest/ref/null/none/memtest/simout +++ b/tests/quick/se/50.memtest/ref/null/none/memtest/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/NULL/tests/opt/quick/se/50.memtest/null/none/memtest/simout -Redirecting stderr to build/NULL/tests/opt/quick/se/50.memtest/null/none/memtest/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:53:51 -gem5 started Sep 22 2013 05:53:54 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:54:17 +gem5 started Jan 22 2014 17:28:45 +gem5 executing on u200540-lin command line: build/NULL/gem5.opt -d build/NULL/tests/opt/quick/se/50.memtest/null/none/memtest -re tests/run.py build/NULL/tests/opt/quick/se/50.memtest/null/none/memtest Global frequency set at 1000000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/50.memtest/ref/null/none/memtest/stats.txt b/tests/quick/se/50.memtest/ref/null/none/memtest/stats.txt index 6f84c5ba1..d30a7aa16 100644 --- a/tests/quick/se/50.memtest/ref/null/none/memtest/stats.txt +++ b/tests/quick/se/50.memtest/ref/null/none/memtest/stats.txt @@ -4,9 +4,11 @@ sim_seconds 0.000653 # Nu sim_ticks 652606500 # Number of ticks simulated final_tick 652606500 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000000 # Frequency of simulated ticks -host_tick_rate 158104978 # Simulator tick rate (ticks/s) -host_mem_usage 355504 # Number of bytes of host memory used -host_seconds 4.13 # Real time elapsed on the host +host_tick_rate 148113487 # Simulator tick rate (ticks/s) +host_mem_usage 336812 # Number of bytes of host memory used +host_seconds 4.41 # Real time elapsed on the host +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1000 # Clock period in ticks system.physmem.bytes_read::cpu0 80014 # Number of bytes read from this memory system.physmem.bytes_read::cpu1 82049 # Number of bytes read from this memory system.physmem.bytes_read::cpu2 81047 # Number of bytes read from this memory @@ -94,6 +96,7 @@ system.membus.reqLayer0.occupancy 286485584 # La system.membus.reqLayer0.utilization 43.9 # Layer utilization (%) system.membus.respLayer0.occupancy 311361500 # Layer occupancy (ticks) system.membus.respLayer0.utilization 47.7 # Layer utilization (%) +system.cpu_clk_domain.clock 500 # Clock period in ticks system.l2c.tags.replacements 13254 # number of replacements system.l2c.tags.tagsinuse 783.820018 # Cycle average of tags in use system.l2c.tags.total_refs 149317 # Total number of references to valid blocks. 
@@ -119,6 +122,12 @@ system.l2c.tags.occ_percent::cpu5 0.006825 # Av system.l2c.tags.occ_percent::cpu6 0.006582 # Average percentage of cache occupancy system.l2c.tags.occ_percent::cpu7 0.006846 # Average percentage of cache occupancy system.l2c.tags.occ_percent::total 0.765449 # Average percentage of cache occupancy +system.l2c.tags.occ_task_id_blocks::1024 811 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::0 611 # Occupied blocks per task id +system.l2c.tags.age_task_id_blocks_1024::1 200 # Occupied blocks per task id +system.l2c.tags.occ_task_id_percent::1024 0.791992 # Percentage of cache occupancy per task id +system.l2c.tags.tag_accesses 1942968 # Number of tag accesses +system.l2c.tags.data_accesses 1942968 # Number of data accesses system.l2c.ReadReq_hits::cpu0 10635 # number of ReadReq hits system.l2c.ReadReq_hits::cpu1 10552 # number of ReadReq hits system.l2c.ReadReq_hits::cpu2 10744 # number of ReadReq hits @@ -740,6 +749,12 @@ system.cpu0.l1c.tags.warmup_cycle 0 # Cy system.cpu0.l1c.tags.occ_blocks::cpu0 393.709596 # Average occupied blocks per requestor system.cpu0.l1c.tags.occ_percent::cpu0 0.768964 # Average percentage of cache occupancy system.cpu0.l1c.tags.occ_percent::total 0.768964 # Average percentage of cache occupancy +system.cpu0.l1c.tags.occ_task_id_blocks::1024 400 # Occupied blocks per task id +system.cpu0.l1c.tags.age_task_id_blocks_1024::0 380 # Occupied blocks per task id +system.cpu0.l1c.tags.age_task_id_blocks_1024::1 20 # Occupied blocks per task id +system.cpu0.l1c.tags.occ_task_id_percent::1024 0.781250 # Percentage of cache occupancy per task id +system.cpu0.l1c.tags.tag_accesses 330568 # Number of tag accesses +system.cpu0.l1c.tags.data_accesses 330568 # Number of data accesses system.cpu0.l1c.ReadReq_hits::cpu0 8685 # number of ReadReq hits system.cpu0.l1c.ReadReq_hits::total 8685 # number of ReadReq hits system.cpu0.l1c.WriteReq_hits::cpu0 1118 # number of WriteReq hits @@ -855,6 +870,12 @@ system.cpu1.l1c.tags.warmup_cycle 0 # Cy system.cpu1.l1c.tags.occ_blocks::cpu1 395.298418 # Average occupied blocks per requestor system.cpu1.l1c.tags.occ_percent::cpu1 0.772067 # Average percentage of cache occupancy system.cpu1.l1c.tags.occ_percent::total 0.772067 # Average percentage of cache occupancy +system.cpu1.l1c.tags.occ_task_id_blocks::1024 407 # Occupied blocks per task id +system.cpu1.l1c.tags.age_task_id_blocks_1024::0 374 # Occupied blocks per task id +system.cpu1.l1c.tags.age_task_id_blocks_1024::1 33 # Occupied blocks per task id +system.cpu1.l1c.tags.occ_task_id_percent::1024 0.794922 # Percentage of cache occupancy per task id +system.cpu1.l1c.tags.tag_accesses 332439 # Number of tag accesses +system.cpu1.l1c.tags.data_accesses 332439 # Number of data accesses system.cpu1.l1c.ReadReq_hits::cpu1 8757 # number of ReadReq hits system.cpu1.l1c.ReadReq_hits::total 8757 # number of ReadReq hits system.cpu1.l1c.WriteReq_hits::cpu1 1135 # number of WriteReq hits @@ -970,6 +991,12 @@ system.cpu2.l1c.tags.warmup_cycle 0 # Cy system.cpu2.l1c.tags.occ_blocks::cpu2 394.859577 # Average occupied blocks per requestor system.cpu2.l1c.tags.occ_percent::cpu2 0.771210 # Average percentage of cache occupancy system.cpu2.l1c.tags.occ_percent::total 0.771210 # Average percentage of cache occupancy +system.cpu2.l1c.tags.occ_task_id_blocks::1024 400 # Occupied blocks per task id +system.cpu2.l1c.tags.age_task_id_blocks_1024::0 381 # Occupied blocks per task id +system.cpu2.l1c.tags.age_task_id_blocks_1024::1 19 # Occupied blocks per task id 
+system.cpu2.l1c.tags.occ_task_id_percent::1024 0.781250 # Percentage of cache occupancy per task id +system.cpu2.l1c.tags.tag_accesses 331261 # Number of tag accesses +system.cpu2.l1c.tags.data_accesses 331261 # Number of data accesses system.cpu2.l1c.ReadReq_hits::cpu2 8708 # number of ReadReq hits system.cpu2.l1c.ReadReq_hits::total 8708 # number of ReadReq hits system.cpu2.l1c.WriteReq_hits::cpu2 1070 # number of WriteReq hits @@ -1085,6 +1112,12 @@ system.cpu3.l1c.tags.warmup_cycle 0 # Cy system.cpu3.l1c.tags.occ_blocks::cpu3 397.838914 # Average occupied blocks per requestor system.cpu3.l1c.tags.occ_percent::cpu3 0.777029 # Average percentage of cache occupancy system.cpu3.l1c.tags.occ_percent::total 0.777029 # Average percentage of cache occupancy +system.cpu3.l1c.tags.occ_task_id_blocks::1024 398 # Occupied blocks per task id +system.cpu3.l1c.tags.age_task_id_blocks_1024::0 380 # Occupied blocks per task id +system.cpu3.l1c.tags.age_task_id_blocks_1024::1 18 # Occupied blocks per task id +system.cpu3.l1c.tags.occ_task_id_percent::1024 0.777344 # Percentage of cache occupancy per task id +system.cpu3.l1c.tags.tag_accesses 331508 # Number of tag accesses +system.cpu3.l1c.tags.data_accesses 331508 # Number of data accesses system.cpu3.l1c.ReadReq_hits::cpu3 8781 # number of ReadReq hits system.cpu3.l1c.ReadReq_hits::total 8781 # number of ReadReq hits system.cpu3.l1c.WriteReq_hits::cpu3 1109 # number of WriteReq hits @@ -1200,6 +1233,12 @@ system.cpu4.l1c.tags.warmup_cycle 0 # Cy system.cpu4.l1c.tags.occ_blocks::cpu4 393.544066 # Average occupied blocks per requestor system.cpu4.l1c.tags.occ_percent::cpu4 0.768641 # Average percentage of cache occupancy system.cpu4.l1c.tags.occ_percent::total 0.768641 # Average percentage of cache occupancy +system.cpu4.l1c.tags.occ_task_id_blocks::1024 404 # Occupied blocks per task id +system.cpu4.l1c.tags.age_task_id_blocks_1024::0 378 # Occupied blocks per task id +system.cpu4.l1c.tags.age_task_id_blocks_1024::1 26 # Occupied blocks per task id +system.cpu4.l1c.tags.occ_task_id_percent::1024 0.789062 # Percentage of cache occupancy per task id +system.cpu4.l1c.tags.tag_accesses 331555 # Number of tag accesses +system.cpu4.l1c.tags.data_accesses 331555 # Number of data accesses system.cpu4.l1c.ReadReq_hits::cpu4 8712 # number of ReadReq hits system.cpu4.l1c.ReadReq_hits::total 8712 # number of ReadReq hits system.cpu4.l1c.WriteReq_hits::cpu4 1102 # number of WriteReq hits @@ -1315,6 +1354,12 @@ system.cpu5.l1c.tags.warmup_cycle 0 # Cy system.cpu5.l1c.tags.occ_blocks::cpu5 395.592742 # Average occupied blocks per requestor system.cpu5.l1c.tags.occ_percent::cpu5 0.772642 # Average percentage of cache occupancy system.cpu5.l1c.tags.occ_percent::total 0.772642 # Average percentage of cache occupancy +system.cpu5.l1c.tags.occ_task_id_blocks::1024 399 # Occupied blocks per task id +system.cpu5.l1c.tags.age_task_id_blocks_1024::0 375 # Occupied blocks per task id +system.cpu5.l1c.tags.age_task_id_blocks_1024::1 24 # Occupied blocks per task id +system.cpu5.l1c.tags.occ_task_id_percent::1024 0.779297 # Percentage of cache occupancy per task id +system.cpu5.l1c.tags.tag_accesses 332072 # Number of tag accesses +system.cpu5.l1c.tags.data_accesses 332072 # Number of data accesses system.cpu5.l1c.ReadReq_hits::cpu5 8824 # number of ReadReq hits system.cpu5.l1c.ReadReq_hits::total 8824 # number of ReadReq hits system.cpu5.l1c.WriteReq_hits::cpu5 1160 # number of WriteReq hits @@ -1430,6 +1475,12 @@ system.cpu6.l1c.tags.warmup_cycle 0 # Cy 
system.cpu6.l1c.tags.occ_blocks::cpu6 395.582005 # Average occupied blocks per requestor system.cpu6.l1c.tags.occ_percent::cpu6 0.772621 # Average percentage of cache occupancy system.cpu6.l1c.tags.occ_percent::total 0.772621 # Average percentage of cache occupancy +system.cpu6.l1c.tags.occ_task_id_blocks::1024 408 # Occupied blocks per task id +system.cpu6.l1c.tags.age_task_id_blocks_1024::0 376 # Occupied blocks per task id +system.cpu6.l1c.tags.age_task_id_blocks_1024::1 32 # Occupied blocks per task id +system.cpu6.l1c.tags.occ_task_id_percent::1024 0.796875 # Percentage of cache occupancy per task id +system.cpu6.l1c.tags.tag_accesses 332017 # Number of tag accesses +system.cpu6.l1c.tags.data_accesses 332017 # Number of data accesses system.cpu6.l1c.ReadReq_hits::cpu6 8715 # number of ReadReq hits system.cpu6.l1c.ReadReq_hits::total 8715 # number of ReadReq hits system.cpu6.l1c.WriteReq_hits::cpu6 1094 # number of WriteReq hits @@ -1545,6 +1596,12 @@ system.cpu7.l1c.tags.warmup_cycle 0 # Cy system.cpu7.l1c.tags.occ_blocks::cpu7 394.587693 # Average occupied blocks per requestor system.cpu7.l1c.tags.occ_percent::cpu7 0.770679 # Average percentage of cache occupancy system.cpu7.l1c.tags.occ_percent::total 0.770679 # Average percentage of cache occupancy +system.cpu7.l1c.tags.occ_task_id_blocks::1024 401 # Occupied blocks per task id +system.cpu7.l1c.tags.age_task_id_blocks_1024::0 370 # Occupied blocks per task id +system.cpu7.l1c.tags.age_task_id_blocks_1024::1 31 # Occupied blocks per task id +system.cpu7.l1c.tags.occ_task_id_percent::1024 0.783203 # Percentage of cache occupancy per task id +system.cpu7.l1c.tags.tag_accesses 331300 # Number of tag accesses +system.cpu7.l1c.tags.data_accesses 331300 # Number of data accesses system.cpu7.l1c.ReadReq_hits::cpu7 8635 # number of ReadReq hits system.cpu7.l1c.ReadReq_hits::total 8635 # number of ReadReq hits system.cpu7.l1c.WriteReq_hits::cpu7 1078 # number of WriteReq hits diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MESI_Two_Level/simerr b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MESI_Two_Level/simerr index f5d2abbce..2b5c3b11d 100755 --- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MESI_Two_Level/simerr +++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MESI_Two_Level/simerr @@ -2,4 +2,3 @@ warn: rounding error > tolerance 0.072760 rounded to 0 warn: rounding error > tolerance 0.072760 rounded to 0 -hack: be nice to actually delete the event here diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MESI_Two_Level/simout b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MESI_Two_Level/simout index 95d13e969..941dddfc7 100755 --- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MESI_Two_Level/simout +++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MESI_Two_Level/simout @@ -1,12 +1,10 @@ -Redirecting stdout to build/ALPHA_MESI_CMP_directory/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MESI_CMP_directory/simout -Redirecting stderr to build/ALPHA_MESI_CMP_directory/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MESI_CMP_directory/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. 
-gem5 compiled Sep 22 2013 05:27:02 -gem5 started Sep 22 2013 05:27:12 -gem5 executing on zizzer -command line: build/ALPHA_MESI_CMP_directory/gem5.opt -d build/ALPHA_MESI_CMP_directory/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MESI_CMP_directory -re tests/run.py build/ALPHA_MESI_CMP_directory/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MESI_CMP_directory +gem5 compiled Jan 22 2014 16:37:52 +gem5 started Jan 22 2014 17:26:11 +gem5 executing on u200540-lin +command line: build/ALPHA_MESI_Two_Level/gem5.opt -d build/ALPHA_MESI_Two_Level/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MESI_Two_Level -re tests/run.py build/ALPHA_MESI_Two_Level/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MESI_Two_Level Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... Exiting @ tick 318321 because Ruby Tester completed diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MESI_Two_Level/stats.txt b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MESI_Two_Level/stats.txt index 07c10314c..c90fc8b43 100644 --- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MESI_Two_Level/stats.txt +++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MESI_Two_Level/stats.txt @@ -4,9 +4,12 @@ sim_seconds 0.000318 # Nu sim_ticks 318321 # Number of ticks simulated final_tick 318321 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_tick_rate 1524477 # Simulator tick rate (ticks/s) -host_mem_usage 167184 # Number of bytes of host memory used -host_seconds 0.21 # Real time elapsed on the host +host_tick_rate 2101304 # Simulator tick rate (ticks/s) +host_mem_usage 123592 # Number of bytes of host memory used +host_seconds 0.15 # Real time elapsed on the host +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.delayHist::bucket_size 256 # delay histogram for all message system.ruby.delayHist::max_bucket 2559 # delay histogram for all message system.ruby.delayHist::samples 7069 # delay histogram for all message @@ -102,6 +105,7 @@ system.ruby.network.routers1.msg_bytes.Response_Control::2 6864 system.ruby.network.routers1.msg_bytes.Writeback_Data::0 51984 system.ruby.network.routers1.msg_bytes.Writeback_Data::1 36936 system.ruby.network.routers1.msg_bytes.Writeback_Control::0 272 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.dir_cntrl0.memBuffer.memReq 1660 # Total number of memory requests system.ruby.dir_cntrl0.memBuffer.memRead 874 # Number of memory reads system.ruby.dir_cntrl0.memBuffer.memWrite 786 # Number of memory writes diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_directory/config.ini b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_directory/config.ini index 7b4a4a030..683949732 100644 --- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_directory/config.ini +++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_directory/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,6 +14,7 @@ children=clk_domain physmem ruby sys_port_proxy tester voltage_domain boot_osflags=a cache_line_size=64 
clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.physmem] @@ -40,6 +44,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -48,18 +53,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 l2_cntrl0 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 l2_cntrl0 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=1 random_seed=1234 randomization=true -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -67,9 +76,10 @@ type=Directory_Controller children=directory memBuffer buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=2 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory directory_latency=6 +eventq_index=0 memBuffer=system.ruby.dir_cntrl0.memBuffer number_of_TBEs=256 peer=Null @@ -80,6 +90,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 map_levels=4 numa_high_bit=5 size=268435456 @@ -96,6 +107,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -115,7 +127,8 @@ L1Dcache=system.ruby.l1_cntrl0.L1Dcache L1Icache=system.ruby.l1_cntrl0.L1Icache buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 +eventq_index=0 l2_select_num_bits=0 number_of_TBEs=256 peer=Null @@ -133,6 +146,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=3 replacement_policy=PSEUDO_LRU @@ -147,6 +161,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=3 replacement_policy=PSEUDO_LRU @@ -162,6 +177,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.L1Dcache deadlock_threshold=500000 +eventq_index=0 icache=system.ruby.l1_cntrl0.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -179,7 +195,8 @@ children=L2cache L2cache=system.ruby.l2_cntrl0.L2cache buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 +eventq_index=0 number_of_TBEs=256 peer=Null recycle_latency=10 @@ -194,6 +211,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=15 replacement_policy=PSEUDO_LRU @@ -207,6 +225,7 @@ tagArrayBanks=1 type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -216,6 +235,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 system.ruby.network.ext_links2 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 system.ruby.network.int_links2 number_of_virtual_networks=10 @@ -226,6 +246,7 @@ topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 
int_node=system.ruby.network.routers0 latency=1 @@ -235,6 +256,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l2_cntrl0 int_node=system.ruby.network.routers1 latency=1 @@ -244,6 +266,7 @@ weight=1 [system.ruby.network.ext_links2] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers2 latency=1 @@ -253,6 +276,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=3 node_a=system.ruby.network.routers0 @@ -262,6 +286,7 @@ weight=1 [system.ruby.network.int_links1] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=4 node_a=system.ruby.network.routers1 @@ -271,6 +296,7 @@ weight=1 [system.ruby.network.int_links2] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=5 node_a=system.ruby.network.routers2 @@ -280,38 +306,36 @@ weight=1 [system.ruby.network.routers0] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers1] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers2] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 [system.ruby.network.routers3] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=3 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false -num_of_sequencers=1 -ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -327,6 +351,7 @@ check_flush=false checks_to_complete=100 clk_domain=system.clk_domain deadlock_threshold=50000 +eventq_index=0 num_cpus=1 system=system wakeup_frequency=10 @@ -335,5 +360,6 @@ cpuInstPort=system.ruby.l1_cntrl0.sequencer.slave[1] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_directory/simerr b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_directory/simerr index f5d2abbce..2b5c3b11d 100755 --- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_directory/simerr +++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_directory/simerr @@ -2,4 +2,3 @@ warn: rounding error > tolerance 0.072760 rounded to 0 warn: rounding error > tolerance 0.072760 rounded to 0 -hack: be nice to actually delete the event here diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_directory/simout b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_directory/simout index 2167c1256..b4257bd2c 100755 --- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_directory/simout +++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_directory/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA_MOESI_CMP_directory/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MOESI_CMP_directory/simout -Redirecting stderr to build/ALPHA_MOESI_CMP_directory/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MOESI_CMP_directory/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. 
-gem5 compiled Sep 22 2013 05:36:12 -gem5 started Sep 22 2013 05:36:22 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:42:56 +gem5 started Jan 22 2014 17:27:15 +gem5 executing on u200540-lin command line: build/ALPHA_MOESI_CMP_directory/gem5.opt -d build/ALPHA_MOESI_CMP_directory/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MOESI_CMP_directory -re tests/run.py build/ALPHA_MOESI_CMP_directory/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MOESI_CMP_directory Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_directory/stats.txt b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_directory/stats.txt index 6245a45ed..aefa03a40 100644 --- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_directory/stats.txt +++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_directory/stats.txt @@ -4,9 +4,12 @@ sim_seconds 0.000327 # Nu sim_ticks 327361 # Number of ticks simulated final_tick 327361 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_tick_rate 557303 # Simulator tick rate (ticks/s) -host_mem_usage 169404 # Number of bytes of host memory used -host_seconds 0.59 # Real time elapsed on the host +host_tick_rate 771883 # Simulator tick rate (ticks/s) +host_mem_usage 124808 # Number of bytes of host memory used +host_seconds 0.42 # Real time elapsed on the host +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.outstanding_req_hist::bucket_size 2 system.ruby.outstanding_req_hist::max_bucket 19 system.ruby.outstanding_req_hist::samples 1000 @@ -84,6 +87,7 @@ system.ruby.network.routers1.msg_bytes.Writeback_Control::0 14448 system.ruby.network.routers1.msg_bytes.Writeback_Control::1 13536 system.ruby.network.routers1.msg_bytes.Writeback_Control::2 640 system.ruby.network.routers1.msg_bytes.Unblock_Control::2 14064 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.dir_cntrl0.memBuffer.memReq 1619 # Total number of memory requests system.ruby.dir_cntrl0.memBuffer.memRead 854 # Number of memory reads system.ruby.dir_cntrl0.memBuffer.memWrite 765 # Number of memory writes diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_token/config.ini b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_token/config.ini index 7d3b90ce9..323ed5f61 100644 --- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_token/config.ini +++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_token/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,6 +14,7 @@ children=clk_domain physmem ruby sys_port_proxy tester voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.physmem] @@ -40,6 +44,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 
in_addr_map=true latency=30 latency_var=0 @@ -48,18 +53,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 l2_cntrl0 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 l2_cntrl0 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=1 random_seed=1234 randomization=true -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -67,10 +76,11 @@ type=Directory_Controller children=directory memBuffer buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=2 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory directory_latency=5 distributed_persistent=true +eventq_index=0 fixed_timeout_latency=100 l2_select_num_bits=0 memBuffer=system.ruby.dir_cntrl0.memBuffer @@ -84,6 +94,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 map_levels=4 numa_high_bit=5 size=268435456 @@ -100,6 +111,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -120,8 +132,9 @@ L1Icache=system.ruby.l1_cntrl0.L1Icache N_tokens=2 buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 dynamic_timeout_enabled=true +eventq_index=0 fixed_timeout_latency=300 l1_request_latency=2 l1_response_latency=2 @@ -144,6 +157,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -158,6 +172,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -173,6 +188,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.L1Dcache deadlock_threshold=500000 +eventq_index=0 icache=system.ruby.l1_cntrl0.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -191,7 +207,8 @@ L2cache=system.ruby.l2_cntrl0.L2cache N_tokens=2 buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 +eventq_index=0 filtering_enabled=true l2_request_latency=5 l2_response_latency=5 @@ -207,6 +224,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=10 replacement_policy=PSEUDO_LRU @@ -220,6 +238,7 @@ tagArrayBanks=1 type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -229,6 +248,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 system.ruby.network.ext_links2 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 system.ruby.network.int_links2 number_of_virtual_networks=10 @@ -239,6 +259,7 @@ topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 int_node=system.ruby.network.routers0 latency=1 @@ -248,6 +269,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l2_cntrl0 int_node=system.ruby.network.routers1 latency=1 @@ -257,6 +279,7 @@ weight=1 [system.ruby.network.ext_links2] type=SimpleExtLink bandwidth_factor=16 
+eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers2 latency=1 @@ -266,6 +289,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=3 node_a=system.ruby.network.routers0 @@ -275,6 +299,7 @@ weight=1 [system.ruby.network.int_links1] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=4 node_a=system.ruby.network.routers1 @@ -284,6 +309,7 @@ weight=1 [system.ruby.network.int_links2] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=5 node_a=system.ruby.network.routers2 @@ -293,38 +319,36 @@ weight=1 [system.ruby.network.routers0] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers1] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers2] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 [system.ruby.network.routers3] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=3 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false -num_of_sequencers=1 -ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -340,6 +364,7 @@ check_flush=false checks_to_complete=100 clk_domain=system.clk_domain deadlock_threshold=50000 +eventq_index=0 num_cpus=1 system=system wakeup_frequency=10 @@ -348,5 +373,6 @@ cpuInstPort=system.ruby.l1_cntrl0.sequencer.slave[1] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_token/simerr b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_token/simerr index f5d2abbce..2b5c3b11d 100755 --- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_token/simerr +++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_token/simerr @@ -2,4 +2,3 @@ warn: rounding error > tolerance 0.072760 rounded to 0 warn: rounding error > tolerance 0.072760 rounded to 0 -hack: be nice to actually delete the event here diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_token/simout b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_token/simout index 733c0eefd..a6637bc84 100755 --- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_token/simout +++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_token/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA_MOESI_CMP_token/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MOESI_CMP_token/simout -Redirecting stderr to build/ALPHA_MOESI_CMP_token/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MOESI_CMP_token/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. 
-gem5 compiled Sep 22 2013 05:44:48 -gem5 started Sep 22 2013 05:44:59 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:47:59 +gem5 started Jan 22 2014 17:27:41 +gem5 executing on u200540-lin command line: build/ALPHA_MOESI_CMP_token/gem5.opt -d build/ALPHA_MOESI_CMP_token/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MOESI_CMP_token -re tests/run.py build/ALPHA_MOESI_CMP_token/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MOESI_CMP_token Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_token/stats.txt b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_token/stats.txt index 5c157abec..389c45695 100644 --- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_token/stats.txt +++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_CMP_token/stats.txt @@ -4,9 +4,12 @@ sim_seconds 0.000225 # Nu sim_ticks 225141 # Number of ticks simulated final_tick 225141 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_tick_rate 1264878 # Simulator tick rate (ticks/s) -host_mem_usage 168320 # Number of bytes of host memory used -host_seconds 0.18 # Real time elapsed on the host +host_tick_rate 1830870 # Simulator tick rate (ticks/s) +host_mem_usage 123716 # Number of bytes of host memory used +host_seconds 0.12 # Real time elapsed on the host +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.outstanding_req_hist::bucket_size 2 system.ruby.outstanding_req_hist::max_bucket 19 system.ruby.outstanding_req_hist::samples 1007 @@ -83,6 +86,7 @@ system.ruby.network.routers1.msg_bytes.Response_Control::4 8 system.ruby.network.routers1.msg_bytes.Writeback_Data::4 120168 system.ruby.network.routers1.msg_bytes.Writeback_Control::4 576 system.ruby.network.routers1.msg_bytes.Persistent_Control::3 2984 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.dir_cntrl0.memBuffer.memReq 1655 # Total number of memory requests system.ruby.dir_cntrl0.memBuffer.memRead 868 # Number of memory reads system.ruby.dir_cntrl0.memBuffer.memWrite 787 # Number of memory writes diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_hammer/config.ini b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_hammer/config.ini index 3ff520581..e4c814100 100644 --- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_hammer/config.ini +++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_hammer/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,6 +14,7 @@ children=clk_domain physmem ruby sys_port_proxy tester voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.physmem] @@ -40,6 +44,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -48,18 
+53,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=1 random_seed=1234 randomization=true -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.ruby.dir_cntrl0] @@ -67,8 +76,9 @@ type=Directory_Controller children=directory memBuffer probeFilter buffer_size=0 clk_domain=system.ruby.clk_domain -cntrl_id=1 +cluster_id=0 directory=system.ruby.dir_cntrl0.directory +eventq_index=0 full_bit_dir_enabled=false memBuffer=system.ruby.dir_cntrl0.memBuffer memory_controller_latency=2 @@ -83,6 +93,7 @@ version=0 [system.ruby.dir_cntrl0.directory] type=RubyDirectoryMemory +eventq_index=0 map_levels=4 numa_high_bit=5 size=268435456 @@ -99,6 +110,7 @@ basic_bus_busy_time=2 clk_domain=system.ruby.memctrl_clk_domain dimm_bit_0=12 dimms_per_channel=2 +eventq_index=0 mem_ctl_latency=12 mem_fixed_delay=0 mem_random_arbitrate=0 @@ -116,6 +128,7 @@ type=RubyCache assoc=4 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=1 replacement_policy=PSEUDO_LRU @@ -134,7 +147,8 @@ L2cache=system.ruby.l1_cntrl0.L2cache buffer_size=0 cache_response_latency=10 clk_domain=system.ruby.clk_domain -cntrl_id=0 +cluster_id=0 +eventq_index=0 issue_latency=2 l2_cache_hit_latency=10 no_mig_atomic=true @@ -152,6 +166,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=2 replacement_policy=PSEUDO_LRU @@ -166,6 +181,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=true latency=2 replacement_policy=PSEUDO_LRU @@ -180,6 +196,7 @@ type=RubyCache assoc=2 dataAccessLatency=1 dataArrayBanks=1 +eventq_index=0 is_icache=false latency=10 replacement_policy=PSEUDO_LRU @@ -195,6 +212,7 @@ access_phys_mem=false clk_domain=system.ruby.clk_domain dcache=system.ruby.l1_cntrl0.L1Dcache deadlock_threshold=500000 +eventq_index=0 icache=system.ruby.l1_cntrl0.L1Icache max_outstanding_requests=16 ruby_system=system.ruby @@ -210,6 +228,7 @@ slave=system.tester.cpuDataPort[0] system.tester.cpuInstPort[0] type=DerivedClockDomain clk_divider=3 clk_domain=system.ruby.clk_domain +eventq_index=0 [system.ruby.network] type=SimpleNetwork @@ -219,6 +238,7 @@ buffer_size=0 clk_domain=system.ruby.clk_domain control_msg_size=8 endpoint_bandwidth=1000 +eventq_index=0 ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1 int_links=system.ruby.network.int_links0 system.ruby.network.int_links1 number_of_virtual_networks=10 @@ -229,6 +249,7 @@ topology=Crossbar [system.ruby.network.ext_links0] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.l1_cntrl0 int_node=system.ruby.network.routers0 latency=1 @@ -238,6 +259,7 @@ weight=1 [system.ruby.network.ext_links1] type=SimpleExtLink bandwidth_factor=16 +eventq_index=0 ext_node=system.ruby.dir_cntrl0 int_node=system.ruby.network.routers1 latency=1 @@ -247,6 +269,7 @@ weight=1 [system.ruby.network.int_links0] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=2 node_a=system.ruby.network.routers0 @@ -256,6 +279,7 @@ weight=1 [system.ruby.network.int_links1] type=SimpleIntLink bandwidth_factor=16 +eventq_index=0 latency=1 link_id=3 
node_a=system.ruby.network.routers1 @@ -265,32 +289,29 @@ weight=1 [system.ruby.network.routers0] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=0 virt_nets=10 [system.ruby.network.routers1] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=1 virt_nets=10 [system.ruby.network.routers2] type=Switch clk_domain=system.ruby.clk_domain +eventq_index=0 router_id=2 virt_nets=10 -[system.ruby.profiler] -type=RubyProfiler -all_instructions=false -hot_lines=false -num_of_sequencers=1 -ruby_system=system.ruby - [system.sys_port_proxy] type=RubyPortProxy access_phys_mem=true clk_domain=system.clk_domain +eventq_index=0 ruby_system=system.ruby support_data_reqs=true support_inst_reqs=true @@ -306,6 +327,7 @@ check_flush=true checks_to_complete=100 clk_domain=system.clk_domain deadlock_threshold=50000 +eventq_index=0 num_cpus=1 system=system wakeup_frequency=10 @@ -314,5 +336,6 @@ cpuInstPort=system.ruby.l1_cntrl0.sequencer.slave[1] [system.voltage_domain] type=VoltageDomain +eventq_index=0 voltage=1.000000 diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_hammer/simerr b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_hammer/simerr index f5d2abbce..2b5c3b11d 100755 --- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_hammer/simerr +++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_hammer/simerr @@ -2,4 +2,3 @@ warn: rounding error > tolerance 0.072760 rounded to 0 warn: rounding error > tolerance 0.072760 rounded to 0 -hack: be nice to actually delete the event here diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_hammer/simout b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_hammer/simout index 980451a66..8f18036b7 100755 --- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_hammer/simout +++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_hammer/simout @@ -1,11 +1,9 @@ -Redirecting stdout to build/ALPHA_MOESI_hammer/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MOESI_hammer/simout -Redirecting stderr to build/ALPHA_MOESI_hammer/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MOESI_hammer/simerr gem5 Simulator System. http://gem5.org gem5 is copyrighted software; use the --copyright option for details. -gem5 compiled Sep 22 2013 05:17:28 -gem5 started Sep 22 2013 05:17:38 -gem5 executing on zizzer +gem5 compiled Jan 22 2014 16:32:54 +gem5 started Jan 22 2014 17:25:38 +gem5 executing on u200540-lin command line: build/ALPHA_MOESI_hammer/gem5.opt -d build/ALPHA_MOESI_hammer/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MOESI_hammer -re tests/run.py build/ALPHA_MOESI_hammer/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby-MOESI_hammer Global frequency set at 1000000000 ticks per second info: Entering event queue @ 0. Starting simulation... 
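Throughout these config.ini diffs the standalone [system.ruby.profiler] section disappears: its all_instructions, hot_lines and num_of_sequencers parameters move onto [system.ruby] itself, cntrl_id becomes cluster_id, and stats_filename=ruby.stats is dropped because Ruby now reports through the normal gem5 stats framework (hence the Ruby entries in the stats.txt diffs). Below is a hedged sketch of what a RubySystem instantiation looks like after the change; the parameter names and values are taken from the diffs above, but the surrounding script is illustrative rather than the actual Ruby configuration code.

    from m5.objects import System, RubySystem

    system = System()

    # Former RubyProfiler options now live on RubySystem; ruby.stats is gone
    # and Ruby statistics land in the regular stats.txt instead.
    system.ruby = RubySystem(block_size_bytes=64,
                             mem_size='256MB',
                             random_seed=1234,
                             randomization=True,
                             all_instructions=False,
                             hot_lines=False,
                             num_of_sequencers=1)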
diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_hammer/stats.txt b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_hammer/stats.txt index ca2b008f4..618345d21 100644 --- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_hammer/stats.txt +++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby-MOESI_hammer/stats.txt @@ -4,9 +4,12 @@ sim_seconds 0.000172 # Nu sim_ticks 172201 # Number of ticks simulated final_tick 172201 # Number of ticks from beginning of simulation (restored from checkpoints and never reset) sim_freq 1000000000 # Frequency of simulated ticks -host_tick_rate 1338362 # Simulator tick rate (ticks/s) -host_mem_usage 168248 # Number of bytes of host memory used -host_seconds 0.13 # Real time elapsed on the host +host_tick_rate 1805084 # Simulator tick rate (ticks/s) +host_mem_usage 124680 # Number of bytes of host memory used +host_seconds 0.10 # Real time elapsed on the host +system.voltage_domain.voltage 1 # Voltage in Volts +system.clk_domain.clock 1 # Clock period in ticks +system.ruby.clk_domain.clock 1 # Clock period in ticks system.ruby.outstanding_req_hist::bucket_size 2 system.ruby.outstanding_req_hist::max_bucket 19 system.ruby.outstanding_req_hist::samples 986 @@ -40,6 +43,7 @@ system.ruby.miss_latency_hist::stdev 1015.184360 system.ruby.miss_latency_hist | 60 7.09% 7.09% | 28 3.31% 10.40% | 1 0.12% 10.52% | 3 0.35% 10.87% | 14 1.65% 12.53% | 145 17.14% 29.67% | 352 41.61% 71.28% | 192 22.70% 93.97% | 40 4.73% 98.70% | 11 1.30% 100.00% system.ruby.miss_latency_hist::total 846 system.ruby.Directory.incomplete_times 846 +system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks system.ruby.l1_cntrl0.L1Dcache.demand_hits 70 # Number of cache demand hits system.ruby.l1_cntrl0.L1Dcache.demand_misses 848 # Number of cache demand misses system.ruby.l1_cntrl0.L1Dcache.demand_accesses 918 # Number of cache demand accesses diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby/config.ini b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby/config.ini index 5aaa9746f..25bd77a4f 100644 --- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby/config.ini +++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby/config.ini @@ -1,7 +1,9 @@ [root] type=Root children=system +eventq_index=0 full_system=false +sim_quantum=0 time_sync_enable=false time_sync_period=100000000 time_sync_spin_threshold=100000 @@ -12,6 +14,7 @@ children=clk_domain physmem ruby sys_port_proxy tester voltage_domain boot_osflags=a cache_line_size=64 clk_domain=system.clk_domain +eventq_index=0 init_param=0 kernel= load_addr_mask=1099511627775 @@ -33,6 +36,7 @@ system_port=system.sys_port_proxy.slave[0] [system.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 voltage_domain=system.voltage_domain [system.physmem] @@ -40,6 +44,7 @@ type=SimpleMemory bandwidth=0.000000 clk_domain=system.clk_domain conf_table_reported=true +eventq_index=0 in_addr_map=true latency=30 latency_var=0 @@ -48,18 +53,22 @@ range=0:134217727 [system.ruby] type=RubySystem -children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network profiler +children=clk_domain dir_cntrl0 l1_cntrl0 memctrl_clk_domain network +all_instructions=false block_size_bytes=64 clk_domain=system.ruby.clk_domain +eventq_index=0 +hot_lines=false mem_size=268435456 no_mem_vec=false +num_of_sequencers=1 random_seed=1234 randomization=true -stats_filename=ruby.stats [system.ruby.clk_domain] type=SrcClockDomain clock=1 +eventq_index=0 
voltage_domain=system.voltage_domain
[system.ruby.dir_cntrl0]
@@ -67,9 +76,10 @@ type=Directory_Controller
children=directory memBuffer
buffer_size=0
clk_domain=system.ruby.clk_domain
-cntrl_id=1
+cluster_id=0
directory=system.ruby.dir_cntrl0.directory
directory_latency=12
+eventq_index=0
memBuffer=system.ruby.dir_cntrl0.memBuffer
number_of_TBEs=256
peer=Null
@@ -80,6 +90,7 @@ version=0
[system.ruby.dir_cntrl0.directory]
type=RubyDirectoryMemory
+eventq_index=0
map_levels=4
numa_high_bit=5
size=268435456
@@ -96,6 +107,7 @@ basic_bus_busy_time=2
clk_domain=system.ruby.memctrl_clk_domain
dimm_bit_0=12
dimms_per_channel=2
+eventq_index=0
mem_ctl_latency=12
mem_fixed_delay=0
mem_random_arbitrate=0
@@ -115,7 +127,8 @@ buffer_size=0
cacheMemory=system.ruby.l1_cntrl0.cacheMemory
cache_response_latency=12
clk_domain=system.ruby.clk_domain
-cntrl_id=0
+cluster_id=0
+eventq_index=0
issue_latency=2
number_of_TBEs=256
peer=Null
@@ -131,6 +144,7 @@ type=RubyCache
assoc=2
dataAccessLatency=1
dataArrayBanks=1
+eventq_index=0
is_icache=false
latency=3
replacement_policy=PSEUDO_LRU
@@ -146,6 +160,7 @@ access_phys_mem=false
clk_domain=system.ruby.clk_domain
dcache=system.ruby.l1_cntrl0.cacheMemory
deadlock_threshold=500000
+eventq_index=0
icache=system.ruby.l1_cntrl0.cacheMemory
max_outstanding_requests=16
ruby_system=system.ruby
@@ -161,6 +176,7 @@ slave=system.tester.cpuDataPort[0] system.tester.cpuInstPort[0]
type=DerivedClockDomain
clk_divider=3
clk_domain=system.ruby.clk_domain
+eventq_index=0
[system.ruby.network]
type=SimpleNetwork
@@ -170,6 +186,7 @@ buffer_size=0
clk_domain=system.ruby.clk_domain
control_msg_size=8
endpoint_bandwidth=1000
+eventq_index=0
ext_links=system.ruby.network.ext_links0 system.ruby.network.ext_links1
int_links=system.ruby.network.int_links0 system.ruby.network.int_links1
number_of_virtual_networks=10
@@ -180,6 +197,7 @@ topology=Crossbar
[system.ruby.network.ext_links0]
type=SimpleExtLink
bandwidth_factor=16
+eventq_index=0
ext_node=system.ruby.l1_cntrl0
int_node=system.ruby.network.routers0
latency=1
@@ -189,6 +207,7 @@ weight=1
[system.ruby.network.ext_links1]
type=SimpleExtLink
bandwidth_factor=16
+eventq_index=0
ext_node=system.ruby.dir_cntrl0
int_node=system.ruby.network.routers1
latency=1
@@ -198,6 +217,7 @@ weight=1
[system.ruby.network.int_links0]
type=SimpleIntLink
bandwidth_factor=16
+eventq_index=0
latency=1
link_id=2
node_a=system.ruby.network.routers0
@@ -207,6 +227,7 @@ weight=1
[system.ruby.network.int_links1]
type=SimpleIntLink
bandwidth_factor=16
+eventq_index=0
latency=1
link_id=3
node_a=system.ruby.network.routers1
@@ -216,32 +237,29 @@ weight=1
[system.ruby.network.routers0]
type=Switch
clk_domain=system.ruby.clk_domain
+eventq_index=0
router_id=0
virt_nets=10
[system.ruby.network.routers1]
type=Switch
clk_domain=system.ruby.clk_domain
+eventq_index=0
router_id=1
virt_nets=10
[system.ruby.network.routers2]
type=Switch
clk_domain=system.ruby.clk_domain
+eventq_index=0
router_id=2
virt_nets=10
-[system.ruby.profiler]
-type=RubyProfiler
-all_instructions=false
-hot_lines=false
-num_of_sequencers=1
-ruby_system=system.ruby
-
[system.sys_port_proxy]
type=RubyPortProxy
access_phys_mem=true
clk_domain=system.clk_domain
+eventq_index=0
ruby_system=system.ruby
support_data_reqs=true
support_inst_reqs=true
@@ -257,6 +275,7 @@ check_flush=false
checks_to_complete=100
clk_domain=system.clk_domain
deadlock_threshold=50000
+eventq_index=0
num_cpus=1
system=system
wakeup_frequency=10
@@ -265,5 +284,6 @@ cpuInstPort=system.ruby.l1_cntrl0.sequencer.slave[1]
[system.voltage_domain]
type=VoltageDomain
+eventq_index=0
voltage=1.000000

diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby/simerr b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby/simerr
index f5d2abbce..2b5c3b11d 100755
--- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby/simerr
+++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby/simerr
@@ -2,4 +2,3 @@ warn: rounding error > tolerance 0.072760 rounded to 0
warn: rounding error > tolerance 0.072760 rounded to 0
-hack: be nice to actually delete the event here

diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby/simout b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby/simout
index 6606669ac..f268d31d9 100755
--- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby/simout
+++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby/simout
@@ -1,11 +1,9 @@
-Redirecting stdout to build/ALPHA/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby/simout
-Redirecting stderr to build/ALPHA/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby/simerr
gem5 Simulator System. http://gem5.org
gem5 is copyrighted software; use the --copyright option for details.
-gem5 compiled Sep 24 2013 03:08:53
-gem5 started Sep 28 2013 03:05:49
-gem5 executing on zizzer
+gem5 compiled Jan 22 2014 16:27:55
+gem5 started Jan 22 2014 17:24:37
+gem5 executing on u200540-lin
command line: build/ALPHA/gem5.opt -d build/ALPHA/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby -re tests/run.py build/ALPHA/tests/opt/quick/se/60.rubytest/alpha/linux/rubytest-ruby
Global frequency set at 1000000000 ticks per second
info: Entering event queue @ 0. Starting simulation...

diff --git a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby/stats.txt b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby/stats.txt
index 9c41d3d2f..67ca7d426 100644
--- a/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby/stats.txt
+++ b/tests/quick/se/60.rubytest/ref/alpha/linux/rubytest-ruby/stats.txt
@@ -4,9 +4,12 @@ sim_seconds 0.000222 # Nu
sim_ticks 221941 # Number of ticks simulated
final_tick 221941 # Number of ticks from beginning of simulation (restored from checkpoints and never reset)
sim_freq 1000000000 # Frequency of simulated ticks
-host_tick_rate 2165156 # Simulator tick rate (ticks/s)
-host_mem_usage 165760 # Number of bytes of host memory used
-host_seconds 0.10 # Real time elapsed on the host
+host_tick_rate 3307860 # Simulator tick rate (ticks/s)
+host_mem_usage 122180 # Number of bytes of host memory used
+host_seconds 0.07 # Real time elapsed on the host
+system.voltage_domain.voltage 1 # Voltage in Volts
+system.clk_domain.clock 1 # Clock period in ticks
+system.ruby.clk_domain.clock 1 # Clock period in ticks
system.ruby.delayHist::bucket_size 2 # delay histogram for all message
system.ruby.delayHist::max_bucket 19 # delay histogram for all message
system.ruby.delayHist::samples 1828 # delay histogram for all message
@@ -47,6 +50,7 @@ system.ruby.miss_latency_hist::stdev 573.775637
system.ruby.miss_latency_hist | 6 0.66% 0.66% | 4 0.44% 1.09% | 82 8.95% 10.04% | 594 64.85% 74.89% | 229 25.00% 99.89% | 1 0.11% 100.00% | 0 0.00% 100.00% | 0 0.00% 100.00% | 0 0.00% 100.00% | 0 0.00% 100.00%
system.ruby.miss_latency_hist::total 916
system.ruby.Directory.incomplete_times 916
+system.ruby.memctrl_clk_domain.clock 3 # Clock period in ticks
system.ruby.l1_cntrl0.cacheMemory.demand_hits 38 # Number of cache demand hits
system.ruby.l1_cntrl0.cacheMemory.demand_misses 917 # Number of cache demand misses
system.ruby.l1_cntrl0.cacheMemory.demand_accesses 955 # Number of cache demand accesses

diff --git a/tests/quick/se/70.tgen/ref/null/none/tgen-simple-dram/config.ini b/tests/quick/se/70.tgen/ref/null/none/tgen-simple-dram/config.ini
index 61b6eb32e..b3c13e1c2 100644
--- a/tests/quick/se/70.tgen/ref/null/none/tgen-simple-dram/config.ini
+++ b/tests/quick/se/70.tgen/ref/null/none/tgen-simple-dram/config.ini
@@ -1,7 +1,9 @@
[root]
type=Root
children=system
+eventq_index=0
full_system=false
+sim_quantum=0
time_sync_enable=false
time_sync_period=100000000000
time_sync_spin_threshold=100000000
@@ -12,6 +14,7 @@ children=clk_domain cpu membus monitor physmem
boot_osflags=a
cache_line_size=64
clk_domain=system.clk_domain
+eventq_index=0
init_param=0
kernel=
load_addr_mask=1099511627775
@@ -34,10 +37,12 @@ system_port=system.membus.slave[1]
type=SrcClockDomain
children=voltage_domain
clock=1000
+eventq_index=0
voltage_domain=system.clk_domain.voltage_domain
[system.clk_domain.voltage_domain]
type=VoltageDomain
+eventq_index=0
voltage=1.000000
[system.cpu]
@@ -45,12 +50,14 @@ type=TrafficGen
clk_domain=system.clk_domain
config_file=tests/quick/se/70.tgen/tgen-simple-dram.cfg
elastic_req=false
+eventq_index=0
system=system
port=system.monitor.slave
[system.membus]
type=NoncoherentBus
clk_domain=system.clk_domain
+eventq_index=0
header_cycles=1
use_default_range=false
width=16
@@ -69,6 +76,7 @@ disable_itt_dists=false
disable_latency_hists=false
disable_outstanding_hists=false
disable_transaction_hists=false
+eventq_index=0
itt_bins=20
itt_max_bin=100000
latency_bins=20
@@ -93,6 +101,7 @@ conf_table_reported=true
device_bus_width=8
device_rowbuffer_size=1024
devices_per_rank=8
+eventq_index=0
in_addr_map=true
mem_sched_policy=frfcfs
null=false
@@ -104,13 +113,16 @@ static_backend_latency=10000
static_frontend_latency=10000
tBURST=5000
tCL=13750
+tRAS=35000
tRCD=13750
tREFI=7800000
tRFC=300000
tRP=13750
+tRRD=6250
tWTR=7500
tXAW=40000
write_buffer_size=32
-write_thresh_perc=70
+write_high_thresh_perc=70
+write_low_thresh_perc=0
port=system.membus.master[0]

diff --git a/tests/quick/se/70.tgen/ref/null/none/tgen-simple-dram/simout b/tests/quick/se/70.tgen/ref/null/none/tgen-simple-dram/simout
index 2426a6cee..cffe93183 100755
--- a/tests/quick/se/70.tgen/ref/null/none/tgen-simple-dram/simout
+++ b/tests/quick/se/70.tgen/ref/null/none/tgen-simple-dram/simout
@@ -1,11 +1,9 @@
-Redirecting stdout to build/NULL/tests/opt/quick/se/70.tgen/null/none/tgen-simple-dram/simout
-Redirecting stderr to build/NULL/tests/opt/quick/se/70.tgen/null/none/tgen-simple-dram/simerr
gem5 Simulator System. http://gem5.org
gem5 is copyrighted software; use the --copyright option for details.
-gem5 compiled Sep 22 2013 05:53:51
-gem5 started Sep 22 2013 05:53:54
-gem5 executing on zizzer
+gem5 compiled Jan 22 2014 16:54:17
+gem5 started Jan 22 2014 17:29:00
+gem5 executing on u200540-lin
command line: build/NULL/gem5.opt -d build/NULL/tests/opt/quick/se/70.tgen/null/none/tgen-simple-dram -re tests/run.py build/NULL/tests/opt/quick/se/70.tgen/null/none/tgen-simple-dram
Global frequency set at 1000000000000 ticks per second
info: Entering event queue @ 0. Starting simulation...
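The tgen-simple-dram config above also picks up two added DRAM timing parameters (tRAS, tRRD) and replaces write_thresh_perc with a high/low watermark pair. A minimal sketch of how those knobs would be set from a Python config, assuming the SimpleDRAM SimObject of this gem5 snapshot (the tick values in the diff correspond to these latencies at 1 ps per tick):

    # Illustrative only; the parameter names come from the config.ini diff
    # above, while the SimpleDRAM class name is an assumption.
    from m5.objects import SimpleDRAM

    dram = SimpleDRAM()
    dram.tRAS = '35ns'                 # new parameter (35000 ticks)
    dram.tRRD = '6.25ns'               # new parameter (6250 ticks)
    dram.write_high_thresh_perc = 70   # replaces write_thresh_perc
    dram.write_low_thresh_perc = 0     # new lower watermark for draining writes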
diff --git a/tests/quick/se/70.tgen/ref/null/none/tgen-simple-dram/stats.txt b/tests/quick/se/70.tgen/ref/null/none/tgen-simple-dram/stats.txt
index 9c1ab67e5..9a5e1cab0 100644
--- a/tests/quick/se/70.tgen/ref/null/none/tgen-simple-dram/stats.txt
+++ b/tests/quick/se/70.tgen/ref/null/none/tgen-simple-dram/stats.txt
@@ -4,9 +4,11 @@ sim_seconds 0.100000 # Nu
sim_ticks 100000000000 # Number of ticks simulated
final_tick 100000000000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset)
sim_freq 1000000000000 # Frequency of simulated ticks
-host_tick_rate 20181472495 # Simulator tick rate (ticks/s)
-host_mem_usage 192916 # Number of bytes of host memory used
-host_seconds 4.96 # Real time elapsed on the host
+host_tick_rate 33856013702 # Simulator tick rate (ticks/s)
+host_mem_usage 195468 # Number of bytes of host memory used
+host_seconds 2.95 # Real time elapsed on the host
+system.clk_domain.voltage_domain.voltage 1 # Voltage in Volts
+system.clk_domain.clock 1000 # Clock period in ticks
system.physmem.bytes_read::cpu 213331136 # Number of bytes read from this memory
system.physmem.bytes_read::total 213331136 # Number of bytes read from this memory
system.physmem.num_reads::cpu 3333299 # Number of read requests responded to by this memory

diff --git a/tests/quick/se/70.tgen/ref/null/none/tgen-simple-mem/config.ini b/tests/quick/se/70.tgen/ref/null/none/tgen-simple-mem/config.ini
index 27a6fb9af..1932695fb 100644
--- a/tests/quick/se/70.tgen/ref/null/none/tgen-simple-mem/config.ini
+++ b/tests/quick/se/70.tgen/ref/null/none/tgen-simple-mem/config.ini
@@ -1,7 +1,9 @@
[root]
type=Root
children=system
+eventq_index=0
full_system=false
+sim_quantum=0
time_sync_enable=false
time_sync_period=100000000000
time_sync_spin_threshold=100000000
@@ -12,6 +14,7 @@ children=clk_domain cpu membus monitor physmem
boot_osflags=a
cache_line_size=64
clk_domain=system.clk_domain
+eventq_index=0
init_param=0
kernel=
load_addr_mask=1099511627775
@@ -34,10 +37,12 @@ system_port=system.membus.slave[1]
type=SrcClockDomain
children=voltage_domain
clock=1000
+eventq_index=0
voltage_domain=system.clk_domain.voltage_domain
[system.clk_domain.voltage_domain]
type=VoltageDomain
+eventq_index=0
voltage=1.000000
[system.cpu]
@@ -45,12 +50,14 @@ type=TrafficGen
clk_domain=system.clk_domain
config_file=tests/quick/se/70.tgen/tgen-simple-mem.cfg
elastic_req=false
+eventq_index=0
system=system
port=system.monitor.slave
[system.membus]
type=NoncoherentBus
clk_domain=system.clk_domain
+eventq_index=0
header_cycles=1
use_default_range=false
width=16
@@ -69,6 +76,7 @@ disable_itt_dists=false
disable_latency_hists=false
disable_outstanding_hists=false
disable_transaction_hists=false
+eventq_index=0
itt_bins=20
itt_max_bin=100000
latency_bins=20
@@ -86,6 +94,7 @@ type=SimpleMemory
bandwidth=73.000000
clk_domain=system.clk_domain
conf_table_reported=true
+eventq_index=0
in_addr_map=true
latency=30000
latency_var=0

diff --git a/tests/quick/se/70.tgen/ref/null/none/tgen-simple-mem/simout b/tests/quick/se/70.tgen/ref/null/none/tgen-simple-mem/simout
index efa3fa542..ccbd2154c 100755
--- a/tests/quick/se/70.tgen/ref/null/none/tgen-simple-mem/simout
+++ b/tests/quick/se/70.tgen/ref/null/none/tgen-simple-mem/simout
@@ -1,11 +1,9 @@
-Redirecting stdout to build/NULL/tests/opt/quick/se/70.tgen/null/none/tgen-simple-mem/simout
-Redirecting stderr to build/NULL/tests/opt/quick/se/70.tgen/null/none/tgen-simple-mem/simerr
gem5 Simulator System. http://gem5.org
gem5 is copyrighted software; use the --copyright option for details.
-gem5 compiled Sep 22 2013 05:53:51
-gem5 started Sep 22 2013 05:53:54
-gem5 executing on zizzer
+gem5 compiled Jan 22 2014 16:54:17
+gem5 started Jan 22 2014 17:29:05
+gem5 executing on u200540-lin
command line: build/NULL/gem5.opt -d build/NULL/tests/opt/quick/se/70.tgen/null/none/tgen-simple-mem -re tests/run.py build/NULL/tests/opt/quick/se/70.tgen/null/none/tgen-simple-mem
Global frequency set at 1000000000000 ticks per second
info: Entering event queue @ 0. Starting simulation...

diff --git a/tests/quick/se/70.tgen/ref/null/none/tgen-simple-mem/stats.txt b/tests/quick/se/70.tgen/ref/null/none/tgen-simple-mem/stats.txt
index 14b3c1d80..ead00396f 100644
--- a/tests/quick/se/70.tgen/ref/null/none/tgen-simple-mem/stats.txt
+++ b/tests/quick/se/70.tgen/ref/null/none/tgen-simple-mem/stats.txt
@@ -4,9 +4,11 @@ sim_seconds 0.100000 # Nu
sim_ticks 100000000000 # Number of ticks simulated
final_tick 100000000000 # Number of ticks from beginning of simulation (restored from checkpoints and never reset)
sim_freq 1000000000000 # Frequency of simulated ticks
-host_tick_rate 8032030639 # Simulator tick rate (ticks/s)
-host_mem_usage 228596 # Number of bytes of host memory used
-host_seconds 12.45 # Real time elapsed on the host
+host_tick_rate 14364594493 # Simulator tick rate (ticks/s)
+host_mem_usage 195500 # Number of bytes of host memory used
+host_seconds 6.96 # Real time elapsed on the host
+system.clk_domain.voltage_domain.voltage 1 # Voltage in Volts
+system.clk_domain.clock 1000 # Clock period in ticks
system.physmem.bytes_read::cpu 64 # Number of bytes read from this memory
system.physmem.bytes_read::total 64 # Number of bytes read from this memory
system.physmem.bytes_written::cpu 213329152 # Number of bytes written to this memory
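Since the same clock/voltage-domain stats now appear in every regenerated stats.txt under tests/quick, a throwaway script along these lines (hypothetical, not part of the patch) can pull them out for a quick cross-check of the updated references:

    #!/usr/bin/env python
    # Hypothetical helper: list the new per-domain stats in the updated
    # reference stats.txt files so they can be eyeballed in one place.
    import fnmatch
    import os

    for root, _, files in os.walk('tests/quick'):
        for name in fnmatch.filter(files, 'stats.txt'):
            path = os.path.join(root, name)
            with open(path) as f:
                for line in f:
                    fields = line.split()
                    # stats.txt lines look like: <name> <value> # <description>
                    if len(fields) >= 2 and fields[0].endswith(('.voltage', '.clock')):
                        print('%s %s = %s' % (path, fields[0], fields[1]))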