author     Giacomo Travaglini <giacomo.travaglini@arm.com>  2019-06-11 10:02:16 +0100
committer  Giacomo Travaglini <giacomo.travaglini@arm.com>  2019-06-17 08:18:59 +0000
commit     8e3164a90b50a18fb2906a18f353189902fce26e (patch)
tree       ee1620324efea597d9e84277c853dd46a5aaef9b /src/arch
parent     d3accb8ba3a65127ca214f19a85ff6ddf50a3c7a (diff)
download   gem5-8e3164a90b50a18fb2906a18f353189902fce26e.tar.xz
arch-arm: Move the memacc_code before op_wb in fp loads
This fixes a bug that arises when a memory exception is generated during an
fp flavoured load (a memory load targeting a SIMD & FP register). With the
previous template, a fault did not prevent the destination register from
being modified, which is wrong:

    if (fault == NoFault) {
        fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
        %(memacc_code)s;
    }

    if (fault == NoFault) {
        %(op_wb)s;
    }

The patch introduces a Load64FpExecute template which moves the register
write (memacc_code) just before op_wb, so it only runs when the load has
not faulted.

Change-Id: I1c89c525dfa7a4ef489abe0872cd7baacdd6ce3c
Signed-off-by: Giacomo Travaglini <giacomo.travaglini@arm.com>
Reviewed-by: Nikos Nikoleris <nikos.nikoleris@arm.com>
Reviewed-on: https://gem5-review.googlesource.com/c/public/gem5/+/19228
Reviewed-by: Andreas Sandberg <andreas.sandberg@arm.com>
Maintainer: Andreas Sandberg <andreas.sandberg@arm.com>
Tested-by: kokoro <noreply+kokoro@google.com>
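As an illustration of the ordering problem described above, here is a minimal
stand-alone sketch (plain Python, not gem5 code): if the step that copies the
loaded value into the destination register runs before the fault from the
memory access is checked, a faulting load still clobbers the register. The
register name, fault values, and toy memory below are invented for
illustration only.

    # Toy sketch of the ordering bug; none of these names come from gem5.
    NO_FAULT = None
    DATA_ABORT = "DataAbort"

    def read_mem(addr, memory):
        """Return (fault, value); address 0xdead is pretended to fault."""
        if addr == 0xdead:
            return DATA_ABORT, 0
        return NO_FAULT, memory.get(addr, 0)

    def execute_load_buggy(addr, regs, memory):
        fault = NO_FAULT
        if fault is NO_FAULT:
            fault, value = read_mem(addr, memory)
            regs["fp0"] = value      # memacc_code: runs even when the load faulted
        if fault is NO_FAULT:
            pass                     # op_wb
        return fault

    def execute_load_fixed(addr, regs, memory):
        fault = NO_FAULT
        if fault is NO_FAULT:
            fault, value = read_mem(addr, memory)
        if fault is NO_FAULT:
            regs["fp0"] = value      # memacc_code moved next to op_wb
        return fault

    regs = {"fp0": 42}
    execute_load_buggy(0xdead, regs, {})
    print("buggy:", regs["fp0"])     # 0 -> register clobbered despite the fault
    regs = {"fp0": 42}
    execute_load_fixed(0xdead, regs, {})
    print("fixed:", regs["fp0"])     # 42 -> register preserved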
Diffstat (limited to 'src/arch')
-rw-r--r--  src/arch/arm/isa/insts/ldr64.isa       7
-rw-r--r--  src/arch/arm/isa/templates/mem64.isa  26
2 files changed, 32 insertions(+), 1 deletion(-)
diff --git a/src/arch/arm/isa/insts/ldr64.isa b/src/arch/arm/isa/insts/ldr64.isa
index 56112a7c1..801316eeb 100644
--- a/src/arch/arm/isa/insts/ldr64.isa
+++ b/src/arch/arm/isa/insts/ldr64.isa
@@ -99,6 +99,13 @@ let {{
             if self.flavor in ("acex", "exclusive", "exp", "acexp"):
                 self.memFlags.append("Request::LLSC")
 
+            # Using a different execute template for fp flavoured loads.
+            # In this specific template the memacc_code is executed
+            # conditionally, depending on whether the memory load has
+            # generated any fault.
+            if flavor == "fp":
+                self.fullExecTemplate = eval(self.execBase + 'FpExecute')
+
         def buildEACode(self):
             # Address computation code
             eaCode = ""
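The hunk above picks the execute template by name: execBase is 'Load64', so
eval(self.execBase + 'FpExecute') resolves to the Load64FpExecute template
when the flavor is "fp". Below is a rough stand-alone sketch of that
name-based selection, assuming a plain dictionary as the template registry
instead of eval and the parser's namespace; it is not the gem5 ISA parser.

    # Sketch of flavor-driven template selection; the registry is invented.
    templates = {
        "Load64Execute": "plain load execute body ...",
        "Load64FpExecute": "fp load execute body (memacc_code after fault check) ...",
    }

    class LoadInst64Sketch:
        execBase = "Load64"

        def __init__(self, flavor="normal"):
            # Default template, overridden for fp flavoured loads.
            self.fullExecTemplate = templates[self.execBase + "Execute"]
            if flavor == "fp":
                self.fullExecTemplate = templates[self.execBase + "FpExecute"]

    print(LoadInst64Sketch(flavor="fp").fullExecTemplate)
    print(LoadInst64Sketch(flavor="normal").fullExecTemplate)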
diff --git a/src/arch/arm/isa/templates/mem64.isa b/src/arch/arm/isa/templates/mem64.isa
index bb8594cfe..fd796698d 100644
--- a/src/arch/arm/isa/templates/mem64.isa
+++ b/src/arch/arm/isa/templates/mem64.isa
@@ -1,6 +1,6 @@
// -*- mode:c++ -*-
-// Copyright (c) 2011-2014, 2017 ARM Limited
+// Copyright (c) 2011-2014, 2017, 2019 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
@@ -70,6 +70,30 @@ def template Load64Execute {{
     }
 }};
 
+def template Load64FpExecute {{
+    Fault %(class_name)s::execute(ExecContext *xc,
+                                  Trace::InstRecord *traceData) const
+    {
+        Addr EA;
+        Fault fault = NoFault;
+
+        %(op_decl)s;
+        %(op_rd)s;
+        %(ea_code)s;
+
+        if (fault == NoFault) {
+            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
+        }
+
+        if (fault == NoFault) {
+            %(memacc_code)s;
+            %(op_wb)s;
+        }
+
+        return fault;
+    }
+}};
+
 def template Store64Execute {{
     Fault %(class_name)s::execute(ExecContext *xc,
                                   Trace::InstRecord *traceData) const
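For context on how a "def template" body such as Load64FpExecute above becomes
C++: the ISA description system fills the %(...)s placeholders with
per-instruction code snippets, essentially Python %-formatting over a
dictionary. The sketch below is hand-written; the class name and snippet
bodies are invented placeholders, not what the parser actually emits.

    # Rough sketch of template expansion via %-formatting; snippet values
    # (class name, memacc_code, op_wb, ...) are invented for illustration.
    load64_fp_execute = """\
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
        }
        if (fault == NoFault) {
            %(memacc_code)s;
            %(op_wb)s;
        }
        return fault;
    }
    """

    snippets = {
        "class_name": "LDRFP64_EXAMPLE",            # hypothetical class name
        "op_decl": "uint64_t Mem = 0",
        "op_rd": "/* read source operands */",
        "ea_code": "EA = /* base + offset */ 0",
        "memacc_code": "/* move Mem into the FP/SIMD destination */",
        "op_wb": "/* write back the destination register */",
    }

    print(load64_fp_execute % snippets)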