author    ARM gem5 Developers <none@none>    2014-01-24 15:29:34 -0600
committer ARM gem5 Developers <none@none>    2014-01-24 15:29:34 -0600
commit    612f8f074fa1099cf70faf495d46cc647762a031 (patch)
tree      bd1e99c43bf15292395eadd4b7ae3f5c823545c3 /src/arch/arm/isa/templates
parent    f3585c841e964c98911784a187fc4f081a02a0a6 (diff)
download  gem5-612f8f074fa1099cf70faf495d46cc647762a031.tar.xz
arm: Add support for ARMv8 (AArch64 & AArch32)
Note: AArch64 and AArch32 interworking is not supported. If you use an
AArch64 kernel you are restricted to AArch64 user-mode binaries. This will
be addressed in a later patch.

Note: Virtualization is only supported in AArch32 mode. This will also be
fixed in a later patch.

Contributors:
Giacomo Gabrielli    (TrustZone, LPAE, system-level AArch64, AArch64 NEON, validation)
Thomas Grocutt       (AArch32 Virtualization, AArch64 FP, validation)
Mbou Eyole           (AArch64 NEON, validation)
Ali Saidi            (AArch64 Linux support, code integration, validation)
Edmund Grimley-Evans (AArch64 FP)
William Wang         (AArch64 Linux support)
Rene De Jong         (AArch64 Linux support, performance opt.)
Matt Horsnell        (AArch64 MP, validation)
Matt Evans           (device models, code integration, validation)
Chris Adeniyi-Jones  (AArch64 syscall-emulation)
Prakash Ramrakhyani  (validation)
Dam Sunwoo           (validation)
Chander Sudanthi     (validation)
Stephan Diestelhorst (validation)
Andreas Hansson      (code integration, performance opt.)
Eric Van Hensbergen  (performance opt.)
Gabe Black
Diffstat (limited to 'src/arch/arm/isa/templates')
-rw-r--r--  src/arch/arm/isa/templates/basic.isa       19
-rw-r--r--  src/arch/arm/isa/templates/branch64.isa   141
-rw-r--r--  src/arch/arm/isa/templates/data64.isa     279
-rw-r--r--  src/arch/arm/isa/templates/macromem.isa   126
-rw-r--r--  src/arch/arm/isa/templates/mem.isa         22
-rw-r--r--  src/arch/arm/isa/templates/mem64.isa      686
-rw-r--r--  src/arch/arm/isa/templates/misc.isa       154
-rw-r--r--  src/arch/arm/isa/templates/misc64.isa      91
-rw-r--r--  src/arch/arm/isa/templates/neon.isa        24
-rw-r--r--  src/arch/arm/isa/templates/neon64.isa     527
-rw-r--r--  src/arch/arm/isa/templates/templates.isa   13
-rw-r--r--  src/arch/arm/isa/templates/vfp.isa        105
-rw-r--r--  src/arch/arm/isa/templates/vfp64.isa      140
13 files changed, 2310 insertions, 17 deletions
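
The .isa files touched by this patch are written in gem5's ISA description language: each "def template Name {{ ... }}" block below is a C++ fragment with %(name)s placeholders, and the Python-based ISA parser fills those placeholders per instruction when it generates the decoder. A minimal, runnable sketch of that expansion using plain Python %-formatting (the substitution keys are real template holes, but the values are made up for illustration and are not the parser's actual internals):

# Sketch: gem5's ISA parser fills %(name)s holes in a template with
# per-instruction values via Python %-substitution. Values are illustrative.
template = '''
inline %(class_name)s::%(class_name)s(ExtMachInst machInst, int64_t _imm)
    : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _imm)
{
    %(constructor)s;
}
'''

subst = {
    'class_name': 'B64',                       # hypothetical generated class
    'base_class': 'BranchImm64',
    'mnemonic': 'b',
    'op_class': 'BrOp',
    'constructor': 'flags[IsControl] = true',  # stand-in for operand setup
}

print(template % subst)  # emits the concrete C++ constructor definition
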
diff --git a/src/arch/arm/isa/templates/basic.isa b/src/arch/arm/isa/templates/basic.isa
index b3878b89a..de4506e05 100644
--- a/src/arch/arm/isa/templates/basic.isa
+++ b/src/arch/arm/isa/templates/basic.isa
@@ -1,5 +1,17 @@
// -*- mode:c++ -*-
+// Copyright (c) 2011 ARM Limited
+// All rights reserved
+//
+// The license below extends only to copyright in the software and shall
+// not be construed as granting a license to any other intellectual
+// property including but not limited to intellectual property relating
+// to a hardware implementation of the functionality of the software
+// licensed hereunder. You may use the software subject to the license
+// terms below provided that you ensure that this notice is replicated
+// unmodified and in its entirety in all distributions of the software,
+// modified or unmodified, in source code or in binary form.
+//
// Copyright (c) 2007-2008 The Florida State University
// All rights reserved.
//
@@ -60,6 +72,13 @@ def template BasicConstructor {{
}
}};
+def template BasicConstructor64 {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst) : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
+ {
+ %(constructor)s;
+ }
+}};
+
// Basic instruction class execute method template.
def template BasicExecute {{
diff --git a/src/arch/arm/isa/templates/branch64.isa b/src/arch/arm/isa/templates/branch64.isa
new file mode 100644
index 000000000..84b3e6ae7
--- /dev/null
+++ b/src/arch/arm/isa/templates/branch64.isa
@@ -0,0 +1,141 @@
+// -*- mode:c++ -*-
+
+// Copyright (c) 2011 ARM Limited
+// All rights reserved
+//
+// The license below extends only to copyright in the software and shall
+// not be construed as granting a license to any other intellectual
+// property including but not limited to intellectual property relating
+// to a hardware implementation of the functionality of the software
+// licensed hereunder. You may use the software subject to the license
+// terms below provided that you ensure that this notice is replicated
+// unmodified and in its entirety in all distributions of the software,
+// modified or unmodified, in source code or in binary form.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met: redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer;
+// redistributions in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution;
+// neither the name of the copyright holders nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: Gabe Black
+
+def template BranchImm64Declare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, int64_t _imm);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template BranchImm64Constructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ int64_t _imm)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _imm)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template BranchImmCond64Declare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, int64_t _imm,
+ ConditionCode _condCode);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template BranchImmCond64Constructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ int64_t _imm,
+ ConditionCode _condCode)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _imm, _condCode)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template BranchReg64Declare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _op1);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template BranchReg64Constructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _op1)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _op1)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template BranchImmReg64Declare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst,
+ int64_t _imm, IntRegIndex _op1);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template BranchImmReg64Constructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ int64_t _imm,
+ IntRegIndex _op1)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _imm, _op1)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template BranchImmImmReg64Declare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, int64_t _imm1, int64_t _imm2,
+ IntRegIndex _op1);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template BranchImmImmReg64Constructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ int64_t _imm1, int64_t _imm2,
+ IntRegIndex _op1)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _imm1, _imm2, _op1)
+ {
+ %(constructor)s;
+ }
+}};
diff --git a/src/arch/arm/isa/templates/data64.isa b/src/arch/arm/isa/templates/data64.isa
new file mode 100644
index 000000000..b6f7ce8d0
--- /dev/null
+++ b/src/arch/arm/isa/templates/data64.isa
@@ -0,0 +1,279 @@
+// -*- mode:c++ -*-
+
+// Copyright (c) 2011 ARM Limited
+// All rights reserved
+//
+// The license below extends only to copyright in the software and shall
+// not be construed as granting a license to any other intellectual
+// property including but not limited to intellectual property relating
+// to a hardware implementation of the functionality of the software
+// licensed hereunder. You may use the software subject to the license
+// terms below provided that you ensure that this notice is replicated
+// unmodified and in its entirety in all distributions of the software,
+// modified or unmodified, in source code or in binary form.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met: redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer;
+// redistributions in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution;
+// neither the name of the copyright holders nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: Gabe Black
+
+def template DataXImmDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
+ IntRegIndex _op1, uint64_t _imm);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template DataXImmConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest,
+ IntRegIndex _op1,
+ uint64_t _imm)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _imm)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template DataXSRegDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
+ IntRegIndex _op1, IntRegIndex _op2,
+ int32_t _shiftAmt, ArmShiftType _shiftType);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template DataXSRegConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest,
+ IntRegIndex _op1,
+ IntRegIndex _op2,
+ int32_t _shiftAmt,
+ ArmShiftType _shiftType)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _op2, _shiftAmt, _shiftType)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template DataXERegDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
+ IntRegIndex _op1, IntRegIndex _op2,
+ ArmExtendType _extendType, int32_t _shiftAmt);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template DataXERegConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest,
+ IntRegIndex _op1,
+ IntRegIndex _op2,
+ ArmExtendType _extendType,
+ int32_t _shiftAmt)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _op2, _extendType, _shiftAmt)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template DataX1RegDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
+ IntRegIndex _op1);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template DataX1RegConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest,
+ IntRegIndex _op1)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _op1)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template DataX2RegDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
+ IntRegIndex _op1, IntRegIndex _op2);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template DataX2RegConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest,
+ IntRegIndex _op1,
+ IntRegIndex _op2)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _op2)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template DataX2RegImmDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
+ IntRegIndex _op1, IntRegIndex _op2, uint64_t _imm);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template DataX2RegImmConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest,
+ IntRegIndex _op1,
+ IntRegIndex _op2,
+ uint64_t _imm)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _op2, _imm)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template DataX3RegDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
+ IntRegIndex _op1, IntRegIndex _op2, IntRegIndex _op3);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template DataX3RegConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest,
+ IntRegIndex _op1,
+ IntRegIndex _op2,
+ IntRegIndex _op3)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _op2, _op3)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template DataXCondCompImmDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _op1,
+ uint64_t _imm, ConditionCode _condCode, uint8_t _defCc);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template DataXCondCompImmConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _op1,
+ uint64_t _imm,
+ ConditionCode _condCode,
+ uint8_t _defCc)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _op1, _imm, _condCode, _defCc)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template DataXCondCompRegDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _op1,
+ IntRegIndex _op2, ConditionCode _condCode,
+ uint8_t _defCc);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template DataXCondCompRegConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _op1,
+ IntRegIndex _op2,
+ ConditionCode _condCode,
+ uint8_t _defCc)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _op1, _op2, _condCode, _defCc)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template DataXCondSelDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
+ IntRegIndex _op1, IntRegIndex _op2,
+ ConditionCode _condCode);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template DataXCondSelConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest,
+ IntRegIndex _op1,
+ IntRegIndex _op2,
+ ConditionCode _condCode)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _op2, _condCode)
+ {
+ %(constructor)s;
+ }
+}};
diff --git a/src/arch/arm/isa/templates/macromem.isa b/src/arch/arm/isa/templates/macromem.isa
index 195204a95..465090660 100644
--- a/src/arch/arm/isa/templates/macromem.isa
+++ b/src/arch/arm/isa/templates/macromem.isa
@@ -1,6 +1,6 @@
// -*- mode:c++ -*-
-// Copyright (c) 2010 ARM Limited
+// Copyright (c) 2010-2013 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
@@ -338,6 +338,18 @@ def template MicroIntImmConstructor {{
}
}};
+def template MicroIntImmXConstructor {{
+ %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ RegIndex _ura,
+ RegIndex _urb,
+ int32_t _imm)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _ura, _urb, _imm)
+ {
+ %(constructor)s;
+ }
+}};
+
def template MicroIntRegDeclare {{
class %(class_name)s : public %(base_class)s
{
@@ -349,6 +361,28 @@ def template MicroIntRegDeclare {{
};
}};
+def template MicroIntXERegConstructor {{
+ %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ RegIndex _ura, RegIndex _urb, RegIndex _urc,
+ ArmExtendType _type, uint32_t _shiftAmt)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _ura, _urb, _urc, _type, _shiftAmt)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template MicroIntXERegDeclare {{
+ class %(class_name)s : public %(base_class)s
+ {
+ public:
+ %(class_name)s(ExtMachInst machInst,
+ RegIndex _ura, RegIndex _urb, RegIndex _urc,
+ ArmExtendType _type, uint32_t _shiftAmt);
+ %(BasicExecDeclare)s
+ };
+}};
+
def template MicroIntRegConstructor {{
%(class_name)s::%(class_name)s(ExtMachInst machInst,
RegIndex _ura, RegIndex _urb, RegIndex _urc,
@@ -402,6 +436,96 @@ def template MacroMemConstructor {{
}};
+def template BigFpMemImmDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(const char *mnemonic, ExtMachInst machInst,
+ bool load, IntRegIndex dest, IntRegIndex base, int64_t imm);
+ %(BasicExecPanic)s
+};
+}};
+
+def template BigFpMemImmConstructor {{
+%(class_name)s::%(class_name)s(const char *mnemonic, ExtMachInst machInst,
+ bool load, IntRegIndex dest, IntRegIndex base, int64_t imm)
+ : %(base_class)s(mnemonic, machInst, %(op_class)s, load, dest, base, imm)
+{
+ %(constructor)s;
+}
+}};
+
+def template BigFpMemRegDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(const char *mnemonic, ExtMachInst machInst,
+ bool load, IntRegIndex dest, IntRegIndex base,
+ IntRegIndex offset, ArmExtendType type, int64_t imm);
+ %(BasicExecPanic)s
+};
+}};
+
+def template BigFpMemRegConstructor {{
+%(class_name)s::%(class_name)s(const char *mnemonic, ExtMachInst machInst,
+ bool load, IntRegIndex dest, IntRegIndex base,
+ IntRegIndex offset, ArmExtendType type, int64_t imm)
+ : %(base_class)s(mnemonic, machInst, %(op_class)s, load, dest, base,
+ offset, type, imm)
+{
+ %(constructor)s;
+}
+}};
+
+def template BigFpMemLitDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(const char *mnemonic, ExtMachInst machInst,
+ IntRegIndex dest, int64_t imm);
+ %(BasicExecPanic)s
+};
+}};
+
+def template BigFpMemLitConstructor {{
+%(class_name)s::%(class_name)s(const char *mnemonic, ExtMachInst machInst,
+ IntRegIndex dest, int64_t imm)
+ : %(base_class)s(mnemonic, machInst, %(op_class)s, dest, imm)
+{
+ %(constructor)s;
+}
+}};
+
+def template PairMemDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(const char *mnemonic, ExtMachInst machInst,
+ uint32_t size, bool fp, bool load, bool noAlloc, bool signExt,
+ bool exclusive, bool acrel, uint32_t imm,
+ AddrMode mode, IntRegIndex rn, IntRegIndex rt,
+ IntRegIndex rt2);
+ %(BasicExecPanic)s
+};
+}};
+
+def template PairMemConstructor {{
+%(class_name)s::%(class_name)s(const char *mnemonic, ExtMachInst machInst,
+ uint32_t size, bool fp, bool load, bool noAlloc, bool signExt,
+ bool exclusive, bool acrel, uint32_t imm, AddrMode mode,
+ IntRegIndex rn, IntRegIndex rt, IntRegIndex rt2)
+ : %(base_class)s(mnemonic, machInst, %(op_class)s, size,
+ fp, load, noAlloc, signExt, exclusive, acrel,
+ imm, mode, rn, rt, rt2)
+{
+ %(constructor)s;
+}
+}};
+
def template VMemMultDeclare {{
class %(class_name)s : public %(base_class)s
{
diff --git a/src/arch/arm/isa/templates/mem.isa b/src/arch/arm/isa/templates/mem.isa
index 871378f3f..7682c277d 100644
--- a/src/arch/arm/isa/templates/mem.isa
+++ b/src/arch/arm/isa/templates/mem.isa
@@ -1,6 +1,6 @@
// -*- mode:c++ -*-
-// Copyright (c) 2010 ARM Limited
+// Copyright (c) 2010, 2012 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
@@ -697,6 +697,11 @@ def template LoadStoreImmDeclare {{
%(InitiateAccDeclare)s
%(CompleteAccDeclare)s
+
+ virtual void
+ annotateFault(ArmFault *fault) {
+ %(fa_code)s
+ }
};
}};
@@ -763,6 +768,11 @@ def template StoreRegDeclare {{
%(InitiateAccDeclare)s
%(CompleteAccDeclare)s
+
+ virtual void
+ annotateFault(ArmFault *fault) {
+ %(fa_code)s
+ }
};
}};
@@ -808,6 +818,11 @@ def template LoadRegDeclare {{
%(InitiateAccDeclare)s
%(CompleteAccDeclare)s
+
+ virtual void
+ annotateFault(ArmFault *fault) {
+ %(fa_code)s
+ }
};
}};
@@ -828,6 +843,11 @@ def template LoadImmDeclare {{
%(InitiateAccDeclare)s
%(CompleteAccDeclare)s
+
+ virtual void
+ annotateFault(ArmFault *fault) {
+ %(fa_code)s
+ }
};
}};
diff --git a/src/arch/arm/isa/templates/mem64.isa b/src/arch/arm/isa/templates/mem64.isa
new file mode 100644
index 000000000..87dcba988
--- /dev/null
+++ b/src/arch/arm/isa/templates/mem64.isa
@@ -0,0 +1,686 @@
+// -*- mode:c++ -*-
+
+// Copyright (c) 2011-2013 ARM Limited
+// All rights reserved
+//
+// The license below extends only to copyright in the software and shall
+// not be construed as granting a license to any other intellectual
+// property including but not limited to intellectual property relating
+// to a hardware implementation of the functionality of the software
+// licensed hereunder. You may use the software subject to the license
+// terms below provided that you ensure that this notice is replicated
+// unmodified and in its entirety in all distributions of the software,
+// modified or unmodified, in source code or in binary form.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met: redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer;
+// redistributions in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution;
+// neither the name of the copyright holders nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: Gabe Black
+
+let {{
+ SPAlignmentCheckCode = '''
+ if (baseIsSP && bits(XBase, 3, 0) &&
+ SPAlignmentCheckEnabled(xc->tcBase())) {
+ return new SPAlignmentFault();
+ }
+ '''
+}};
+
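
The SPAlignmentCheckCode fragment above guards SP-based accesses: with the stack-alignment check enabled, a data access whose base register is SP faults unless SP is 16-byte aligned, i.e. unless bits 3:0 of the base are zero. A runnable sketch of just that predicate (the helper below mirrors gem5's bits() semantics; the function names are stand-ins, not gem5 API):

# Sketch of the SP alignment predicate, assuming a 16-byte-aligned stack.
def bits(value, msb, lsb):
    # Extract value[msb:lsb], mirroring gem5's bits() helper.
    return (value >> lsb) & ((1 << (msb - lsb + 1)) - 1)

def sp_alignment_fault(xbase, base_is_sp=True, check_enabled=True):
    return base_is_sp and check_enabled and bits(xbase, 3, 0) != 0

assert not sp_alignment_fault(0x7ffffff0)  # 16-byte aligned: no fault
assert sp_alignment_fault(0x7ffffff8)      # only 8-byte aligned: faults
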
+def template Load64Execute {{
+ Fault %(class_name)s::execute(%(CPU_exec_context)s *xc,
+ Trace::InstRecord *traceData) const
+ {
+ Addr EA;
+ Fault fault = NoFault;
+
+ %(op_decl)s;
+ %(op_rd)s;
+ %(ea_code)s;
+
+ if (fault == NoFault) {
+ fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
+ %(memacc_code)s;
+ }
+
+ if (fault == NoFault) {
+ %(op_wb)s;
+ }
+
+ return fault;
+ }
+}};
+
+def template Store64Execute {{
+ Fault %(class_name)s::execute(%(CPU_exec_context)s *xc,
+ Trace::InstRecord *traceData) const
+ {
+ Addr EA;
+ Fault fault = NoFault;
+
+ %(op_decl)s;
+ %(op_rd)s;
+ %(ea_code)s;
+
+ if (fault == NoFault) {
+ %(memacc_code)s;
+ }
+
+ if (fault == NoFault) {
+ fault = writeMemAtomic(xc, traceData, Mem, EA,
+ memAccessFlags, NULL);
+ }
+
+ if (fault == NoFault) {
+ %(op_wb)s;
+ }
+
+ return fault;
+ }
+}};
+
+def template Store64InitiateAcc {{
+ Fault %(class_name)s::initiateAcc(%(CPU_exec_context)s *xc,
+ Trace::InstRecord *traceData) const
+ {
+ Addr EA;
+ Fault fault = NoFault;
+
+ %(op_decl)s;
+ %(op_rd)s;
+ %(ea_code)s;
+
+ if (fault == NoFault) {
+ %(memacc_code)s;
+ }
+
+ if (fault == NoFault) {
+ fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
+ NULL);
+ }
+
+ return fault;
+ }
+}};
+
+def template StoreEx64Execute {{
+ Fault %(class_name)s::execute(%(CPU_exec_context)s *xc,
+ Trace::InstRecord *traceData) const
+ {
+ Addr EA;
+ Fault fault = NoFault;
+
+ %(op_decl)s;
+ %(op_rd)s;
+ %(ea_code)s;
+
+ if (fault == NoFault) {
+ %(memacc_code)s;
+ }
+
+ uint64_t writeResult = 0;
+ if (fault == NoFault) {
+ fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
+ &writeResult);
+ }
+
+ if (fault == NoFault) {
+ %(postacc_code)s;
+ }
+
+ if (fault == NoFault) {
+ %(op_wb)s;
+ }
+
+ return fault;
+ }
+}};
+
+def template StoreEx64InitiateAcc {{
+ Fault %(class_name)s::initiateAcc(%(CPU_exec_context)s *xc,
+ Trace::InstRecord *traceData) const
+ {
+ Addr EA;
+ Fault fault = NoFault;
+
+ %(op_decl)s;
+ %(op_rd)s;
+ %(ea_code)s;
+
+ if (fault == NoFault) {
+ %(memacc_code)s;
+ }
+
+ if (fault == NoFault) {
+ fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
+ NULL);
+ }
+
+ return fault;
+ }
+}};
+
+def template Load64InitiateAcc {{
+ Fault %(class_name)s::initiateAcc(%(CPU_exec_context)s *xc,
+ Trace::InstRecord *traceData) const
+ {
+ Addr EA;
+ Fault fault = NoFault;
+
+ %(op_src_decl)s;
+ %(op_rd)s;
+ %(ea_code)s;
+
+ if (fault == NoFault) {
+ fault = readMemTiming(xc, traceData, EA, Mem, memAccessFlags);
+ }
+
+ return fault;
+ }
+}};
+
+def template Load64CompleteAcc {{
+ Fault %(class_name)s::completeAcc(PacketPtr pkt,
+ %(CPU_exec_context)s *xc,
+ Trace::InstRecord *traceData) const
+ {
+ Fault fault = NoFault;
+
+ %(op_decl)s;
+ %(op_rd)s;
+
+ // ARM instructions will not have a pkt if the predicate is false
+ getMem(pkt, Mem, traceData);
+
+ if (fault == NoFault) {
+ %(memacc_code)s;
+ }
+
+ if (fault == NoFault) {
+ %(op_wb)s;
+ }
+
+ return fault;
+ }
+}};
+
+def template Store64CompleteAcc {{
+ Fault %(class_name)s::completeAcc(PacketPtr pkt,
+ %(CPU_exec_context)s *xc,
+ Trace::InstRecord *traceData) const
+ {
+ return NoFault;
+ }
+}};
+
+def template StoreEx64CompleteAcc {{
+ Fault %(class_name)s::completeAcc(PacketPtr pkt,
+ %(CPU_exec_context)s *xc,
+ Trace::InstRecord *traceData) const
+ {
+ Fault fault = NoFault;
+
+ %(op_decl)s;
+ %(op_rd)s;
+
+ uint64_t writeResult = pkt->req->getExtraData();
+ %(postacc_code)s;
+
+ if (fault == NoFault) {
+ %(op_wb)s;
+ }
+
+ return fault;
+ }
+}};
+
+def template DCStore64Declare {{
+ class %(class_name)s : public %(base_class)s
+ {
+ public:
+
+ /// Constructor.
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _base, IntRegIndex _dest, uint64_t _imm);
+
+ %(BasicExecDeclare)s
+ %(InitiateAccDeclare)s
+ %(CompleteAccDeclare)s
+
+ virtual void
+ annotateFault(ArmFault *fault) {
+ %(fa_code)s
+ }
+ };
+}};
+
+def template DCStore64Constructor {{
+ %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base, IntRegIndex _dest, uint64_t _imm)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ (IntRegIndex)_base, _dest, _imm)
+ {
+ %(constructor)s;
+ assert(!%(use_uops)d);
+ }
+}};
+
+def template DCStore64Execute {{
+ Fault %(class_name)s::execute(%(CPU_exec_context)s *xc,
+ Trace::InstRecord *traceData) const
+ {
+ Addr EA;
+ Fault fault = NoFault;
+
+ %(op_decl)s;
+ %(op_rd)s;
+ %(ea_code)s;
+
+
+ if (fault == NoFault) {
+ %(memacc_code)s;
+ }
+
+ if (fault == NoFault) {
+ fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
+ }
+
+ if (fault == NoFault) {
+ %(op_wb)s;
+ }
+
+ return fault;
+ }
+}};
+
+def template DCStore64InitiateAcc {{
+ Fault %(class_name)s::initiateAcc(%(CPU_exec_context)s *xc,
+ Trace::InstRecord *traceData) const
+ {
+ Addr EA;
+ Fault fault = NoFault;
+
+ %(op_decl)s;
+ %(op_rd)s;
+ %(ea_code)s;
+
+ if (fault == NoFault) {
+ %(memacc_code)s;
+ }
+
+ if (fault == NoFault) {
+ fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
+ }
+
+ return fault;
+ }
+}};
+
+
+def template LoadStoreImm64Declare {{
+ class %(class_name)s : public %(base_class)s
+ {
+ public:
+
+ /// Constructor.
+ %(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _base, int64_t _imm);
+
+ %(BasicExecDeclare)s
+ %(InitiateAccDeclare)s
+ %(CompleteAccDeclare)s
+
+ virtual void
+ annotateFault(ArmFault *fault) {
+ %(fa_code)s
+ }
+ };
+}};
+
+def template LoadStoreImmU64Declare {{
+ class %(class_name)s : public %(base_class)s
+ {
+ public:
+
+ /// Constructor.
+ %(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
+ bool noAlloc = false, bool exclusive = false,
+ bool acrel = false);
+
+ %(BasicExecDeclare)s
+ %(InitiateAccDeclare)s
+ %(CompleteAccDeclare)s
+
+ virtual void
+ annotateFault(ArmFault *fault) {
+ %(fa_code)s
+ }
+ };
+}};
+
+def template LoadStoreImmDU64Declare {{
+ class %(class_name)s : public %(base_class)s
+ {
+ public:
+
+ /// Constructor.
+ %(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
+ int64_t _imm = 0, bool noAlloc = false, bool exclusive = false,
+ bool acrel = false);
+
+ %(BasicExecDeclare)s
+ %(InitiateAccDeclare)s
+ %(CompleteAccDeclare)s
+
+ virtual void
+ annotateFault(ArmFault *fault) {
+ %(fa_code)s
+ }
+ };
+}};
+
+def template StoreImmDEx64Declare {{
+ /**
+ * Static instruction class for "%(mnemonic)s".
+ */
+ class %(class_name)s : public %(base_class)s
+ {
+ public:
+
+ /// Constructor.
+ %(class_name)s(ExtMachInst machInst,
+ IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
+ IntRegIndex _base, int64_t _imm = 0);
+
+ %(BasicExecDeclare)s
+
+ %(InitiateAccDeclare)s
+
+ %(CompleteAccDeclare)s
+ };
+}};
+
+
+def template LoadStoreReg64Declare {{
+ class %(class_name)s : public %(base_class)s
+ {
+ public:
+
+ /// Constructor.
+ %(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
+ ArmExtendType _type, uint32_t _shiftAmt);
+
+ %(BasicExecDeclare)s
+ %(InitiateAccDeclare)s
+ %(CompleteAccDeclare)s
+
+ virtual void
+ annotateFault(ArmFault *fault) {
+ %(fa_code)s
+ }
+ };
+}};
+
+def template LoadStoreRegU64Declare {{
+ class %(class_name)s : public %(base_class)s
+ {
+ public:
+
+ /// Constructor.
+ %(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
+ ArmExtendType _type, uint32_t _shiftAmt,
+ bool noAlloc = false, bool exclusive = false,
+ bool acrel = false);
+
+ %(BasicExecDeclare)s
+ %(InitiateAccDeclare)s
+ %(CompleteAccDeclare)s
+
+ virtual void
+ annotateFault(ArmFault *fault) {
+ %(fa_code)s
+ }
+ };
+}};
+
+def template LoadStoreRaw64Declare {{
+ class %(class_name)s : public %(base_class)s
+ {
+ public:
+
+ /// Constructor.
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
+ IntRegIndex _base);
+
+ %(BasicExecDeclare)s
+ %(InitiateAccDeclare)s
+ %(CompleteAccDeclare)s
+
+ virtual void
+ annotateFault(ArmFault *fault) {
+ %(fa_code)s
+ }
+ };
+}};
+
+def template LoadStoreEx64Declare {{
+ class %(class_name)s : public %(base_class)s
+ {
+ public:
+
+ /// Constructor.
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
+ IntRegIndex _base, IntRegIndex _result);
+
+ %(BasicExecDeclare)s
+ %(InitiateAccDeclare)s
+ %(CompleteAccDeclare)s
+
+ virtual void
+ annotateFault(ArmFault *fault) {
+ %(fa_code)s
+ }
+ };
+}};
+
+def template LoadStoreLit64Declare {{
+ class %(class_name)s : public %(base_class)s
+ {
+ public:
+
+ /// Constructor.
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);
+
+ %(BasicExecDeclare)s
+ %(InitiateAccDeclare)s
+ %(CompleteAccDeclare)s
+
+ virtual void
+ annotateFault(ArmFault *fault) {
+ %(fa_code)s
+ }
+ };
+}};
+
+def template LoadStoreLitU64Declare {{
+ class %(class_name)s : public %(base_class)s
+ {
+ public:
+
+ /// Constructor.
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
+ bool noAlloc = false, bool exclusive = false,
+ bool acrel = false);
+
+ %(BasicExecDeclare)s
+ %(InitiateAccDeclare)s
+ %(CompleteAccDeclare)s
+
+ virtual void
+ annotateFault(ArmFault *fault) {
+ %(fa_code)s
+ }
+ };
+}};
+
+def template LoadStoreImm64Constructor {{
+ %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
+ {
+ %(constructor)s;
+#if %(use_uops)d
+ assert(numMicroops >= 2);
+ uops = new StaticInstPtr[numMicroops];
+ uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
+ uops[0]->setDelayedCommit();
+ uops[1] = new %(wb_decl)s;
+ uops[1]->setLastMicroop();
+#endif
+ }
+}};
+
+def template LoadStoreImmU64Constructor {{
+ %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
+ bool noAlloc, bool exclusive, bool acrel)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _base, _imm)
+ {
+ %(constructor)s;
+ assert(!%(use_uops)d);
+ setExcAcRel(exclusive, acrel);
+ }
+}};
+
+def template LoadStoreImmDU64Constructor {{
+ %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
+ int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _dest2, _base, _imm)
+ {
+ %(constructor)s;
+ assert(!%(use_uops)d);
+ setExcAcRel(exclusive, acrel);
+ }
+}};
+
+def template StoreImmDEx64Constructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
+ IntRegIndex _base, int64_t _imm)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _result, _dest, _dest2, _base, _imm)
+ {
+ %(constructor)s;
+ assert(!%(use_uops)d);
+ }
+}};
+
+
+def template LoadStoreReg64Constructor {{
+ %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
+ ArmExtendType _type, uint32_t _shiftAmt)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _base, _offset, _type, _shiftAmt)
+ {
+ %(constructor)s;
+#if %(use_uops)d
+ assert(numMicroops >= 2);
+ uops = new StaticInstPtr[numMicroops];
+ uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
+ _type, _shiftAmt);
+ uops[0]->setDelayedCommit();
+ uops[1] = new %(wb_decl)s;
+ uops[1]->setLastMicroop();
+#endif
+ }
+}};
+
+def template LoadStoreRegU64Constructor {{
+ %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
+ ArmExtendType _type, uint32_t _shiftAmt,
+ bool noAlloc, bool exclusive, bool acrel)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _base, _offset, _type, _shiftAmt)
+ {
+ %(constructor)s;
+ assert(!%(use_uops)d);
+ setExcAcRel(exclusive, acrel);
+ }
+}};
+
+def template LoadStoreRaw64Constructor {{
+ %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _base)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template LoadStoreEx64Constructor {{
+ %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _base, _result)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template LoadStoreLit64Constructor {{
+ %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, int64_t _imm)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ (IntRegIndex)_dest, _imm)
+ {
+ %(constructor)s;
+#if %(use_uops)d
+ assert(numMicroops >= 2);
+ uops = new StaticInstPtr[numMicroops];
+ uops[0] = new %(acc_name)s(machInst, _dest, _imm);
+ uops[0]->setDelayedCommit();
+ uops[1] = new %(wb_decl)s;
+ uops[1]->setLastMicroop();
+#endif
+ }
+}};
+
+def template LoadStoreLitU64Constructor {{
+ %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, int64_t _imm,
+ bool noAlloc, bool exclusive, bool acrel)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ (IntRegIndex)_dest, _imm)
+ {
+ %(constructor)s;
+ assert(!%(use_uops)d);
+ setExcAcRel(exclusive, acrel);
+ }
+}};
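
Several constructors in mem64.isa above (LoadStoreImm64Constructor, LoadStoreReg64Constructor, LoadStoreLit64Constructor) split the instruction into micro-ops when %(use_uops)d is set, which is how writeback addressing modes are handled: uops[0] does the memory access and is marked for delayed commit, while uops[1] performs the base-register writeback and is marked as the last micro-op. A toy model of that split (the class and flag names are illustrative, not gem5's StaticInst interface):

# Toy model of the use_uops split: a writeback load becomes an access
# micro-op plus a base-writeback micro-op. All names are illustrative.
class Uop:
    def __init__(self, text):
        self.text = text
        self.delayed_commit = False  # hold commit until the pair completes
        self.last_microop = False    # marks the end of the macro-op

def split_writeback_load(dest, base, imm):
    acc = Uop("ldr x%d, [x%d, #%d]" % (dest, base, imm))
    acc.delayed_commit = True        # mirrors uops[0]->setDelayedCommit()
    wb = Uop("add x%d, x%d, #%d" % (base, base, imm))
    wb.last_microop = True           # mirrors uops[1]->setLastMicroop()
    return [acc, wb]

for u in split_writeback_load(0, 1, 16):
    print(u.text, u.delayed_commit, u.last_microop)
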
diff --git a/src/arch/arm/isa/templates/misc.isa b/src/arch/arm/isa/templates/misc.isa
index 212897aa0..36db5b6c2 100644
--- a/src/arch/arm/isa/templates/misc.isa
+++ b/src/arch/arm/isa/templates/misc.isa
@@ -1,6 +1,6 @@
// -*- mode:c++ -*-
-// Copyright (c) 2010 ARM Limited
+// Copyright (c) 2010-2013 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
@@ -62,6 +62,69 @@ def template MrsConstructor {{
}
}};
+def template MrsBankedRegDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ protected:
+ uint8_t byteMask;
+ bool r;
+
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
+ uint8_t _sysM, bool _r);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template MrsBankedRegConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest,
+ uint8_t _sysM,
+ bool _r)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest),
+ byteMask(_sysM), r(_r)
+ {
+ %(constructor)s;
+ if (!(condCode == COND_AL || condCode == COND_UC)) {
+ for (int x = 0; x < _numDestRegs; x++) {
+ _srcRegIdx[_numSrcRegs++] = _destRegIdx[x];
+ }
+ }
+ }
+}};
+
+def template MsrBankedRegDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ protected:
+ bool r;
+
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _op1,
+ uint8_t _sysM, bool _r);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template MsrBankedRegConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _op1,
+ uint8_t _sysM,
+ bool _r)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _op1, _sysM),
+ r(_r)
+ {
+ %(constructor)s;
+ if (!(condCode == COND_AL || condCode == COND_UC)) {
+ for (int x = 0; x < _numDestRegs; x++) {
+ _srcRegIdx[_numSrcRegs++] = _destRegIdx[x];
+ }
+ }
+ }
+}};
+
def template MsrRegDeclare {{
class %(class_name)s : public %(base_class)s
{
@@ -114,6 +177,66 @@ def template MsrImmConstructor {{
}
}};
+def template MrrcOpDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ protected:
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _op1,
+ IntRegIndex _dest, IntRegIndex _dest2, uint32_t imm);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template MrrcOpConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex op1,
+ IntRegIndex dest,
+ IntRegIndex dest2,
+ uint32_t imm)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, op1, dest,
+ dest2, imm)
+ {
+ %(constructor)s;
+ if (!(condCode == COND_AL || condCode == COND_UC)) {
+ for (int x = 0; x < _numDestRegs; x++) {
+ _srcRegIdx[_numSrcRegs++] = _destRegIdx[x];
+ }
+ }
+ }
+}};
+
+def template McrrOpDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ protected:
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, IntRegIndex _op1, IntRegIndex _op2,
+ IntRegIndex _dest, uint32_t imm);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template McrrOpConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex op1,
+ IntRegIndex op2,
+ IntRegIndex dest,
+ uint32_t imm)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, op1, op2,
+ dest, imm)
+ {
+ %(constructor)s;
+ if (!(condCode == COND_AL || condCode == COND_UC)) {
+ for (int x = 0; x < _numDestRegs; x++) {
+ _srcRegIdx[_numSrcRegs++] = _destRegIdx[x];
+ }
+ }
+ }
+}};
+
def template ImmOpDeclare {{
class %(class_name)s : public %(base_class)s
{
@@ -310,6 +433,35 @@ def template RegRegImmOpConstructor {{
}
}};
+def template RegImmImmOpDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ protected:
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, uint64_t _imm1, uint64_t _imm2);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template RegImmImmOpConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest,
+ uint64_t _imm1,
+ uint64_t _imm2)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _imm1, _imm2)
+ {
+ %(constructor)s;
+ if (!(condCode == COND_AL || condCode == COND_UC)) {
+ for (int x = 0; x < _numDestRegs; x++) {
+ _srcRegIdx[_numSrcRegs++] = _destRegIdx[x];
+ }
+ }
+ }
+}};
+
def template RegRegImmImmOpDeclare {{
class %(class_name)s : public %(base_class)s
{
diff --git a/src/arch/arm/isa/templates/misc64.isa b/src/arch/arm/isa/templates/misc64.isa
new file mode 100644
index 000000000..09d3d4470
--- /dev/null
+++ b/src/arch/arm/isa/templates/misc64.isa
@@ -0,0 +1,91 @@
+// -*- mode:c++ -*-
+
+// Copyright (c) 2011 ARM Limited
+// All rights reserved
+//
+// The license below extends only to copyright in the software and shall
+// not be construed as granting a license to any other intellectual
+// property including but not limited to intellectual property relating
+// to a hardware implementation of the functionality of the software
+// licensed hereunder. You may use the software subject to the license
+// terms below provided that you ensure that this notice is replicated
+// unmodified and in its entirety in all distributions of the software,
+// modified or unmodified, in source code or in binary form.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met: redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer;
+// redistributions in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution;
+// neither the name of the copyright holders nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: Gabe Black
+
+def template RegRegImmImmOp64Declare {{
+class %(class_name)s : public %(base_class)s
+{
+ protected:
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _op1,
+ uint64_t _imm1, uint64_t _imm2);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template RegRegImmImmOp64Constructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest,
+ IntRegIndex _op1,
+ uint64_t _imm1,
+ uint64_t _imm2)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _imm1, _imm2)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template RegRegRegImmOp64Declare {{
+class %(class_name)s : public %(base_class)s
+{
+ protected:
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _op1,
+ IntRegIndex _op2, uint64_t _imm);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template RegRegRegImmOp64Constructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest,
+ IntRegIndex _op1,
+ IntRegIndex _op2,
+ uint64_t _imm)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _op2, _imm)
+ {
+ %(constructor)s;
+ }
+}};
+
diff --git a/src/arch/arm/isa/templates/neon.isa b/src/arch/arm/isa/templates/neon.isa
index 573d245b8..ffa6b53d4 100644
--- a/src/arch/arm/isa/templates/neon.isa
+++ b/src/arch/arm/isa/templates/neon.isa
@@ -1,6 +1,6 @@
// -*- mode:c++ -*-
-// Copyright (c) 2010 ARM Limited
+// Copyright (c) 2010-2012 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
@@ -39,8 +39,26 @@
let {{
simdEnabledCheckCode = '''
- if (!neonEnabled(Cpacr, Cpsr, Fpexc))
- return disabledFault();
+ {
+ uint32_t issEnCheck;
+ bool trapEnCheck;
+ uint32_t seq;
+ if (!vfpNeonEnabled(seq, Hcptr, Nsacr, Cpacr, Cpsr, issEnCheck,
+ trapEnCheck, xc->tcBase(), Fpexc, true))
+ {return disabledFault();}
+ if (trapEnCheck) {
+ CPSR cpsrEnCheck = Cpsr;
+ if (cpsrEnCheck.mode == MODE_HYP) {
+ return new UndefinedInstruction(machInst, issEnCheck,
+ EC_TRAPPED_HCPTR);
+ } else {
+ if (!inSecureState(Scr, Cpsr)) {
+ return new HypervisorTrap(machInst, issEnCheck,
+ EC_TRAPPED_HCPTR);
+ }
+ }
+ }
+ }
'''
}};
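
The replacement check above extends the old single-predicate test to the full enable/trap chain needed once Virtualization and TrustZone are modeled: a disabled SIMD unit still yields the ordinary disabled fault, but if HCPTR traps are armed the access is undefined in Hyp mode itself and traps to the hypervisor from non-secure state. A compact model of that decision chain (boolean inputs stand in for the CPACR/NSACR/HCPTR/FPEXC state that vfpNeonEnabled() actually inspects):

# Model of the SIMD enable/trap chain above; inputs are simplified booleans.
def check_simd_access(enabled, trap_to_hyp, in_hyp_mode, in_secure_state):
    if not enabled:
        return "disabledFault"  # CPACR/FPEXC deny SIMD outright
    if trap_to_hyp:
        if in_hyp_mode:
            return "UndefinedInstruction(EC_TRAPPED_HCPTR)"
        if not in_secure_state:
            return "HypervisorTrap(EC_TRAPPED_HCPTR)"
    return "NoFault"

print(check_simd_access(True, True, False, False))  # -> HypervisorTrap(...)
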
diff --git a/src/arch/arm/isa/templates/neon64.isa b/src/arch/arm/isa/templates/neon64.isa
new file mode 100644
index 000000000..d20e4e653
--- /dev/null
+++ b/src/arch/arm/isa/templates/neon64.isa
@@ -0,0 +1,527 @@
+// -*- mode: c++ -*-
+
+// Copyright (c) 2012-2013 ARM Limited
+// All rights reserved
+//
+// The license below extends only to copyright in the software and shall
+// not be construed as granting a license to any other intellectual
+// property including but not limited to intellectual property relating
+// to a hardware implementation of the functionality of the software
+// licensed hereunder. You may use the software subject to the license
+// terms below provided that you ensure that this notice is replicated
+// unmodified and in its entirety in all distributions of the software,
+// modified or unmodified, in source code or in binary form.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met: redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer;
+// redistributions in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution;
+// neither the name of the copyright holders nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: Mbou Eyole
+// Giacomo Gabrielli
+
+let {{
+ simd64EnabledCheckCode = vfp64EnabledCheckCode
+}};
+
+def template NeonX2RegOpDeclare {{
+template <class _Element>
+class %(class_name)s : public %(base_class)s
+{
+ protected:
+ typedef _Element Element;
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _op1, IntRegIndex _op2)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _op2)
+ {
+ %(constructor)s;
+ }
+
+ %(BasicExecDeclare)s
+};
+}};
+
+def template NeonX2RegImmOpDeclare {{
+template <class _Element>
+class %(class_name)s : public %(base_class)s
+{
+ protected:
+ typedef _Element Element;
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _op1, IntRegIndex _op2,
+ uint64_t _imm)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _op2, _imm)
+ {
+ %(constructor)s;
+ }
+
+ %(BasicExecDeclare)s
+};
+}};
+
+def template NeonX1RegOpDeclare {{
+template <class _Element>
+class %(class_name)s : public %(base_class)s
+{
+ protected:
+ typedef _Element Element;
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _op1)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1)
+ {
+ %(constructor)s;
+ }
+
+ %(BasicExecDeclare)s
+};
+}};
+
+def template NeonX1RegImmOpDeclare {{
+template <class _Element>
+class %(class_name)s : public %(base_class)s
+{
+ protected:
+ typedef _Element Element;
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _op1, uint64_t _imm)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _imm)
+ {
+ %(constructor)s;
+ }
+
+ %(BasicExecDeclare)s
+};
+}};
+
+def template NeonX1Reg2ImmOpDeclare {{
+template <class _Element>
+class %(class_name)s : public %(base_class)s
+{
+ protected:
+ typedef _Element Element;
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _op1, uint64_t _imm1,
+ uint64_t _imm2)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _imm1, _imm2)
+ {
+ %(constructor)s;
+ }
+
+ %(BasicExecDeclare)s
+};
+}};
+
+def template NeonX1RegImmOnlyOpDeclare {{
+template <class _Element>
+class %(class_name)s : public %(base_class)s
+{
+ protected:
+ typedef _Element Element;
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, uint64_t _imm)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _imm)
+ {
+ %(constructor)s;
+ }
+
+ %(BasicExecDeclare)s
+};
+}};
+
+def template NeonXExecDeclare {{
+ template
+ Fault %(class_name)s<%(targs)s>::execute(
+ %(CPU_exec_context)s *, Trace::InstRecord *) const;
+}};
+
+def template NeonXEqualRegOpExecute {{
+ template <class Element>
+ Fault %(class_name)s<Element>::execute(%(CPU_exec_context)s *xc,
+ Trace::InstRecord *traceData) const
+ {
+ Fault fault = NoFault;
+ %(op_decl)s;
+ %(op_rd)s;
+
+ const unsigned rCount = %(r_count)d;
+ const unsigned eCount = rCount * sizeof(FloatRegBits) / sizeof(Element);
+ const unsigned eCountFull = 4 * sizeof(FloatRegBits) / sizeof(Element);
+
+ union RegVect {
+ FloatRegBits regs[rCount];
+ Element elements[eCount];
+ };
+
+ union FullRegVect {
+ FloatRegBits regs[4];
+ Element elements[eCountFull];
+ };
+
+ %(code)s;
+ if (fault == NoFault)
+ {
+ %(op_wb)s;
+ }
+
+ return fault;
+ }
+}};
+
+def template NeonXUnequalRegOpExecute {{
+ template <class Element>
+ Fault %(class_name)s<Element>::execute(%(CPU_exec_context)s *xc,
+ Trace::InstRecord *traceData) const
+ {
+ typedef typename bigger_type_t<Element>::type BigElement;
+ Fault fault = NoFault;
+ %(op_decl)s;
+ %(op_rd)s;
+
+ const unsigned rCount = %(r_count)d;
+ const unsigned eCount = rCount * sizeof(FloatRegBits) / sizeof(Element);
+ const unsigned eCountFull = 4 * sizeof(FloatRegBits) / sizeof(Element);
+
+ union RegVect {
+ FloatRegBits regs[rCount];
+ Element elements[eCount];
+ BigElement bigElements[eCount / 2];
+ };
+
+ union BigRegVect {
+ FloatRegBits regs[2 * rCount];
+ BigElement elements[eCount];
+ };
+
+ union FullRegVect {
+ FloatRegBits regs[4];
+ Element elements[eCountFull];
+ };
+
+ %(code)s;
+ if (fault == NoFault)
+ {
+ %(op_wb)s;
+ }
+
+ return fault;
+ }
+}};
+
+def template MicroNeonMemDeclare64 {{
+ class %(class_name)s : public %(base_class)s
+ {
+ protected:
+ // True if the base register is SP (used for SP alignment checking)
+ bool baseIsSP;
+ // Access size in bytes
+ uint8_t accSize;
+ // Vector element size (0 -> 8-bit, 1 -> 16-bit, 2 -> 32-bit,
+ // 3 -> 64-bit)
+ uint8_t eSize;
+
+ public:
+ %(class_name)s(ExtMachInst machInst, RegIndex _dest, RegIndex _ura,
+ uint32_t _imm, unsigned extraMemFlags, bool _baseIsSP,
+ uint8_t _accSize, uint8_t _eSize)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest,
+ _ura, _imm),
+ baseIsSP(_baseIsSP), accSize(_accSize), eSize(_eSize)
+ {
+ memAccessFlags |= extraMemFlags;
+ %(constructor)s;
+ }
+
+ %(BasicExecDeclare)s
+ %(InitiateAccDeclare)s
+ %(CompleteAccDeclare)s
+ };
+}};
+
+def template NeonLoadExecute64 {{
+ Fault %(class_name)s::execute(
+ %(CPU_exec_context)s *xc, Trace::InstRecord *traceData) const
+ {
+ Addr EA;
+ Fault fault = NoFault;
+
+ %(op_decl)s;
+ %(mem_decl)s;
+ %(op_rd)s;
+ %(ea_code)s;
+
+ MemUnion memUnion;
+ uint8_t *dataPtr = memUnion.bytes;
+
+ if (fault == NoFault) {
+ fault = xc->readMem(EA, dataPtr, accSize, memAccessFlags);
+ %(memacc_code)s;
+ }
+
+ if (fault == NoFault) {
+ %(op_wb)s;
+ }
+
+ return fault;
+ }
+}};
+
+def template NeonLoadInitiateAcc64 {{
+ Fault %(class_name)s::initiateAcc(
+ %(CPU_exec_context)s *xc, Trace::InstRecord *traceData) const
+ {
+ Addr EA;
+ Fault fault = NoFault;
+
+ %(op_decl)s;
+ %(mem_decl)s;
+ %(op_rd)s;
+ %(ea_code)s;
+
+ MemUnion memUnion;
+ uint8_t *dataPtr = memUnion.bytes;
+
+ if (fault == NoFault) {
+ fault = xc->readMem(EA, dataPtr, accSize, memAccessFlags);
+ }
+
+ return fault;
+ }
+}};
+
+def template NeonLoadCompleteAcc64 {{
+ Fault %(class_name)s::completeAcc(
+ PacketPtr pkt, %(CPU_exec_context)s *xc,
+ Trace::InstRecord *traceData) const
+ {
+ Fault fault = NoFault;
+
+ %(mem_decl)s;
+ %(op_decl)s;
+ %(op_rd)s;
+
+ MemUnion &memUnion = *(MemUnion *)pkt->getPtr<uint8_t>();
+
+ if (fault == NoFault) {
+ %(memacc_code)s;
+ }
+
+ if (fault == NoFault) {
+ %(op_wb)s;
+ }
+
+ return fault;
+ }
+}};
+
+def template NeonStoreExecute64 {{
+ Fault %(class_name)s::execute(
+ %(CPU_exec_context)s *xc, Trace::InstRecord *traceData) const
+ {
+ Addr EA;
+ Fault fault = NoFault;
+
+ %(op_decl)s;
+ %(mem_decl)s;
+ %(op_rd)s;
+ %(ea_code)s;
+
+ MemUnion memUnion;
+ uint8_t *dataPtr = memUnion.bytes;
+
+ if (fault == NoFault) {
+ %(memacc_code)s;
+ }
+
+ if (fault == NoFault) {
+ fault = xc->writeMem(dataPtr, accSize, EA, memAccessFlags,
+ NULL);
+ }
+
+ if (fault == NoFault) {
+ %(op_wb)s;
+ }
+
+ return fault;
+ }
+}};
+
+def template NeonStoreInitiateAcc64 {{
+ Fault %(class_name)s::initiateAcc(
+ %(CPU_exec_context)s *xc, Trace::InstRecord *traceData) const
+ {
+ Addr EA;
+ Fault fault = NoFault;
+
+ %(op_decl)s;
+ %(mem_decl)s;
+ %(op_rd)s;
+ %(ea_code)s;
+
+ MemUnion memUnion;
+ if (fault == NoFault) {
+ %(memacc_code)s;
+ }
+
+ if (fault == NoFault) {
+ fault = xc->writeMem(memUnion.bytes, accSize, EA, memAccessFlags,
+ NULL);
+ }
+
+ return fault;
+ }
+}};
+
+def template NeonStoreCompleteAcc64 {{
+ Fault %(class_name)s::completeAcc(
+ PacketPtr pkt, %(CPU_exec_context)s *xc,
+ Trace::InstRecord *traceData) const
+ {
+ return NoFault;
+ }
+}};
+
+def template VMemMultDeclare64 {{
+ class %(class_name)s : public %(base_class)s
+ {
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, RegIndex rn, RegIndex vd,
+ RegIndex rm, uint8_t eSize, uint8_t dataSize,
+ uint8_t numStructElems, uint8_t numRegs, bool wb);
+ %(BasicExecPanic)s
+ };
+}};
+
+def template VMemSingleDeclare64 {{
+ class %(class_name)s : public %(base_class)s
+ {
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst, RegIndex rn, RegIndex vd,
+ RegIndex rm, uint8_t eSize, uint8_t dataSize,
+ uint8_t numStructElems, uint8_t index, bool wb,
+ bool replicate = false);
+ %(BasicExecPanic)s
+ };
+}};
+
+def template VMemMultConstructor64 {{
+ %(class_name)s::%(class_name)s(
+ ExtMachInst machInst, RegIndex rn, RegIndex vd, RegIndex rm,
+ uint8_t _eSize, uint8_t _dataSize, uint8_t _numStructElems,
+ uint8_t _numRegs, bool _wb) :
+ %(base_class)s(
+ "%(mnemonic)s", machInst, %(op_class)s, rn, vd, rm,
+ _eSize, _dataSize, _numStructElems, _numRegs, _wb)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template VMemSingleConstructor64 {{
+ %(class_name)s::%(class_name)s(
+ ExtMachInst machInst, RegIndex rn, RegIndex vd, RegIndex rm,
+ uint8_t _eSize, uint8_t _dataSize, uint8_t _numStructElems,
+ uint8_t _index, bool _wb, bool _replicate) :
+ %(base_class)s(
+ "%(mnemonic)s", machInst, %(op_class)s, rn, vd, rm,
+ _eSize, _dataSize, _numStructElems, _index, _wb,
+ _replicate)
+ {
+ %(constructor)s;
+ }
+}};
+
+def template MicroNeonMixDeclare64 {{
+ class %(class_name)s : public %(base_class)s
+ {
+ public:
+ %(class_name)s(ExtMachInst machInst, RegIndex _dest, RegIndex _op1,
+ uint8_t _eSize, uint8_t _dataSize,
+ uint8_t _numStructElems, uint8_t _numRegs,
+ uint8_t _step) :
+ %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _eSize, _dataSize, _numStructElems,
+ _numRegs, _step)
+ {
+ %(constructor)s;
+ }
+
+ %(BasicExecDeclare)s
+ };
+}};
+
+def template MicroNeonMixLaneDeclare64 {{
+ class %(class_name)s : public %(base_class)s
+ {
+ public:
+ %(class_name)s(ExtMachInst machInst, RegIndex _dest, RegIndex _op1,
+ uint8_t _eSize, uint8_t _dataSize,
+ uint8_t _numStructElems, uint8_t _lane, uint8_t _step,
+ bool _replicate = false) :
+ %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _eSize, _dataSize, _numStructElems,
+ _lane, _step, _replicate)
+ {
+ %(constructor)s;
+ }
+
+ %(BasicExecDeclare)s
+ };
+}};
+
+def template MicroNeonMixExecute64 {{
+ Fault %(class_name)s::execute(%(CPU_exec_context)s *xc,
+ Trace::InstRecord *traceData) const
+ {
+ Fault fault = NoFault;
+ uint64_t resTemp = 0;
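+ // The self-assignment below only silences set-but-unused warnings
+ // when the substituted per-instruction code ignores resTemp.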
+ resTemp = resTemp;
+ %(op_decl)s;
+ %(op_rd)s;
+
+ %(code)s;
+ if (fault == NoFault) {
+ %(op_wb)s;
+ }
+
+ return fault;
+ }
+}};
diff --git a/src/arch/arm/isa/templates/templates.isa b/src/arch/arm/isa/templates/templates.isa
index 148139225..2263cdff4 100644
--- a/src/arch/arm/isa/templates/templates.isa
+++ b/src/arch/arm/isa/templates/templates.isa
@@ -1,6 +1,6 @@
// -*- mode:c++ -*-
-// Copyright (c) 2010 ARM Limited
+// Copyright (c) 2010-2011 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
@@ -40,26 +40,37 @@
//Basic instruction templates
##include "basic.isa"
+//Templates for AArch64 data instructions
+##include "data64.isa"
+
//Templates for predicated instructions
##include "pred.isa"
//Templates for memory instructions
##include "mem.isa"
+//Templates for AArch64 memory instructions
+##include "mem64.isa"
+
//Miscellaneous instructions that don't fit elsewhere
##include "misc.isa"
+##include "misc64.isa"
//Templates for microcoded memory instructions
##include "macromem.isa"
//Templates for branches
##include "branch.isa"
+##include "branch64.isa"
//Templates for multiplies
##include "mult.isa"
//Templates for VFP instructions
##include "vfp.isa"
+##include "vfp64.isa"
//Templates for Neon instructions
##include "neon.isa"
+
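+//Templates for AArch64 Neon instructions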
+##include "neon64.isa"
diff --git a/src/arch/arm/isa/templates/vfp.isa b/src/arch/arm/isa/templates/vfp.isa
index 90dd751ff..176b6604c 100644
--- a/src/arch/arm/isa/templates/vfp.isa
+++ b/src/arch/arm/isa/templates/vfp.isa
@@ -1,6 +1,6 @@
// -*- mode:c++ -*-
-// Copyright (c) 2010 ARM Limited
+// Copyright (c) 2010-2013 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
@@ -39,32 +39,117 @@
let {{
vfpEnabledCheckCode = '''
- if (!vfpEnabled(Cpacr, Cpsr, Fpexc))
- return disabledFault();
+ uint32_t issEnCheck;
+ bool trapEnCheck;
+ uint32_t seq;
+ if (!vfpNeonEnabled(seq, Hcptr, Nsacr, Cpacr, Cpsr, issEnCheck,
+ trapEnCheck, xc->tcBase(), Fpexc)) {
+ return disabledFault();
+ }
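+ // trapEnCheck requests a trap to Hyp mode via HCPTR; if the CPU is
+ // already in Hyp mode the access is undefined instead.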
+ if (trapEnCheck) {
+ CPSR cpsrEnCheck = Cpsr;
+ if (cpsrEnCheck.mode == MODE_HYP) {
+ return new UndefinedInstruction(machInst, issEnCheck,
+ EC_TRAPPED_HCPTR);
+ } else {
+ if (!inSecureState(Scr, Cpsr)) {
+ return new HypervisorTrap(machInst, issEnCheck,
+ EC_TRAPPED_HCPTR);
+ }
+ }
+ }
+ '''
+
+ vfp64EnabledCheckCode = '''
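+ // Check the FP/SIMD enables from lowest to highest exception level:
+ // CPACR_EL1 first, then CPTR_EL2 if EL2 is implemented, and finally
+ // CPTR_EL3 if EL3 is implemented.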
+ CPSR cpsrEnCheck = Cpsr;
+ ExceptionLevel el = (ExceptionLevel) (uint8_t) cpsrEnCheck.el;
+ if (!vfpNeon64Enabled(Cpacr64, el))
+ return new SupervisorTrap(machInst, 0x1E00000,
+ EC_TRAPPED_SIMD_FP);
+
+ if (ArmSystem::haveVirtualization(xc->tcBase()) && el <= EL2) {
+ HCPTR cptrEnCheck = xc->tcBase()->readMiscReg(MISCREG_CPTR_EL2);
+ if (cptrEnCheck.tfp)
+ return new HypervisorTrap(machInst, 0x1E00000,
+ EC_TRAPPED_SIMD_FP);
+ }
+
+ if (ArmSystem::haveSecurity(xc->tcBase())) {
+ HCPTR cptrEnCheck = xc->tcBase()->readMiscReg(MISCREG_CPTR_EL3);
+ if (cptrEnCheck.tfp)
+ return new SecureMonitorTrap(machInst, 0x1E00000,
+ EC_TRAPPED_SIMD_FP);
+ }
'''
vmsrEnabledCheckCode = '''
- if (!vfpEnabled(Cpacr, Cpsr))
+ uint32_t issEnCheck;
+ bool trapEnCheck;
+ uint32_t seq;
+ if (!vfpNeonEnabled(seq, Hcptr, Nsacr, Cpacr, Cpsr, issEnCheck,
+ trapEnCheck, xc->tcBase()))
if (dest != (int)MISCREG_FPEXC && dest != (int)MISCREG_FPSID)
- return disabledFault();
+ { return disabledFault(); }
if (!inPrivilegedMode(Cpsr))
if (dest != (int)MISCREG_FPSCR)
return disabledFault();
-
+ if (trapEnCheck) {
+ CPSR cpsrEnCheck = Cpsr;
+ if (cpsrEnCheck.mode == MODE_HYP) {
+ return new UndefinedInstruction(machInst, issEnCheck,
+ EC_TRAPPED_HCPTR);
+ } else {
+ if (!inSecureState(Scr, Cpsr)) {
+ return new HypervisorTrap(machInst, issEnCheck,
+ EC_TRAPPED_HCPTR);
+ }
+ }
+ }
'''
vmrsEnabledCheckCode = '''
- if (!vfpEnabled(Cpacr, Cpsr))
+ uint32_t issEnCheck;
+ bool trapEnCheck;
+ uint32_t seq;
+ if (!vfpNeonEnabled(seq, Hcptr, Nsacr, Cpacr, Cpsr, issEnCheck,
+ trapEnCheck, xc->tcBase()))
if (op1 != (int)MISCREG_FPEXC && op1 != (int)MISCREG_FPSID &&
op1 != (int)MISCREG_MVFR0 && op1 != (int)MISCREG_MVFR1)
- return disabledFault();
+ { return disabledFault(); }
if (!inPrivilegedMode(Cpsr))
if (op1 != (int)MISCREG_FPSCR)
return disabledFault();
+ if (trapEnCheck) {
+ CPSR cpsrEnCheck = Cpsr;
+ if (cpsrEnCheck.mode == MODE_HYP) {
+ return new UndefinedInstruction(machInst, issEnCheck,
+ EC_TRAPPED_HCPTR);
+ } else {
+ if (!inSecureState(Scr, Cpsr)) {
+ return new HypervisorTrap(machInst, issEnCheck,
+ EC_TRAPPED_HCPTR);
+ }
+ }
+ }
'''
vmrsApsrEnabledCheckCode = '''
- if (!vfpEnabled(Cpacr, Cpsr))
- return disabledFault();
+ uint32_t issEnCheck;
+ bool trapEnCheck;
+ uint32_t seq;
+ if (!vfpNeonEnabled(seq, Hcptr, Nsacr, Cpacr, Cpsr, issEnCheck,
+ trapEnCheck, xc->tcBase())) {
+ return disabledFault();
+ }
+ if (trapEnCheck) {
+ CPSR cpsrEnCheck = Cpsr;
+ if (cpsrEnCheck.mode == MODE_HYP) {
+ return new UndefinedInstruction(machInst, issEnCheck,
+ EC_TRAPPED_HCPTR);
+ } else {
+ if (!inSecureState(Scr, Cpsr)) {
+ return new HypervisorTrap(machInst, issEnCheck,
+ EC_TRAPPED_HCPTR);
+ }
+ }
+ }
'''
}};
diff --git a/src/arch/arm/isa/templates/vfp64.isa b/src/arch/arm/isa/templates/vfp64.isa
new file mode 100644
index 000000000..518cedaae
--- /dev/null
+++ b/src/arch/arm/isa/templates/vfp64.isa
@@ -0,0 +1,140 @@
+// -*- mode:c++ -*-
+
+// Copyright (c) 2012 ARM Limited
+// All rights reserved
+//
+// The license below extends only to copyright in the software and shall
+// not be construed as granting a license to any other intellectual
+// property including but not limited to intellectual property relating
+// to a hardware implementation of the functionality of the software
+// licensed hereunder. You may use the software subject to the license
+// terms below provided that you ensure that this notice is replicated
+// unmodified and in its entirety in all distributions of the software,
+// modified or unmodified, in source code or in binary form.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met: redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer;
+// redistributions in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution;
+// neither the name of the copyright holders nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: Thomas Grocutt
+
+def template AA64FpRegRegOpConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _op1,
+ VfpMicroMode mode)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, mode)
+ {
+ %(constructor)s;
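+ // List the destination registers as sources too, so their previous
+ // contents remain available to micro-ops that only partially update
+ // them.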
+ for (int x = 0; x < _numDestRegs; x++) {
+ _srcRegIdx[_numSrcRegs++] = _destRegIdx[x];
+ }
+ }
+}};
+
+def template AA64FpRegImmOpConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, uint64_t _imm, VfpMicroMode mode)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _imm, mode)
+ {
+ %(constructor)s;
+ for (int x = 0; x < _numDestRegs; x++) {
+ _srcRegIdx[_numSrcRegs++] = _destRegIdx[x];
+ }
+ }
+}};
+
+def template AA64FpRegRegImmOpConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest,
+ IntRegIndex _op1,
+ uint64_t _imm,
+ VfpMicroMode mode)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _imm, mode)
+ {
+ %(constructor)s;
+ for (int x = 0; x < _numDestRegs; x++) {
+ _srcRegIdx[_numSrcRegs++] = _destRegIdx[x];
+ }
+ }
+}};
+
+def template AA64FpRegRegRegOpConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest,
+ IntRegIndex _op1,
+ IntRegIndex _op2,
+ VfpMicroMode mode)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _op2, mode)
+ {
+ %(constructor)s;
+ for (int x = 0; x < _numDestRegs; x++) {
+ _srcRegIdx[_numSrcRegs++] = _destRegIdx[x];
+ }
+ }
+}};
+
+def template AA64FpRegRegRegRegOpDeclare {{
+class %(class_name)s : public %(base_class)s
+{
+ public:
+ // Constructor
+ %(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest, IntRegIndex _op1, IntRegIndex _op2,
+ IntRegIndex _op3, VfpMicroMode mode = VfpNotAMicroop);
+ %(BasicExecDeclare)s
+};
+}};
+
+def template AA64FpRegRegRegRegOpConstructor {{
+ inline %(class_name)s::%(class_name)s(ExtMachInst machInst,
+ IntRegIndex _dest,
+ IntRegIndex _op1,
+ IntRegIndex _op2,
+ IntRegIndex _op3,
+ VfpMicroMode mode)
+ : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
+ _dest, _op1, _op2, _op3, mode)
+ {
+ %(constructor)s;
+ for (int x = 0; x < _numDestRegs; x++) {
+ _srcRegIdx[_numSrcRegs++] = _destRegIdx[x];
+ }
+ }
+}};