author    andrewfish <andrewfish@6f19259b-4bc3-4df7-8a09-765794883524>  2010-02-19 18:51:10 +0000
committer andrewfish <andrewfish@6f19259b-4bc3-4df7-8a09-765794883524>  2010-02-19 18:51:10 +0000
commit    98bc0c8c056271095ae2a3a9ab7f2c3ccd64117e (patch)
tree      7aec4b8d6c212f1aad09a4282502330ccd580dc6 /ArmPkg
parent    752d258a42349bf5895efced6c1be1dd5cdfae66 (diff)
Sync gcc with armasm. Update some memory barriers.
git-svn-id: https://edk2.svn.sourceforge.net/svnroot/edk2/trunk/edk2@10025 6f19259b-4bc3-4df7-8a09-765794883524
Diffstat (limited to 'ArmPkg')
-rw-r--r--  ArmPkg/Drivers/CpuDxe/CpuDxe.inf                 |   4
-rw-r--r--  ArmPkg/Drivers/CpuDxe/ExceptionSupport.ARMv6.S   | 281
-rwxr-xr-x  ArmPkg/Drivers/CpuDxe/ExceptionSupport.ARMv6.asm |   2
-rw-r--r--  ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.S      |  27
-rw-r--r--  ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.asm    |  71
-rw-r--r--  ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.c           | 130
-rw-r--r--  ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.h           |   1
-rw-r--r--  ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S       |  91
-rw-r--r--  ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm     |  96
-rw-r--r--  ArmPkg/Library/ArmLib/Common/ArmLibPrivate.h     |  20
10 files changed, 570 insertions(+), 153 deletions(-)
diff --git a/ArmPkg/Drivers/CpuDxe/CpuDxe.inf b/ArmPkg/Drivers/CpuDxe/CpuDxe.inf
index d88d488fad..add7ae5067 100644
--- a/ArmPkg/Drivers/CpuDxe/CpuDxe.inf
+++ b/ArmPkg/Drivers/CpuDxe/CpuDxe.inf
@@ -32,13 +32,13 @@
# Prior to ARMv6 we have multiple stacks, one per mode
#
# ExceptionSupport.asm | RVCT
- ExceptionSupport.S | GCC
+# ExceptionSupport.S | GCC
#
# ARMv6 or later uses a single stack via srs/stm instructions
#
ExceptionSupport.ARMv6.asm | RVCT
-# ExceptionSupport.ARMv6.S | GCC
+ ExceptionSupport.ARMv6.S | GCC
Mmu.c
diff --git a/ArmPkg/Drivers/CpuDxe/ExceptionSupport.ARMv6.S b/ArmPkg/Drivers/CpuDxe/ExceptionSupport.ARMv6.S
new file mode 100644
index 0000000000..646aca76a4
--- /dev/null
+++ b/ArmPkg/Drivers/CpuDxe/ExceptionSupport.ARMv6.S
@@ -0,0 +1,281 @@
+#------------------------------------------------------------------------------
+#
+# Use ARMv6 instructions to operate on a single stack
+#
+# Copyright (c) 2008-2010 Apple Inc. All rights reserved.
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+#------------------------------------------------------------------------------
+
+/*
+
+This is the stack constructed by the exception handler (low address to high address)
+ # R0 - IFAR make up EFI_SYSTEM_CONTEXT_ARM
+ Reg Offset
+ === ======
+ R0 0x00 # stmfd SP!,{R0-R12}
+ R1 0x04
+ R2 0x08
+ R3 0x0c
+ R4 0x10
+ R5 0x14
+ R6 0x18
+ R7 0x1c
+ R8 0x20
+ R9 0x24
+ R10 0x28
+ R11 0x2c
+ R12 0x30
+ SP       0x34    # space reserved via sub SP,SP,#0x20 (32 bytes)
+ LR 0x38
+ PC 0x3c
+ CPSR 0x40
+ DFSR 0x44
+ DFAR 0x48
+ IFSR 0x4c
+ IFAR 0x50
+
+ LR 0x54 # SVC Link register (we need to restore it)
+
+ LR 0x58 # pushed by srsfd
+ CPSR 0x5c
+
+ */
+
+
+.globl ASM_PFX(ExceptionHandlersStart)
+.globl ASM_PFX(ExceptionHandlersEnd)
+.globl ASM_PFX(CommonExceptionEntry)
+.globl ASM_PFX(AsmCommonExceptionEntry)
+.globl ASM_PFX(CommonCExceptionHandler)
+
+.text
+.align 3
+
+
+//
+// This code gets copied to the ARM vector table
+// ExceptionHandlersStart - ExceptionHandlersEnd gets copied
+//
+ASM_PFX(ExceptionHandlersStart):
+
+ASM_PFX(Reset):
+ b ASM_PFX(ResetEntry)
+
+ASM_PFX(UndefinedInstruction):
+ b ASM_PFX(UndefinedInstructionEntry)
+
+ASM_PFX(SoftwareInterrupt):
+ b ASM_PFX(SoftwareInterruptEntry)
+
+ASM_PFX(PrefetchAbort):
+ b ASM_PFX(PrefetchAbortEntry)
+
+ASM_PFX(DataAbort):
+ b ASM_PFX(DataAbortEntry)
+
+ASM_PFX(ReservedException):
+ b ASM_PFX(ReservedExceptionEntry)
+
+ASM_PFX(Irq):
+ b ASM_PFX(IrqEntry)
+
+ASM_PFX(Fiq):
+ b ASM_PFX(FiqEntry)
+
+ASM_PFX(ResetEntry):
+ srsdb #0x13! @ Store return state on SVC stack
+ @ We are already in SVC mode
+
+ stmfd SP!,{LR} @ Store the link register for the current mode
+ sub SP,SP,#0x20 @ Save space for SP, LR, PC, IFAR - CPSR
+ stmfd SP!,{R0-R12} @ Store the register state
+
+ mov R0,#0 @ ExceptionType
+ ldr R1,ASM_PFX(CommonExceptionEntry)
+ bx R1
+
+ASM_PFX(UndefinedInstructionEntry):
+ sub LR, LR, #4 @ Only -2 for Thumb, adjust in CommonExceptionEntry
+ srsdb #0x13! @ Store return state on SVC stack
+ cps #0x13 @ Switch to SVC for common stack
+ stmfd SP!,{LR} @ Store the link register for the current mode
+ sub SP,SP,#0x20 @ Save space for SP, LR, PC, IFAR - CPSR
+ stmfd SP!,{R0-R12} @ Store the register state
+
+ mov R0,#1 @ ExceptionType
+ ldr R1,ASM_PFX(CommonExceptionEntry)
+ bx R1
+
+ASM_PFX(SoftwareInterruptEntry):
+ sub LR, LR, #4 @ Only -2 for Thumb, adjust in CommonExceptionEntry
+ srsdb #0x13! @ Store return state on SVC stack
+ @ We are already in SVC mode
+ stmfd SP!,{LR} @ Store the link register for the current mode
+ sub SP,SP,#0x20 @ Save space for SP, LR, PC, IFAR - CPSR
+ stmfd SP!,{R0-R12} @ Store the register state
+
+ mov R0,#2 @ ExceptionType
+ ldr R1,ASM_PFX(CommonExceptionEntry)
+ bx R1
+
+ASM_PFX(PrefetchAbortEntry):
+ sub LR,LR,#4
+ srsdb #0x13! @ Store return state on SVC stack
+ cps #0x13 @ Switch to SVC for common stack
+ stmfd SP!,{LR} @ Store the link register for the current mode
+ sub SP,SP,#0x20 @ Save space for SP, LR, PC, IFAR - CPSR
+ stmfd SP!,{R0-R12} @ Store the register state
+
+ mov R0,#3 @ ExceptionType
+ ldr R1,ASM_PFX(CommonExceptionEntry)
+ bx R1
+
+ASM_PFX(DataAbortEntry):
+ sub LR,LR,#8
+ srsdb #0x13! @ Store return state on SVC stack
+ cps #0x13 @ Switch to SVC for common stack
+ stmfd SP!,{LR} @ Store the link register for the current mode
+ sub SP,SP,#0x20 @ Save space for SP, LR, PC, IFAR - CPSR
+ stmfd SP!,{R0-R12} @ Store the register state
+
+ mov R0,#4
+ ldr R1,ASM_PFX(CommonExceptionEntry)
+ bx R1
+
+ASM_PFX(ReservedExceptionEntry):
+ srsdb #0x13! @ Store return state on SVC stack
+ cps #0x13 @ Switch to SVC for common stack
+ stmfd SP!,{LR} @ Store the link register for the current mode
+ sub SP,SP,#0x20 @ Save space for SP, LR, PC, IFAR - CPSR
+ stmfd SP!,{R0-R12} @ Store the register state
+
+ mov R0,#5
+ ldr R1,ASM_PFX(CommonExceptionEntry)
+ bx R1
+
+ASM_PFX(IrqEntry):
+ sub LR,LR,#4
+ srsdb #0x13! @ Store return state on SVC stack
+ cps #0x13 @ Switch to SVC for common stack
+ stmfd SP!,{LR} @ Store the link register for the current mode
+ sub SP,SP,#0x20 @ Save space for SP, LR, PC, IFAR - CPSR
+ stmfd SP!,{R0-R12} @ Store the register state
+
+ mov R0,#6 @ ExceptionType
+ ldr R1,ASM_PFX(CommonExceptionEntry)
+ bx R1
+
+ASM_PFX(FiqEntry):
+ sub LR,LR,#4
+ srsdb #0x13! @ Store return state on SVC stack
+ cps #0x13 @ Switch to SVC for common stack
+ stmfd SP!,{LR} @ Store the link register for the current mode
+ sub SP,SP,#0x20 @ Save space for SP, LR, PC, IFAR - CPSR
+ stmfd SP!,{R0-R12} @ Store the register state
+  @ Since we have already switched to SVC, R8_fiq - R12_fiq
+ @ never get used or saved
+ mov R0,#7 @ ExceptionType
+ ldr R1,ASM_PFX(CommonExceptionEntry)
+ bx R1
+
+//
+// This gets patched by the C code that patches in the vector table
+//
+ASM_PFX(CommonExceptionEntry):
+ .byte 0x12
+ .byte 0x34
+ .byte 0x56
+ .byte 0x78
+
+ASM_PFX(ExceptionHandlersEnd):
+
+//
+// This code runs from CpuDxe driver loaded address. It is patched into
+// CommonExceptionEntry.
+//
+ASM_PFX(AsmCommonExceptionEntry):
+ mrc p15, 0, R1, c6, c0, 2 @ Read IFAR
+ str R1, [SP, #0x50] @ Store it in EFI_SYSTEM_CONTEXT_ARM.IFAR
+
+ mrc p15, 0, R1, c5, c0, 1 @ Read IFSR
+ str R1, [SP, #0x4c] @ Store it in EFI_SYSTEM_CONTEXT_ARM.IFSR
+
+ mrc p15, 0, R1, c6, c0, 0 @ Read DFAR
+ str R1, [SP, #0x48] @ Store it in EFI_SYSTEM_CONTEXT_ARM.DFAR
+
+ mrc p15, 0, R1, c5, c0, 0 @ Read DFSR
+ str R1, [SP, #0x44] @ Store it in EFI_SYSTEM_CONTEXT_ARM.DFSR
+
+ ldr R1, [SP, #0x5c] @ srsdb saved pre-exception CPSR on the stack
+ str R1, [SP, #0x40] @ Store it in EFI_SYSTEM_CONTEXT_ARM.CPSR
+
+ add R2, SP, #0x38 @ Make R2 point to EFI_SYSTEM_CONTEXT_ARM.LR
+ and R3, R1, #0x1f @ Check CPSR to see if User or System Mode
+  cmp     R3, #0x1f      @ if ((CPSR == 0x10) || (CPSR == 0x1f))
+ cmpne R3, #0x10 @
+ stmeqed R2, {lr}^ @ save unbanked lr
+ @ else
+ stmneed R2, {lr} @ save SVC lr
+
+
+ ldr R5, [SP, #0x58] @ PC is the LR pushed by srsfd
+ @ Check to see if we have to adjust for Thumb entry
+ sub r4, r0, #1 @ if (ExceptionType == 1 || ExceptionType ==2)) {
+  cmp     r4, #1         @ UND & SVC have different LR adjust for Thumb
+ bhi NoAdjustNeeded
+
+  tst     r1, #0x20      @ if ((CPSR & T) == T) {  // Thumb Mode on entry
+  addne   R5, R5, #2     @   PC += 2
+  str     R5,[SP,#0x58]  @ Update LR value pushed by srsfd
+
+NoAdjustNeeded:
+
+ str R5, [SP, #0x3c] @ Store it in EFI_SYSTEM_CONTEXT_ARM.PC
+
+  add     R1, SP, #0x60  @ We pushed 0x60 bytes on the stack, so this is the pre-exception SP
+ str R1, [SP, #0x34] @ Store it in EFI_SYSTEM_CONTEXT_ARM.SP
+
+ @ R0 is ExceptionType
+ mov R1,SP @ R1 is SystemContext
+
+/*
+VOID
+EFIAPI
+CommonCExceptionHandler (
+ IN EFI_EXCEPTION_TYPE ExceptionType, R0
+ IN OUT EFI_SYSTEM_CONTEXT SystemContext R1
+ )
+
+*/
+ blx ASM_PFX(CommonCExceptionHandler) @ Call exception handler
+
+ ldr R1,[SP,#0x3c] @ EFI_SYSTEM_CONTEXT_ARM.PC
+ str R1,[SP,#0x58] @ Store it back to srsfd stack slot so it can be restored
+
+ ldr R1,[SP,#0x40] @ EFI_SYSTEM_CONTEXT_ARM.CPSR
+ str R1,[SP,#0x5c] @ Store it back to srsfd stack slot so it can be restored
+
+ add R3, SP, #0x54 @ Make R3 point to SVC LR saved on entry
+ add R2, SP, #0x38 @ Make R2 point to EFI_SYSTEM_CONTEXT_ARM.LR
+ and R1, R1, #0x1f @ Check to see if User or System Mode
+ cmp R1, #0x1f @ if ((CPSR == 0x10) || (CPSR == 0x1f))
+ cmpne R1, #0x10 @
+ ldmeqed R2, {lr}^ @ restore unbanked lr
+ @ else
+ ldmneed R3, {lr} @ restore SVC lr, via ldmfd SP!, {LR}
+
+ ldmfd SP!,{R0-R12} @ Restore general purpose registers
+ @ Exception handler can not change SP
+
+ add SP,SP,#0x20 @ Clear out the remaining stack space
+ ldmfd SP!,{LR} @ restore the link register for this context
+ rfefd SP! @ return from exception via srsfd stack slot
+
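For reference, the frame these handlers build maps onto a C struct: the first 21 words (offsets 0x00-0x50) are exactly the EFI_SYSTEM_CONTEXT_ARM layout handed to CommonCExceptionHandler, followed by three words private to the handler. A minimal sketch; the type name and the last three field names are illustrative, not from this code:

typedef struct {
  UINT32  R0, R1, R2, R3, R4, R5, R6;  // 0x00 - 0x18: saved by stmfd SP!,{R0-R12}
  UINT32  R7, R8, R9, R10, R11, R12;   // 0x1c - 0x30
  UINT32  SP;                          // 0x34: pre-exception SP, reconstructed as SP + 0x60
  UINT32  LR;                          // 0x38: banked LR of the interrupted mode
  UINT32  PC;                          // 0x3c: copied from the srsdb slot at 0x58
  UINT32  CPSR;                        // 0x40: copied from the srsdb slot at 0x5c
  UINT32  DFSR;                        // 0x44: data fault status
  UINT32  DFAR;                        // 0x48: data fault address
  UINT32  IFSR;                        // 0x4c: instruction fault status
  UINT32  IFAR;                        // 0x50: instruction fault address
  UINT32  SvcLr;                       // 0x54: SVC LR saved on entry (hypothetical name)
  UINT32  SrsLr;                       // 0x58: return address pushed by srsdb (hypothetical name)
  UINT32  SrsCpsr;                     // 0x5c: return CPSR pushed by srsdb (hypothetical name)
} EXCEPTION_STACK_FRAME;               // hypothetical type name for this sketch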
diff --git a/ArmPkg/Drivers/CpuDxe/ExceptionSupport.ARMv6.asm b/ArmPkg/Drivers/CpuDxe/ExceptionSupport.ARMv6.asm
index 6e960133b1..3bcca4d6aa 100755
--- a/ArmPkg/Drivers/CpuDxe/ExceptionSupport.ARMv6.asm
+++ b/ArmPkg/Drivers/CpuDxe/ExceptionSupport.ARMv6.asm
@@ -1,5 +1,7 @@
//------------------------------------------------------------------------------
//
+// Use ARMv6 instructions to operate on a single stack
+//
// Copyright (c) 2008-2010 Apple Inc. All rights reserved.
//
// All rights reserved. This program and the accompanying materials
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.S b/ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.S
index 57d2734528..fac928af36 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.S
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.S
@@ -25,6 +25,9 @@
.globl ASM_PFX(ArmSetDomainAccessControl)
.globl ASM_PFX(CPSRMaskInsert)
.globl ASM_PFX(CPSRRead)
+.globl ASM_PFX(ReadCCSIDR)
+.globl ASM_PFX(ReadCLIDR)
+
#------------------------------------------------------------------------------
@@ -37,18 +40,11 @@ ASM_PFX(Cp15CacheInfo):
bx LR
ASM_PFX(ArmEnableInterrupts):
- mrs R0,CPSR
- bic R0,R0,#0x80 @Enable IRQ interrupts
- msr CPSR_c,R0
+ cpsie i
bx LR
ASM_PFX(ArmDisableInterrupts):
- mrs R0,CPSR
- orr R1,R0,#0x80 @Disable IRQ interrupts
- msr CPSR_c,R1
- tst R0,#0x80
- moveq R0,#1
- movne R0,#0
+ cpsid i
bx LR
ASM_PFX(ArmGetInterruptState):
@@ -61,10 +57,12 @@ ASM_PFX(ArmGetInterruptState):
ASM_PFX(ArmInvalidateTlb):
mov r0,#0
mcr p15,0,r0,c8,c7,0
+ isb
bx lr
ASM_PFX(ArmSetTranslationTableBaseAddress):
mcr p15,0,r0,c2,c0,0
+ isb
bx lr
ASM_PFX(ArmGetTranslationTableBaseAddress):
@@ -74,6 +72,7 @@ ASM_PFX(ArmGetTranslationTableBaseAddress):
ASM_PFX(ArmSetDomainAccessControl):
mcr p15,0,r0,c3,c0,0
+ isb
bx lr
ASM_PFX(CPSRMaskInsert): @ on entry, r0 is the mask and r1 is the field to insert
@@ -92,4 +91,14 @@ ASM_PFX(CPSRRead):
mrs r0, cpsr
bx lr
+ASM_PFX(ReadCCSIDR):
+ mcr p15,2,r0,c0,c0,0 @ Write Cache Size Selection Register (CSSELR)
+ isb
+ mrc p15,1,r0,c0,c0,0 @ Read current CP15 Cache Size ID Register (CCSIDR)
+ bx lr
+
+
+ASM_PFX(ReadCLIDR):
+  mrc     p15,1,r0,c0,c0,1  @ Read CP15 Cache Level ID Register (CLIDR)
+  bx      lr
+
ASM_FUNCTION_REMOVE_IF_UNREFERENCED
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.asm b/ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.asm
index cf5173997c..5d3083457e 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.asm
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.asm
@@ -24,95 +24,55 @@
EXPORT ArmSetDomainAccessControl
EXPORT CPSRMaskInsert
EXPORT CPSRRead
+  EXPORT  ReadCCSIDR
+  EXPORT  ReadCLIDR
AREA ArmLibSupport, CODE, READONLY
Cp15IdCode
- DSB
- ISB
mrc p15,0,R0,c0,c0,0
- DSB
- ISB
bx LR
Cp15CacheInfo
- DSB
- ISB
mrc p15,0,R0,c0,c0,1
- DSB
- ISB
bx LR
ArmEnableInterrupts
- DSB
- ISB
- mrs R0,CPSR
- bic R0,R0,#0x80 ;Enable IRQ interrupts
- msr CPSR_c,R0
- DSB
- ISB
+ CPSIE i
bx LR
ArmDisableInterrupts
- DSB
- ISB
- mrs R0,CPSR
- orr R1,R0,#0x80 ;Disable IRQ interrupts
- msr CPSR_c,R1
- tst R0,#0x80
- moveq R0,#1
- movne R0,#0
- DSB
- ISB
+ CPSID i
bx LR
ArmGetInterruptState
- DSB
- ISB
mrs R0,CPSR
tst R0,#0x80 ;Check if IRQ is enabled.
moveq R0,#1
movne R0,#0
- DSB
- ISB
bx LR
ArmInvalidateTlb
- DSB
- ISB
mov r0,#0
mcr p15,0,r0,c8,c7,0
- DSB
ISB
bx lr
ArmSetTranslationTableBaseAddress
- DSB
- ISB
mcr p15,0,r0,c2,c0,0
- DSB
ISB
bx lr
ArmGetTranslationTableBaseAddress
- DSB
- ISB
mrc p15,0,r0,c2,c0,0
- DSB
ISB
bx lr
ArmSetDomainAccessControl
- DSB
- ISB
mcr p15,0,r0,c3,c0,0
- DSB
ISB
bx lr
CPSRMaskInsert ; on entry, r0 is the mask and r1 is the field to insert
- DSB
- ISB
stmfd sp!, {r4-r12, lr} ; save all the banked registers
mov r3, sp ; copy the stack pointer into a non-banked register
mrs r2, cpsr ; read the cpsr
@@ -120,20 +80,33 @@ CPSRMaskInsert ; on entry, r0 is the mask and r1 is the field to in
and r1, r1, r0 ; clear bits outside the mask in the input
orr r2, r2, r1 ; set field
msr cpsr_cxsf, r2 ; write back cpsr (may have caused a mode switch)
+ ISB
mov sp, r3 ; restore stack pointer
ldmfd sp!, {r4-r12, lr} ; restore registers
- DSB
- ISB
bx lr ; return (hopefully thumb-safe!)
CPSRRead
- DSB
- ISB
mrs r0, cpsr
- DSB
- ISB
bx lr
+
+// UINT32
+// ReadCCSIDR (
+// IN UINT32 CSSELR
+// )
+ReadCCSIDR
+ MCR p15,2,r0,c0,c0,0 ; Write Cache Size Selection Register (CSSELR)
+ ISB
+  MRC p15,2,r0,c0,c0,0   ; Read current CP15 Cache Size ID Register (CCSIDR)
+ BX lr
+
+
+// UINT32
+// ReadCLIDR (
+//  VOID
+//  )
+ReadCLIDR
+  MRC p15,1,r0,c0,c0,1   ; Read CP15 Cache Level ID Register
+  BX lr
END
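Both implementations take the CSSELR value in R0 and return the CCSIDR in R0. On ARMv7 the CSSELR encoding is (Level << 1) | InD, with InD = 1 selecting the instruction cache, which is why the C code in the next diff passes 0 for the L1 data cache and 1 for the L1 instruction cache. A hedged sketch of a wrapper; the helper name is hypothetical:

UINT32
ReadCacheSizeId (
  IN UINT32   Level,        // 0-based cache level
  IN BOOLEAN  Instruction   // TRUE to select the instruction cache at that level
  )
{
  // CSSELR = (Level << 1) | InD; ReadCCSIDR is the accessor added above
  return ReadCCSIDR ((Level << 1) | (Instruction ? 1 : 0));
}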
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.c b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.c
index 12ef56c5e6..464a3d7a58 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.c
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.c
@@ -11,13 +11,14 @@
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
**/
-
+#include <Uefi.h>
#include <Chipset/ArmV7.h>
#include <Library/ArmLib.h>
#include <Library/BaseLib.h>
#include <Library/BaseMemoryLib.h>
#include <Library/MemoryAllocationLib.h>
#include "ArmV7Lib.h"
+#include "ArmLibPrivate.h"
VOID
FillTranslationTable (
@@ -136,7 +137,9 @@ ArmCacheArchitecture (
VOID
)
{
- return ARM_CACHE_ARCHITECTURE_SEPARATE;
+ UINT32 CLIDR = ReadCLIDR ();
+
+ return CLIDR; // BugBug Fix Me
}
BOOLEAN
@@ -145,7 +148,18 @@ ArmDataCachePresent (
VOID
)
{
- return TRUE;
+ UINT32 CLIDR = ReadCLIDR ();
+
+ if ((CLIDR & 0x2) == 0x2) {
+ // Data cache exists
+ return TRUE;
+ }
+ if ((CLIDR & 0x7) == 0x4) {
+ // Unified cache
+ return TRUE;
+ }
+
+ return FALSE;
}
UINTN
@@ -154,7 +168,17 @@ ArmDataCacheSize (
VOID
)
{
- return 16 * 1024;
+ UINT32 NumSets;
+ UINT32 Associativity;
+ UINT32 LineSize;
+ UINT32 CCSIDR = ReadCCSIDR (0);
+
+ LineSize = (1 << ((CCSIDR & 7) + 2));
+ Associativity = ((CCSIDR >> 3) & 0x3ff) + 1;
+ NumSets = ((CCSIDR >> 13) & 0x7fff) + 1;
+
+ // LineSize is in words (4 byte chunks)
+ return NumSets * Associativity * LineSize * 4;
}
UINTN
@@ -163,7 +187,9 @@ ArmDataCacheAssociativity (
VOID
)
{
- return 4;
+ UINT32 CCSIDR = ReadCCSIDR (0);
+
+ return ((CCSIDR >> 3) & 0x3ff) + 1;
}
UINTN
@@ -171,7 +197,9 @@ ArmDataCacheSets (
VOID
)
{
- return 64;
+ UINT32 CCSIDR = ReadCCSIDR (0);
+
+ return ((CCSIDR >> 13) & 0x7fff) + 1;
}
UINTN
@@ -180,7 +208,10 @@ ArmDataCacheLineLength (
VOID
)
{
- return 64;
+ UINT32 CCSIDR = ReadCCSIDR (0) & 7;
+
+ // * 4 converts to bytes
+ return (1 << (CCSIDR + 2)) * 4;
}
BOOLEAN
@@ -189,7 +220,18 @@ ArmInstructionCachePresent (
VOID
)
{
- return TRUE;
+ UINT32 CLIDR = ReadCLIDR ();
+
+ if ((CLIDR & 1) == 1) {
+ // Instruction cache exists
+ return TRUE;
+ }
+ if ((CLIDR & 0x7) == 0x4) {
+ // Unified cache
+ return TRUE;
+ }
+
+ return FALSE;
}
UINTN
@@ -198,7 +240,17 @@ ArmInstructionCacheSize (
VOID
)
{
- return 16 * 1024;
+ UINT32 NumSets;
+ UINT32 Associativity;
+ UINT32 LineSize;
+ UINT32 CCSIDR = ReadCCSIDR (1);
+
+ LineSize = (1 << ((CCSIDR & 7) + 2));
+ Associativity = ((CCSIDR >> 3) & 0x3ff) + 1;
+ NumSets = ((CCSIDR >> 13) & 0x7fff) + 1;
+
+ // LineSize is in words (4 byte chunks)
+ return NumSets * Associativity * LineSize * 4;
}
UINTN
@@ -207,55 +259,53 @@ ArmInstructionCacheAssociativity (
VOID
)
{
- return 4;
+ UINT32 CCSIDR = ReadCCSIDR (1);
+
+ return ((CCSIDR >> 3) & 0x3ff) + 1;
}
UINTN
EFIAPI
+ArmInstructionCacheSets (
+ VOID
+ )
+{
+ UINT32 CCSIDR = ReadCCSIDR (1);
+
+ return ((CCSIDR >> 13) & 0x7fff) + 1;
+}
+
+UINTN
+EFIAPI
ArmInstructionCacheLineLength (
VOID
)
{
- return 64;
+ UINT32 CCSIDR = ReadCCSIDR (1) & 7;
+
+ // * 4 converts to bytes
+ return (1 << (CCSIDR + 2)) * 4;
+
}
+
VOID
ArmV7DataCacheOperation (
IN ARM_V7_CACHE_OPERATION DataCacheOperation
)
{
- UINTN Set;
- UINTN SetCount;
- UINTN SetShift;
- UINTN Way;
- UINTN WayCount;
- UINTN WayShift;
- UINT32 SetWayFormat;
UINTN SavedInterruptState;
- SetCount = ArmDataCacheSets();
- WayCount = ArmDataCacheAssociativity();
+ SavedInterruptState = ArmGetInterruptState ();
- // ARMv7 Manual, System Control Coprocessor chapter
- SetShift = 6;
- WayShift = 32 - LowBitSet32 ((UINT32)WayCount);
-
- SavedInterruptState = ArmDisableInterrupts();
-
- for (Way = 0; Way < WayCount; Way++) {
- for (Set = 0; Set < SetCount; Set++) {
- // Build the format that the CP15 instruction can understand
- SetWayFormat = (Way << WayShift) | (Set << SetShift);
-
- // Pass it through
- (*DataCacheOperation)(SetWayFormat);
- }
- }
+ ArmV7AllDataCachesOperation (DataCacheOperation);
- ArmDrainWriteBuffer();
+ ArmDrainWriteBuffer ();
if (SavedInterruptState) {
- ArmEnableInterrupts();
+ ArmEnableInterrupts ();
}
}
@@ -265,7 +315,7 @@ ArmInvalidateDataCache (
VOID
)
{
- ArmV7DataCacheOperation(ArmInvalidateDataCacheEntryBySetWay);
+ ArmV7DataCacheOperation (ArmInvalidateDataCacheEntryBySetWay);
}
VOID
@@ -274,7 +324,7 @@ ArmCleanInvalidateDataCache (
VOID
)
{
- ArmV7DataCacheOperation(ArmCleanInvalidateDataCacheEntryBySetWay);
+ ArmV7DataCacheOperation (ArmCleanInvalidateDataCacheEntryBySetWay);
}
VOID
@@ -283,5 +333,5 @@ ArmCleanDataCache (
VOID
)
{
- ArmV7DataCacheOperation(ArmCleanDataCacheEntryBySetWay);
+ ArmV7DataCacheOperation (ArmCleanDataCacheEntryBySetWay);
}
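To make the CCSIDR field arithmetic above concrete, here is a worked decode for a hypothetical CCSIDR value of 0x000FE019 (the value is illustrative only, not from the source). It follows the same shifts and masks as ArmDataCacheSize():

#include <stdint.h>
#include <stdio.h>

int main (void)
{
  uint32_t Ccsidr        = 0x000FE019;                      // hypothetical register value
  uint32_t LineSizeWords = 1u << ((Ccsidr & 7) + 2);        // field 1 -> 8 words = 32 bytes/line
  uint32_t Associativity = ((Ccsidr >> 3) & 0x3ff) + 1;     // field 3 -> 4 ways
  uint32_t NumSets       = ((Ccsidr >> 13) & 0x7fff) + 1;   // field 127 -> 128 sets
  // 128 sets * 4 ways * 8 words * 4 bytes = 16384 bytes (16 KB)
  printf ("%u bytes\n", NumSets * Associativity * LineSizeWords * 4);
  return 0;
}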
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.h b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.h
index 970b6f1e34..161f9afb34 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.h
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.h
@@ -15,7 +15,6 @@
#ifndef __ARM_V7_LIB_H__
#define __ARM_V7_LIB_H__
-typedef VOID (*ARM_V7_CACHE_OPERATION)(UINT32);
VOID
EFIAPI
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S
index 90d2c4b92f..2cde8e2039 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S
@@ -33,43 +33,58 @@
.globl ASM_PFX(ArmDisableExtendPTConfig)
.globl ASM_PFX(ArmEnableBranchPrediction)
.globl ASM_PFX(ArmDisableBranchPrediction)
+.globl ASM_PFX(ArmV7AllDataCachesOperation)
.set DC_ON, (0x1<<2)
.set IC_ON, (0x1<<12)
-.set XP_ON, (0x1<<23)
+
ASM_PFX(ArmInvalidateDataCacheEntryByMVA):
- mcr p15, 0, r0, c7, c6, 1 @invalidate single data cache line
+ mcr p15, 0, r0, c7, c6, 1 @invalidate single data cache line
+ dsb
+ isb
bx lr
ASM_PFX(ArmCleanDataCacheEntryByMVA):
mcr p15, 0, r0, c7, c10, 1 @clean single data cache line
+ dsb
+ isb
bx lr
ASM_PFX(ArmCleanInvalidateDataCacheEntryByMVA):
mcr p15, 0, r0, c7, c14, 1 @clean and invalidate single data cache line
+ dsb
+ isb
bx lr
ASM_PFX(ArmInvalidateDataCacheEntryBySetWay):
mcr p15, 0, r0, c7, c6, 2 @ Invalidate this line
+ dsb
+ isb
bx lr
ASM_PFX(ArmCleanInvalidateDataCacheEntryBySetWay):
mcr p15, 0, r0, c7, c14, 2 @ Clean and Invalidate this line
+ dsb
+ isb
bx lr
ASM_PFX(ArmCleanDataCacheEntryBySetWay):
mcr p15, 0, r0, c7, c10, 2 @ Clean this line
+ dsb
+ isb
bx lr
ASM_PFX(ArmDrainWriteBuffer):
mcr p15, 0, r0, c7, c10, 4 @ Drain write buffer for sync
+ dsb
+ isb
bx lr
@@ -77,7 +92,8 @@ ASM_PFX(ArmInvalidateInstructionCache):
mov R0,#0
mcr p15,0,R0,c7,c5,0 @Invalidate entire instruction cache
mov R0,#0
- mcr p15,0,R0,c7,c5,4 @Instruction synchronization barrier
+ dsb
+ isb
bx LR
ASM_PFX(ArmEnableMmu):
@@ -99,9 +115,8 @@ ASM_PFX(ArmDisableMmu):
bic R0,R0,#1
mcr p15,0,R0,c1,c0,0 @Disable MMU
mov R0,#0
- mcr p15,0,R0,c7,c10,4 @Data synchronization barrier
- mov R0,#0
- mcr p15,0,R0,c7,c5,4 @Instruction synchronization barrier
+ dsb
+ isb
bx LR
ASM_PFX(ArmEnableDataCache):
@@ -109,6 +124,8 @@ ASM_PFX(ArmEnableDataCache):
mrc p15,0,R0,c1,c0,0 @Read control register configuration data
orr R0,R0,R1 @Set C bit
mcr p15,0,r0,c1,c0,0 @Write control register configuration data
+ dsb
+ isb
bx LR
ASM_PFX(ArmDisableDataCache):
@@ -116,6 +133,8 @@ ASM_PFX(ArmDisableDataCache):
mrc p15,0,R0,c1,c0,0 @Read control register configuration data
bic R0,R0,R1 @Clear C bit
mcr p15,0,r0,c1,c0,0 @Write control register configuration data
+ dsb
+ isb
bx LR
ASM_PFX(ArmEnableInstructionCache):
@@ -123,6 +142,8 @@ ASM_PFX(ArmEnableInstructionCache):
mrc p15,0,R0,c1,c0,0 @Read control register configuration data
orr R0,R0,R1 @Set I bit
mcr p15,0,r0,c1,c0,0 @Write control register configuration data
+ dsb
+ isb
bx LR
ASM_PFX(ArmDisableInstructionCache):
@@ -130,18 +151,76 @@ ASM_PFX(ArmDisableInstructionCache):
mrc p15,0,R0,c1,c0,0 @Read control register configuration data
bic R0,R0,R1 @Clear I bit.
mcr p15,0,r0,c1,c0,0 @Write control register configuration data
+ dsb
+ isb
bx LR
ASM_PFX(ArmEnableBranchPrediction):
mrc p15, 0, r0, c1, c0, 0
orr r0, r0, #0x00000800
mcr p15, 0, r0, c1, c0, 0
+ dsb
+ isb
bx LR
ASM_PFX(ArmDisableBranchPrediction):
mrc p15, 0, r0, c1, c0, 0
bic r0, r0, #0x00000800
mcr p15, 0, r0, c1, c0, 0
+ dsb
+ isb
bx LR
+
+ASM_PFX(ArmV7AllDataCachesOperation):
+ stmfd SP!,{r4-r12, LR}
+ mov R1, R0 @ Save Function call in R1
+ mrc p15, 1, R6, c0, c0, 1 @ Read CLIDR
+ ands R3, R6, #0x7000000 @ Mask out all but Level of Coherency (LoC)
+ mov R3, R3, LSR #23 @ Cache level value (naturally aligned)
+ beq L_Finished
+ mov R10, #0
+
+Loop1:
+ add R2, R10, R10, LSR #1 @ Work out 3xcachelevel
+ mov R12, R6, LSR R2 @ bottom 3 bits are the Cache type for this level
+ and R12, R12, #7 @ get those 3 bits alone
+ cmp R12, #2
+ blt L_Skip @ no cache or only instruction cache at this level
+  mcr   p15, 2, R10, c0, c0, 0  @ write the Cache Size Selection Register (CSSELR); OR in 1 to select the instruction cache
+  isb                           @ ISB to sync the change to the CCSIDR
+  mrc   p15, 1, R12, c0, c0, 0  @ read the current Cache Size ID Register (CCSIDR)
+  and   R2, R12, #0x7           @ extract the line length field
+  add   R2, R2, #4              @ add 4 for the line length offset (log2 16 bytes)
+  mov   R4, #0x400
+  sub   R4, R4, #1
+  ands  R4, R4, R12, LSR #3     @ R4 is the max number of the way size (right aligned)
+ clz R5, R4 @ R5 is the bit position of the way size increment
+ mov R7, #0x00008000
+ sub R7, R7, #1
+ ands R7, R7, R12, LSR #13 @ R7 is the max number of the index size (right aligned)
+
+Loop2:
+ mov R9, R4 @ R9 working copy of the max way size (right aligned)
+
+Loop3:
+  orr   R0, R10, R9, LSL R5     @ factor in the way number and cache number into R0
+ orr R0, R0, R7, LSL R2 @ factor in the index number
+
+ blx R1
+
+ subs R9, R9, #1 @ decrement the way number
+ bge Loop3
+ subs R7, R7, #1 @ decrement the index
+ bge Loop2
+L_Skip:
+ add R10, R10, #2 @ increment the cache number
+ cmp R3, R10
+ bgt Loop1
+
+L_Finished:
+ ldmfd SP!, {r4-r12, lr}
+ bx LR
+
+
ASM_FUNCTION_REMOVE_IF_UNREFERENCED
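The set/way walk in ArmV7AllDataCachesOperation is easier to follow in C. A hedged sketch of the same loop structure (the function name is hypothetical and it mirrors the register roles above, R10 = level*2, R4 = max way, R7 = max set, rather than defining a library API; it assumes at least two ways so the clz-derived shift stays below 32):

VOID
AllDataCachesOperationSketch (
  IN ARM_V7_CACHE_OPERATION  Operation
  )
{
  UINT32  Clidr;
  UINT32  Loc;
  UINT32  Level;

  Clidr = ReadCLIDR ();
  Loc   = (Clidr >> 24) & 7;                        // Level of Coherency, CLIDR[26:24]

  for (Level = 0; Level < Loc; Level++) {
    UINT32  Ctype;
    UINT32  Ccsidr;
    UINT32  SetShift;
    UINT32  WayShift;
    UINT32  MaxWay;
    UINT32  MaxSet;
    INT32   Way;
    INT32   Set;

    Ctype = (Clidr >> (3 * Level)) & 7;             // cache type for this level
    if (Ctype < 2) {
      continue;                                     // no cache, or instruction cache only
    }

    Ccsidr   = ReadCCSIDR (Level << 1);             // select the data/unified cache
    SetShift = (Ccsidr & 7) + 4;                    // log2(line length in bytes)
    MaxWay   = (Ccsidr >> 3) & 0x3ff;               // associativity - 1
    MaxSet   = (Ccsidr >> 13) & 0x7fff;             // number of sets - 1
    WayShift = 31 - (UINT32)HighBitSet32 (MaxWay);  // the clz from the assembly

    for (Set = (INT32)MaxSet; Set >= 0; Set--) {
      for (Way = (INT32)MaxWay; Way >= 0; Way--) {
        // Same set/way/level operand the assembly assembles in R0
        Operation (((UINT32)Way << WayShift) | ((UINT32)Set << SetShift) | (Level << 1));
      }
    }
  }
}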
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm
index 7a6c3083a3..3ce5f2ecd8 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm
@@ -32,7 +32,6 @@
DC_ON EQU ( 0x1:SHL:2 )
IC_ON EQU ( 0x1:SHL:12 )
-XP_ON EQU ( 0x1:SHL:23 )
AREA ArmCacheLib, CODE, READONLY
@@ -40,17 +39,13 @@ XP_ON EQU ( 0x1:SHL:23 )
ArmInvalidateDataCacheEntryByMVA
- DSB
- ISB
- MCR p15, 0, r0, c7, c6, 1 ; invalidate single data cache line
+ MCR p15, 0, r0, c7, c6, 1 ; invalidate single data cache line
DSB
ISB
BX lr
ArmCleanDataCacheEntryByMVA
- DSB
- ISB
MCR p15, 0, r0, c7, c10, 1 ; clean single data cache line
DSB
ISB
@@ -58,8 +53,6 @@ ArmCleanDataCacheEntryByMVA
ArmCleanInvalidateDataCacheEntryByMVA
- DSB
- ISB
MCR p15, 0, r0, c7, c14, 1 ; clean and invalidate single data cache line
DSB
ISB
@@ -67,8 +60,6 @@ ArmCleanInvalidateDataCacheEntryByMVA
ArmInvalidateDataCacheEntryBySetWay
- DSB
- ISB
mcr p15, 0, r0, c7, c6, 2 ; Invalidate this line
DSB
ISB
@@ -76,8 +67,6 @@ ArmInvalidateDataCacheEntryBySetWay
ArmCleanInvalidateDataCacheEntryBySetWay
- DSB
- ISB
mcr p15, 0, r0, c7, c14, 2 ; Clean and Invalidate this line
DSB
ISB
@@ -85,8 +74,6 @@ ArmCleanInvalidateDataCacheEntryBySetWay
ArmCleanDataCacheEntryBySetWay
- DSB
- ISB
mcr p15, 0, r0, c7, c10, 2 ; Clean this line
DSB
ISB
@@ -94,8 +81,6 @@ ArmCleanDataCacheEntryBySetWay
ArmDrainWriteBuffer
- DSB
- ISB
mcr p15, 0, r0, c7, c10, 4 ; Drain write buffer for sync
DSB
ISB
@@ -103,8 +88,6 @@ ArmDrainWriteBuffer
ArmInvalidateInstructionCache
- DSB
- ISB
MOV R0,#0
MCR p15,0,R0,c7,c5,0 ;Invalidate entire instruction cache
MOV R0,#0
@@ -114,8 +97,6 @@ ArmInvalidateInstructionCache
BX LR
ArmEnableMmu
- DSB
- ISB
mrc p15,0,R0,c1,c0,0
orr R0,R0,#1
mcr p15,0,R0,c1,c0,0
@@ -124,33 +105,22 @@ ArmEnableMmu
bx LR
ArmMmuEnabled
- DSB
- ISB
mrc p15,0,R0,c1,c0,0
and R0,R0,#1
- DSB
ISB
bx LR
ArmDisableMmu
- DSB
- ISB
mov R0,#0
mcr p15,0,R0,c13,c0,0 ;FCSE PID register must be cleared before disabling MMU
mrc p15,0,R0,c1,c0,0
bic R0,R0,#1
mcr p15,0,R0,c1,c0,0 ;Disable MMU
- mov R0,#0
- mcr p15,0,R0,c7,c10,4 ;Data synchronization barrier
- mov R0,#0
- mcr p15,0,R0,c7,c5,4 ;Instruction synchronization barrier
DSB
ISB
bx LR
ArmEnableDataCache
- DSB
- ISB
LDR R1,=DC_ON
MRC p15,0,R0,c1,c0,0 ;Read control register configuration data
ORR R0,R0,R1 ;Set C bit
@@ -160,56 +130,90 @@ ArmEnableDataCache
BX LR
ArmDisableDataCache
- DSB
- ISB
LDR R1,=DC_ON
MRC p15,0,R0,c1,c0,0 ;Read control register configuration data
BIC R0,R0,R1 ;Clear C bit
MCR p15,0,r0,c1,c0,0 ;Write control register configuration data
- DSB
ISB
BX LR
ArmEnableInstructionCache
- DSB
- ISB
LDR R1,=IC_ON
MRC p15,0,R0,c1,c0,0 ;Read control register configuration data
ORR R0,R0,R1 ;Set I bit
MCR p15,0,r0,c1,c0,0 ;Write control register configuration data
- DSB
ISB
BX LR
ArmDisableInstructionCache
- DSB
- ISB
LDR R1,=IC_ON
MRC p15,0,R0,c1,c0,0 ;Read control register configuration data
BIC R0,R0,R1 ;Clear I bit.
MCR p15,0,r0,c1,c0,0 ;Write control register configuration data
- DSB
ISB
BX LR
ArmEnableBranchPrediction
- DSB
- ISB
mrc p15, 0, r0, c1, c0, 0
orr r0, r0, #0x00000800
mcr p15, 0, r0, c1, c0, 0
- DSB
ISB
bx LR
ArmDisableBranchPrediction
- DSB
- ISB
mrc p15, 0, r0, c1, c0, 0
bic r0, r0, #0x00000800
mcr p15, 0, r0, c1, c0, 0
- DSB
ISB
bx LR
+
+ArmV7AllDataCachesOperation
+ STMFD SP!,{r4-r12, LR}
+ MOV R1, R0 ; Save Function call in R1
+ MRC p15, 1, R6, c0, c0, 1 ; Read CLIDR
+ ANDS R3, R6, #&7000000 ; Mask out all but Level of Coherency (LoC)
+ MOV R3, R3, LSR #23 ; Cache level value (naturally aligned)
+ BEQ Finished
+ MOV R10, #0
+
+Loop1
+ ADD R2, R10, R10, LSR #1 ; Work out 3xcachelevel
+ MOV R12, R6, LSR R2 ; bottom 3 bits are the Cache type for this level
+ AND R12, R12, #7 ; get those 3 bits alone
+ CMP R12, #2
+ BLT Skip ; no cache or only instruction cache at this level
+  MCR p15, 2, R10, c0, c0, 0  ; write the Cache Size Selection Register (CSSELR); OR in 1 to select the instruction cache
+  ISB                         ; ISB to sync the change to the CCSIDR
+  MRC p15, 1, R12, c0, c0, 0  ; read the current Cache Size ID Register (CCSIDR)
+  AND R2, R12, #&7            ; extract the line length field
+  ADD R2, R2, #4              ; add 4 for the line length offset (log2 16 bytes)
+  LDR R4, =0x3FF
+  ANDS R4, R4, R12, LSR #3    ; R4 is the max number of the way size (right aligned)
+ CLZ R5, R4 ; R5 is the bit position of the way size increment
+ LDR R7, =0x00007FFF
+ ANDS R7, R7, R12, LSR #13 ; R7 is the max number of the index size (right aligned)
+
+Loop2
+ MOV R9, R4 ; R9 working copy of the max way size (right aligned)
+
+Loop3
+  ORR R0, R10, R9, LSL R5     ; factor in the way number and cache number into R0
+ ORR R0, R0, R7, LSL R2 ; factor in the index number
+
+ BLX R1
+
+ SUBS R9, R9, #1 ; decrement the way number
+ BGE Loop3
+ SUBS R7, R7, #1 ; decrement the index
+ BGE Loop2
+Skip
+ ADD R10, R10, #2 ; increment the cache number
+ CMP R3, R10
+ BGT Loop1
+
+Finished
+ LDMFD SP!, {r4-r12, lr}
+ BX LR
+
END
diff --git a/ArmPkg/Library/ArmLib/Common/ArmLibPrivate.h b/ArmPkg/Library/ArmLib/Common/ArmLibPrivate.h
index d1d2523947..b24d322785 100644
--- a/ArmPkg/Library/ArmLib/Common/ArmLibPrivate.h
+++ b/ArmPkg/Library/ArmLib/Common/ArmLibPrivate.h
@@ -56,6 +56,8 @@
#define CACHE_ARCHITECTURE_UNIFIED (0UL)
#define CACHE_ARCHITECTURE_SEPARATE (1UL)
+typedef VOID (*ARM_V7_CACHE_OPERATION)(UINT32);
+
VOID
CPSRMaskInsert (
IN UINT32 Mask,
@@ -67,4 +69,22 @@ CPSRRead (
VOID
);
+
+UINT32
+ReadCCSIDR (
+ IN UINT32 CSSELR
+ );
+
+
+UINT32
+ReadCLIDR (
+ VOID
+ );
+
+VOID
+ArmV7AllDataCachesOperation (
+ IN ARM_V7_CACHE_OPERATION DataCacheOperation
+ );
+
+
#endif // __ARM_LIB_PRIVATE_H__