author    andrewfish <andrewfish@6f19259b-4bc3-4df7-8a09-765794883524>  2010-02-19 18:51:10 +0000
committer andrewfish <andrewfish@6f19259b-4bc3-4df7-8a09-765794883524>  2010-02-19 18:51:10 +0000
commit    98bc0c8c056271095ae2a3a9ab7f2c3ccd64117e (patch)
tree      7aec4b8d6c212f1aad09a4282502330ccd580dc6 /ArmPkg/Library
parent    752d258a42349bf5895efced6c1be1dd5cdfae66 (diff)
Sync GCC assembly with ARMASM. Update some memory barriers.
git-svn-id: https://edk2.svn.sourceforge.net/svnroot/edk2/trunk/edk2@10025 6f19259b-4bc3-4df7-8a09-765794883524
Diffstat (limited to 'ArmPkg/Library')
-rw-r--r--  ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.S    |  27
-rw-r--r--  ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.asm  |  71
-rw-r--r--  ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.c         | 130
-rw-r--r--  ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.h         |   1
-rw-r--r--  ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S     |  91
-rw-r--r--  ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm   |  96
-rw-r--r--  ArmPkg/Library/ArmLib/Common/ArmLibPrivate.h   |  20
7 files changed, 285 insertions(+), 151 deletions(-)
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.S b/ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.S
index 57d2734528..fac928af36 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.S
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.S
@@ -25,6 +25,9 @@
.globl ASM_PFX(ArmSetDomainAccessControl)
.globl ASM_PFX(CPSRMaskInsert)
.globl ASM_PFX(CPSRRead)
+.globl ASM_PFX(ReadCCSIDR)
+.globl ASM_PFX(ReadCLIDR)
+
#------------------------------------------------------------------------------
@@ -37,18 +40,11 @@ ASM_PFX(Cp15CacheInfo):
bx LR
ASM_PFX(ArmEnableInterrupts):
- mrs R0,CPSR
- bic R0,R0,#0x80 @Enable IRQ interrupts
- msr CPSR_c,R0
+ cpsie i
bx LR
ASM_PFX(ArmDisableInterrupts):
- mrs R0,CPSR
- orr R1,R0,#0x80 @Disable IRQ interrupts
- msr CPSR_c,R1
- tst R0,#0x80
- moveq R0,#1
- movne R0,#0
+ cpsid i
bx LR
ASM_PFX(ArmGetInterruptState):
@@ -61,10 +57,12 @@ ASM_PFX(ArmGetInterruptState):
ASM_PFX(ArmInvalidateTlb):
mov r0,#0
mcr p15,0,r0,c8,c7,0
+ isb
bx lr
ASM_PFX(ArmSetTranslationTableBaseAddress):
mcr p15,0,r0,c2,c0,0
+ isb
bx lr
ASM_PFX(ArmGetTranslationTableBaseAddress):
@@ -74,6 +72,7 @@ ASM_PFX(ArmGetTranslationTableBaseAddress):
ASM_PFX(ArmSetDomainAccessControl):
mcr p15,0,r0,c3,c0,0
+ isb
bx lr
ASM_PFX(CPSRMaskInsert): @ on entry, r0 is the mask and r1 is the field to insert
@@ -92,4 +91,14 @@ ASM_PFX(CPSRRead):
mrs r0, cpsr
bx lr
+ASM_PFX(ReadCCSIDR):
+ mcr p15,2,r0,c0,c0,0 @ Write Cache Size Selection Register (CSSELR)
+ isb
+ mrc p15,1,r0,c0,c0,0 @ Read current CP15 Cache Size ID Register (CCSIDR)
+ bx lr
+
+
+ASM_PFX(ReadCLIDR):
+ mrc p15,1,r0,c0,c0,1 @ Read CP15 Cache Level ID Register
+  bx lr
ASM_FUNCTION_REMOVE_IF_UNREFERENCED
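
For reference, this commit drops the explicit CP15 barrier writes (c7,c10,4 and c7,c5,4) in favor of the ARMv7 dsb/isb instructions, and adds an isb after CP15 writes that change translation or domain state. A minimal sketch of those barriers as GCC inline-assembly helpers, assuming an ARMv7 target; the helper names are illustrative and not part of this library:

/* Illustrative only: ARMv7 barrier instructions this commit relies on,
   with the pre-v7 CP15 encodings they replace noted in comments. */

static inline void DataSyncBarrierSketch (void)
{
  /* ARMv7 "dsb"; older encoding: mcr p15, 0, Rt, c7, c10, 4 */
  __asm__ __volatile__ ("dsb" ::: "memory");
}

static inline void InstructionSyncBarrierSketch (void)
{
  /* ARMv7 "isb"; older encoding: mcr p15, 0, Rt, c7, c5, 4 */
  __asm__ __volatile__ ("isb" ::: "memory");
}
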
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.asm b/ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.asm
index cf5173997c..5d3083457e 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.asm
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmLibSupport.asm
@@ -24,95 +24,55 @@
EXPORT ArmSetDomainAccessControl
EXPORT CPSRMaskInsert
EXPORT CPSRRead
+ EXPORT ReadCCSIDR
AREA ArmLibSupport, CODE, READONLY
Cp15IdCode
- DSB
- ISB
mrc p15,0,R0,c0,c0,0
- DSB
- ISB
bx LR
Cp15CacheInfo
- DSB
- ISB
mrc p15,0,R0,c0,c0,1
- DSB
- ISB
bx LR
ArmEnableInterrupts
- DSB
- ISB
- mrs R0,CPSR
- bic R0,R0,#0x80 ;Enable IRQ interrupts
- msr CPSR_c,R0
- DSB
- ISB
+ CPSIE i
bx LR
ArmDisableInterrupts
- DSB
- ISB
- mrs R0,CPSR
- orr R1,R0,#0x80 ;Disable IRQ interrupts
- msr CPSR_c,R1
- tst R0,#0x80
- moveq R0,#1
- movne R0,#0
- DSB
- ISB
+ CPSID i
bx LR
ArmGetInterruptState
- DSB
- ISB
mrs R0,CPSR
tst R0,#0x80 ;Check if IRQ is enabled.
moveq R0,#1
movne R0,#0
- DSB
- ISB
bx LR
ArmInvalidateTlb
- DSB
- ISB
mov r0,#0
mcr p15,0,r0,c8,c7,0
- DSB
ISB
bx lr
ArmSetTranslationTableBaseAddress
- DSB
- ISB
mcr p15,0,r0,c2,c0,0
- DSB
ISB
bx lr
ArmGetTranslationTableBaseAddress
- DSB
- ISB
mrc p15,0,r0,c2,c0,0
- DSB
ISB
bx lr
ArmSetDomainAccessControl
- DSB
- ISB
mcr p15,0,r0,c3,c0,0
- DSB
ISB
bx lr
CPSRMaskInsert ; on entry, r0 is the mask and r1 is the field to insert
- DSB
- ISB
stmfd sp!, {r4-r12, lr} ; save all the banked registers
mov r3, sp ; copy the stack pointer into a non-banked register
mrs r2, cpsr ; read the cpsr
@@ -120,20 +80,33 @@ CPSRMaskInsert ; on entry, r0 is the mask and r1 is the field to in
and r1, r1, r0 ; clear bits outside the mask in the input
orr r2, r2, r1 ; set field
msr cpsr_cxsf, r2 ; write back cpsr (may have caused a mode switch)
+ ISB
mov sp, r3 ; restore stack pointer
ldmfd sp!, {r4-r12, lr} ; restore registers
- DSB
- ISB
bx lr ; return (hopefully thumb-safe!)
CPSRRead
- DSB
- ISB
mrs r0, cpsr
- DSB
- ISB
bx lr
+
+// UINT32
+// ReadCCSIDR (
+// IN UINT32 CSSELR
+// )
+ReadCCSIDR
+ MCR p15,2,r0,c0,c0,0 ; Write Cache Size Selection Register (CSSELR)
+ ISB
+  MRC p15,1,r0,c0,c0,0 ; Read current CP15 Cache Size ID Register (CCSIDR)
+ BX lr
+
+
+// UINT32
+// ReadCLIDR (
+//  VOID
+// )
+ReadCLIDR
+  MRC p15,1,r0,c0,c0,1 ; Read CP15 Cache Level ID Register
+  BX lr
END
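
ReadCCSIDR takes the CSSELR value to program before CCSIDR is read; in this commit ArmV7Lib.c only passes 0 (L1 data/unified) and 1 (L1 instruction). A hedged sketch of forming CSSELR for other levels, assuming the standard ARMv7 layout (InD in bit 0, cache level minus one in bits [3:1]); BuildCsselr is a hypothetical helper, not part of the library:

//
// Hypothetical helper: build a CSSELR value for ReadCCSIDR().
// ARMv7 CSSELR layout: bit 0 = InD (1 selects the instruction cache),
// bits [3:1] = cache level minus one.
//
UINT32
BuildCsselr (
  IN UINT32   Level,             // 1-based cache level
  IN BOOLEAN  InstructionCache
  )
{
  return ((Level - 1) << 1) | (InstructionCache ? 1 : 0);
}

// Usage mirroring ArmV7Lib.c in this commit:
//   ReadCCSIDR (BuildCsselr (1, FALSE));   // same as ReadCCSIDR (0), L1 data
//   ReadCCSIDR (BuildCsselr (1, TRUE));    // same as ReadCCSIDR (1), L1 instruction
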
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.c b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.c
index 12ef56c5e6..464a3d7a58 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.c
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.c
@@ -11,13 +11,14 @@
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
**/
-
+#include <Uefi.h>
#include <Chipset/ArmV7.h>
#include <Library/ArmLib.h>
#include <Library/BaseLib.h>
#include <Library/BaseMemoryLib.h>
#include <Library/MemoryAllocationLib.h>
#include "ArmV7Lib.h"
+#include "ArmLibPrivate.h"
VOID
FillTranslationTable (
@@ -136,7 +137,9 @@ ArmCacheArchitecture (
VOID
)
{
- return ARM_CACHE_ARCHITECTURE_SEPARATE;
+ UINT32 CLIDR = ReadCLIDR ();
+
+ return CLIDR; // BugBug Fix Me
}
BOOLEAN
@@ -145,7 +148,18 @@ ArmDataCachePresent (
VOID
)
{
- return TRUE;
+ UINT32 CLIDR = ReadCLIDR ();
+
+ if ((CLIDR & 0x2) == 0x2) {
+ // Data cache exists
+ return TRUE;
+ }
+ if ((CLIDR & 0x7) == 0x4) {
+ // Unified cache
+ return TRUE;
+ }
+
+ return FALSE;
}
UINTN
@@ -154,7 +168,17 @@ ArmDataCacheSize (
VOID
)
{
- return 16 * 1024;
+ UINT32 NumSets;
+ UINT32 Associativity;
+ UINT32 LineSize;
+ UINT32 CCSIDR = ReadCCSIDR (0);
+
+ LineSize = (1 << ((CCSIDR & 7) + 2));
+ Associativity = ((CCSIDR >> 3) & 0x3ff) + 1;
+ NumSets = ((CCSIDR >> 13) & 0x7fff) + 1;
+
+ // LineSize is in words (4 byte chunks)
+ return NumSets * Associativity * LineSize * 4;
}
UINTN
@@ -163,7 +187,9 @@ ArmDataCacheAssociativity (
VOID
)
{
- return 4;
+ UINT32 CCSIDR = ReadCCSIDR (0);
+
+ return ((CCSIDR >> 3) & 0x3ff) + 1;
}
UINTN
@@ -171,7 +197,9 @@ ArmDataCacheSets (
VOID
)
{
- return 64;
+ UINT32 CCSIDR = ReadCCSIDR (0);
+
+ return ((CCSIDR >> 13) & 0x7fff) + 1;
}
UINTN
@@ -180,7 +208,10 @@ ArmDataCacheLineLength (
VOID
)
{
- return 64;
+ UINT32 CCSIDR = ReadCCSIDR (0) & 7;
+
+ // * 4 converts to bytes
+ return (1 << (CCSIDR + 2)) * 4;
}
BOOLEAN
@@ -189,7 +220,18 @@ ArmInstructionCachePresent (
VOID
)
{
- return TRUE;
+ UINT32 CLIDR = ReadCLIDR ();
+
+ if ((CLIDR & 1) == 1) {
+ // Instruction cache exists
+ return TRUE;
+ }
+ if ((CLIDR & 0x7) == 0x4) {
+ // Unified cache
+ return TRUE;
+ }
+
+ return FALSE;
}
UINTN
@@ -198,7 +240,17 @@ ArmInstructionCacheSize (
VOID
)
{
- return 16 * 1024;
+ UINT32 NumSets;
+ UINT32 Associativity;
+ UINT32 LineSize;
+ UINT32 CCSIDR = ReadCCSIDR (1);
+
+ LineSize = (1 << ((CCSIDR & 7) + 2));
+ Associativity = ((CCSIDR >> 3) & 0x3ff) + 1;
+ NumSets = ((CCSIDR >> 13) & 0x7fff) + 1;
+
+ // LineSize is in words (4 byte chunks)
+ return NumSets * Associativity * LineSize * 4;
}
UINTN
@@ -207,55 +259,53 @@ ArmInstructionCacheAssociativity (
VOID
)
{
- return 4;
+ UINT32 CCSIDR = ReadCCSIDR (1);
+
+ return ((CCSIDR >> 3) & 0x3ff) + 1;
+// return 4;
}
UINTN
EFIAPI
+ArmInstructionCacheSets (
+ VOID
+ )
+{
+ UINT32 CCSIDR = ReadCCSIDR (1);
+
+ return ((CCSIDR >> 13) & 0x7fff) + 1;
+}
+
+UINTN
+EFIAPI
ArmInstructionCacheLineLength (
VOID
)
{
- return 64;
+ UINT32 CCSIDR = ReadCCSIDR (1) & 7;
+
+ // * 4 converts to bytes
+ return (1 << (CCSIDR + 2)) * 4;
+
+// return 64;
}
+
VOID
ArmV7DataCacheOperation (
IN ARM_V7_CACHE_OPERATION DataCacheOperation
)
{
- UINTN Set;
- UINTN SetCount;
- UINTN SetShift;
- UINTN Way;
- UINTN WayCount;
- UINTN WayShift;
- UINT32 SetWayFormat;
UINTN SavedInterruptState;
- SetCount = ArmDataCacheSets();
- WayCount = ArmDataCacheAssociativity();
+ SavedInterruptState = ArmGetInterruptState ();
- // ARMv7 Manual, System Control Coprocessor chapter
- SetShift = 6;
- WayShift = 32 - LowBitSet32 ((UINT32)WayCount);
-
- SavedInterruptState = ArmDisableInterrupts();
-
- for (Way = 0; Way < WayCount; Way++) {
- for (Set = 0; Set < SetCount; Set++) {
- // Build the format that the CP15 instruction can understand
- SetWayFormat = (Way << WayShift) | (Set << SetShift);
-
- // Pass it through
- (*DataCacheOperation)(SetWayFormat);
- }
- }
+ ArmV7AllDataCachesOperation (DataCacheOperation);
- ArmDrainWriteBuffer();
+ ArmDrainWriteBuffer ();
if (SavedInterruptState) {
- ArmEnableInterrupts();
+ ArmEnableInterrupts ();
}
}
@@ -265,7 +315,7 @@ ArmInvalidateDataCache (
VOID
)
{
- ArmV7DataCacheOperation(ArmInvalidateDataCacheEntryBySetWay);
+ ArmV7DataCacheOperation (ArmInvalidateDataCacheEntryBySetWay);
}
VOID
@@ -274,7 +324,7 @@ ArmCleanInvalidateDataCache (
VOID
)
{
- ArmV7DataCacheOperation(ArmCleanInvalidateDataCacheEntryBySetWay);
+ ArmV7DataCacheOperation (ArmCleanInvalidateDataCacheEntryBySetWay);
}
VOID
@@ -283,5 +333,5 @@ ArmCleanDataCache (
VOID
)
{
- ArmV7DataCacheOperation(ArmCleanDataCacheEntryBySetWay);
+ ArmV7DataCacheOperation (ArmCleanDataCacheEntryBySetWay);
}
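
The cache geometry functions above all decode the same CCSIDR fields: line length in bits [2:0] (log2 of words per line, minus 2), associativity minus one in bits [12:3], and number of sets minus one in bits [27:13]. A condensed sketch of that decode; the helper name is illustrative and not part of this commit:

//
// Illustrative helper: total cache size in bytes from a CCSIDR value,
// using the same field decode as ArmDataCacheSize() above.
//
UINTN
CcsidrToCacheSizeInBytes (
  IN UINT32  Ccsidr
  )
{
  UINTN  LineSizeInWords;
  UINTN  Associativity;
  UINTN  NumSets;

  LineSizeInWords = 1 << ((Ccsidr & 0x7) + 2);       // words per line
  Associativity   = ((Ccsidr >> 3) & 0x3FF) + 1;     // number of ways
  NumSets         = ((Ccsidr >> 13) & 0x7FFF) + 1;   // number of sets

  // 4 bytes per word
  return NumSets * Associativity * LineSizeInWords * 4;
}
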
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.h b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.h
index 970b6f1e34..161f9afb34 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.h
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Lib.h
@@ -15,7 +15,6 @@
#ifndef __ARM_V7_LIB_H__
#define __ARM_V7_LIB_H__
-typedef VOID (*ARM_V7_CACHE_OPERATION)(UINT32);
VOID
EFIAPI
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S
index 90d2c4b92f..2cde8e2039 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S
@@ -33,43 +33,58 @@
.globl ASM_PFX(ArmDisableExtendPTConfig)
.globl ASM_PFX(ArmEnableBranchPrediction)
.globl ASM_PFX(ArmDisableBranchPrediction)
+.globl ASM_PFX(ArmV7AllDataCachesOperation)
.set DC_ON, (0x1<<2)
.set IC_ON, (0x1<<12)
-.set XP_ON, (0x1<<23)
+
ASM_PFX(ArmInvalidateDataCacheEntryByMVA):
- mcr p15, 0, r0, c7, c6, 1 @invalidate single data cache line
+ mcr p15, 0, r0, c7, c6, 1 @invalidate single data cache line
+ dsb
+ isb
bx lr
ASM_PFX(ArmCleanDataCacheEntryByMVA):
mcr p15, 0, r0, c7, c10, 1 @clean single data cache line
+ dsb
+ isb
bx lr
ASM_PFX(ArmCleanInvalidateDataCacheEntryByMVA):
mcr p15, 0, r0, c7, c14, 1 @clean and invalidate single data cache line
+ dsb
+ isb
bx lr
ASM_PFX(ArmInvalidateDataCacheEntryBySetWay):
mcr p15, 0, r0, c7, c6, 2 @ Invalidate this line
+ dsb
+ isb
bx lr
ASM_PFX(ArmCleanInvalidateDataCacheEntryBySetWay):
mcr p15, 0, r0, c7, c14, 2 @ Clean and Invalidate this line
+ dsb
+ isb
bx lr
ASM_PFX(ArmCleanDataCacheEntryBySetWay):
mcr p15, 0, r0, c7, c10, 2 @ Clean this line
+ dsb
+ isb
bx lr
ASM_PFX(ArmDrainWriteBuffer):
mcr p15, 0, r0, c7, c10, 4 @ Drain write buffer for sync
+ dsb
+ isb
bx lr
@@ -77,7 +92,8 @@ ASM_PFX(ArmInvalidateInstructionCache):
mov R0,#0
mcr p15,0,R0,c7,c5,0 @Invalidate entire instruction cache
mov R0,#0
- mcr p15,0,R0,c7,c5,4 @Instruction synchronization barrier
+ dsb
+ isb
bx LR
ASM_PFX(ArmEnableMmu):
@@ -99,9 +115,8 @@ ASM_PFX(ArmDisableMmu):
bic R0,R0,#1
mcr p15,0,R0,c1,c0,0 @Disable MMU
mov R0,#0
- mcr p15,0,R0,c7,c10,4 @Data synchronization barrier
- mov R0,#0
- mcr p15,0,R0,c7,c5,4 @Instruction synchronization barrier
+ dsb
+ isb
bx LR
ASM_PFX(ArmEnableDataCache):
@@ -109,6 +124,8 @@ ASM_PFX(ArmEnableDataCache):
mrc p15,0,R0,c1,c0,0 @Read control register configuration data
orr R0,R0,R1 @Set C bit
mcr p15,0,r0,c1,c0,0 @Write control register configuration data
+ dsb
+ isb
bx LR
ASM_PFX(ArmDisableDataCache):
@@ -116,6 +133,8 @@ ASM_PFX(ArmDisableDataCache):
mrc p15,0,R0,c1,c0,0 @Read control register configuration data
bic R0,R0,R1 @Clear C bit
mcr p15,0,r0,c1,c0,0 @Write control register configuration data
+ dsb
+ isb
bx LR
ASM_PFX(ArmEnableInstructionCache):
@@ -123,6 +142,8 @@ ASM_PFX(ArmEnableInstructionCache):
mrc p15,0,R0,c1,c0,0 @Read control register configuration data
orr R0,R0,R1 @Set I bit
mcr p15,0,r0,c1,c0,0 @Write control register configuration data
+ dsb
+ isb
bx LR
ASM_PFX(ArmDisableInstructionCache):
@@ -130,18 +151,76 @@ ASM_PFX(ArmDisableInstructionCache):
mrc p15,0,R0,c1,c0,0 @Read control register configuration data
bic R0,R0,R1 @Clear I bit.
mcr p15,0,r0,c1,c0,0 @Write control register configuration data
+ dsb
+ isb
bx LR
ASM_PFX(ArmEnableBranchPrediction):
mrc p15, 0, r0, c1, c0, 0
orr r0, r0, #0x00000800
mcr p15, 0, r0, c1, c0, 0
+ dsb
+ isb
bx LR
ASM_PFX(ArmDisableBranchPrediction):
mrc p15, 0, r0, c1, c0, 0
bic r0, r0, #0x00000800
mcr p15, 0, r0, c1, c0, 0
+ dsb
+ isb
bx LR
+
+ASM_PFX(ArmV7AllDataCachesOperation):
+ stmfd SP!,{r4-r12, LR}
+ mov R1, R0 @ Save Function call in R1
+ mrc p15, 1, R6, c0, c0, 1 @ Read CLIDR
+ ands R3, R6, #0x7000000 @ Mask out all but Level of Coherency (LoC)
+ mov R3, R3, LSR #23 @ Cache level value (naturally aligned)
+ beq L_Finished
+ mov R10, #0
+
+Loop1:
+ add R2, R10, R10, LSR #1 @ Work out 3xcachelevel
+ mov R12, R6, LSR R2 @ bottom 3 bits are the Cache type for this level
+ and R12, R12, #7 @ get those 3 bits alone
+ cmp R12, #2
+ blt L_Skip @ no cache or only instruction cache at this level
+ mcr p15, 2, R10, c0, c0, 0 @ write the Cache Size selection register (CSSELR) // OR in 1 for Instruction
+ isb @ ISB to sync the change to the CacheSizeID reg
+ mrc p15, 1, R12, c0, c0, 0 @ reads current Cache Size ID register (CCSIDR)
+ and R2, R12, #0x7 @ extract the line length field
+ add R2, R2, #4 @ add 4 for the line length offset (log2 16 bytes)
+ mov R4, #0x400
+ sub R4, R4, #1
+ ands R4, R4, R12, LSR #3 @ R4 is the max number on the way size (right aligned)
+ clz R5, R4 @ R5 is the bit position of the way size increment
+ mov R7, #0x00008000
+ sub R7, R7, #1
+ ands R7, R7, R12, LSR #13 @ R7 is the max number of the index size (right aligned)
+
+Loop2:
+ mov R9, R4 @ R9 working copy of the max way size (right aligned)
+
+Loop3:
+ orr R0, R10, R9, LSL R5 @ factor in the way number and cache number into R0
+ orr R0, R0, R7, LSL R2 @ factor in the index number
+
+ blx R1
+
+ subs R9, R9, #1 @ decrement the way number
+ bge Loop3
+ subs R7, R7, #1 @ decrement the index
+ bge Loop2
+L_Skip:
+ add R10, R10, #2 @ increment the cache number
+ cmp R3, R10
+ bgt Loop1
+
+L_Finished:
+ ldmfd SP!, {r4-r12, lr}
+ bx LR
+
+
ASM_FUNCTION_REMOVE_IF_UNREFERENCED
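
ArmV7AllDataCachesOperation walks every data or unified cache level reported by CLIDR and hands each level/set/way tuple to the callback in CP15 set/way format. A hedged C rendering of the same walk, for illustration only; the committed implementation is the assembly above, and the sketch's name and structure are assumptions:

//
// Illustrative C rendering of the set/way walk implemented in assembly above.
// Set/way operand format: way in the top bits (shifted by 32 - ceil(log2(ways))),
// set shifted by log2(line length in bytes), level in bits [3:1].
//
VOID
AllDataCachesOperationSketch (
  IN ARM_V7_CACHE_OPERATION  Operation
  )
{
  UINT32  Clidr;
  UINT32  Level;
  UINT32  LevelOfCoherency;
  UINT32  Ccsidr;
  UINT32  Log2LineBytes;
  UINT32  Ways;
  UINT32  Sets;
  UINT32  WayBits;
  UINT32  Way;
  UINT32  Set;
  UINT32  SetWay;

  Clidr            = ReadCLIDR ();
  LevelOfCoherency = (Clidr >> 24) & 0x7;

  for (Level = 0; Level < LevelOfCoherency; Level++) {
    // Ctype field for this level: < 2 means no cache, or instruction cache only.
    if (((Clidr >> (Level * 3)) & 0x7) < 2) {
      continue;
    }

    Ccsidr        = ReadCCSIDR (Level << 1);          // data/unified cache at this level
    Log2LineBytes = (Ccsidr & 0x7) + 4;               // log2(line length in bytes)
    Ways          = ((Ccsidr >> 3) & 0x3FF) + 1;
    Sets          = ((Ccsidr >> 13) & 0x7FFF) + 1;

    WayBits = 0;
    while ((1u << WayBits) < Ways) {
      WayBits++;                                      // ceil(log2(Ways))
    }

    for (Way = 0; Way < Ways; Way++) {
      for (Set = 0; Set < Sets; Set++) {
        SetWay = (Level << 1) | (Set << Log2LineBytes);
        if (WayBits != 0) {
          SetWay |= Way << (32 - WayBits);
        }
        (*Operation) (SetWay);
      }
    }
  }
}
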
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm
index 7a6c3083a3..3ce5f2ecd8 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm
@@ -32,7 +32,6 @@
DC_ON EQU ( 0x1:SHL:2 )
IC_ON EQU ( 0x1:SHL:12 )
-XP_ON EQU ( 0x1:SHL:23 )
AREA ArmCacheLib, CODE, READONLY
@@ -40,17 +39,13 @@ XP_ON EQU ( 0x1:SHL:23 )
ArmInvalidateDataCacheEntryByMVA
- DSB
- ISB
- MCR p15, 0, r0, c7, c6, 1 ; invalidate single data cache line
+ MCR p15, 0, r0, c7, c6, 1 ; invalidate single data cache line
DSB
ISB
BX lr
ArmCleanDataCacheEntryByMVA
- DSB
- ISB
MCR p15, 0, r0, c7, c10, 1 ; clean single data cache line
DSB
ISB
@@ -58,8 +53,6 @@ ArmCleanDataCacheEntryByMVA
ArmCleanInvalidateDataCacheEntryByMVA
- DSB
- ISB
MCR p15, 0, r0, c7, c14, 1 ; clean and invalidate single data cache line
DSB
ISB
@@ -67,8 +60,6 @@ ArmCleanInvalidateDataCacheEntryByMVA
ArmInvalidateDataCacheEntryBySetWay
- DSB
- ISB
mcr p15, 0, r0, c7, c6, 2 ; Invalidate this line
DSB
ISB
@@ -76,8 +67,6 @@ ArmInvalidateDataCacheEntryBySetWay
ArmCleanInvalidateDataCacheEntryBySetWay
- DSB
- ISB
mcr p15, 0, r0, c7, c14, 2 ; Clean and Invalidate this line
DSB
ISB
@@ -85,8 +74,6 @@ ArmCleanInvalidateDataCacheEntryBySetWay
ArmCleanDataCacheEntryBySetWay
- DSB
- ISB
mcr p15, 0, r0, c7, c10, 2 ; Clean this line
DSB
ISB
@@ -94,8 +81,6 @@ ArmCleanDataCacheEntryBySetWay
ArmDrainWriteBuffer
- DSB
- ISB
mcr p15, 0, r0, c7, c10, 4 ; Drain write buffer for sync
DSB
ISB
@@ -103,8 +88,6 @@ ArmDrainWriteBuffer
ArmInvalidateInstructionCache
- DSB
- ISB
MOV R0,#0
MCR p15,0,R0,c7,c5,0 ;Invalidate entire instruction cache
MOV R0,#0
@@ -114,8 +97,6 @@ ArmInvalidateInstructionCache
BX LR
ArmEnableMmu
- DSB
- ISB
mrc p15,0,R0,c1,c0,0
orr R0,R0,#1
mcr p15,0,R0,c1,c0,0
@@ -124,33 +105,22 @@ ArmEnableMmu
bx LR
ArmMmuEnabled
- DSB
- ISB
mrc p15,0,R0,c1,c0,0
and R0,R0,#1
- DSB
ISB
bx LR
ArmDisableMmu
- DSB
- ISB
mov R0,#0
mcr p15,0,R0,c13,c0,0 ;FCSE PID register must be cleared before disabling MMU
mrc p15,0,R0,c1,c0,0
bic R0,R0,#1
mcr p15,0,R0,c1,c0,0 ;Disable MMU
- mov R0,#0
- mcr p15,0,R0,c7,c10,4 ;Data synchronization barrier
- mov R0,#0
- mcr p15,0,R0,c7,c5,4 ;Instruction synchronization barrier
DSB
ISB
bx LR
ArmEnableDataCache
- DSB
- ISB
LDR R1,=DC_ON
MRC p15,0,R0,c1,c0,0 ;Read control register configuration data
ORR R0,R0,R1 ;Set C bit
@@ -160,56 +130,90 @@ ArmEnableDataCache
BX LR
ArmDisableDataCache
- DSB
- ISB
LDR R1,=DC_ON
MRC p15,0,R0,c1,c0,0 ;Read control register configuration data
BIC R0,R0,R1 ;Clear C bit
MCR p15,0,r0,c1,c0,0 ;Write control register configuration data
- DSB
ISB
BX LR
ArmEnableInstructionCache
- DSB
- ISB
LDR R1,=IC_ON
MRC p15,0,R0,c1,c0,0 ;Read control register configuration data
ORR R0,R0,R1 ;Set I bit
MCR p15,0,r0,c1,c0,0 ;Write control register configuration data
- DSB
ISB
BX LR
ArmDisableInstructionCache
- DSB
- ISB
LDR R1,=IC_ON
MRC p15,0,R0,c1,c0,0 ;Read control register configuration data
BIC R0,R0,R1 ;Clear I bit.
MCR p15,0,r0,c1,c0,0 ;Write control register configuration data
- DSB
ISB
BX LR
ArmEnableBranchPrediction
- DSB
- ISB
mrc p15, 0, r0, c1, c0, 0
orr r0, r0, #0x00000800
mcr p15, 0, r0, c1, c0, 0
- DSB
ISB
bx LR
ArmDisableBranchPrediction
- DSB
- ISB
mrc p15, 0, r0, c1, c0, 0
bic r0, r0, #0x00000800
mcr p15, 0, r0, c1, c0, 0
- DSB
ISB
bx LR
+
+ArmV7AllDataCachesOperation
+ STMFD SP!,{r4-r12, LR}
+ MOV R1, R0 ; Save Function call in R1
+ MRC p15, 1, R6, c0, c0, 1 ; Read CLIDR
+ ANDS R3, R6, #&7000000 ; Mask out all but Level of Coherency (LoC)
+ MOV R3, R3, LSR #23 ; Cache level value (naturally aligned)
+ BEQ Finished
+ MOV R10, #0
+
+Loop1
+ ADD R2, R10, R10, LSR #1 ; Work out 3xcachelevel
+ MOV R12, R6, LSR R2 ; bottom 3 bits are the Cache type for this level
+ AND R12, R12, #7 ; get those 3 bits alone
+ CMP R12, #2
+ BLT Skip ; no cache or only instruction cache at this level
+ MCR p15, 2, R10, c0, c0, 0 ; write the Cache Size selection register (CSSELR) // OR in 1 for Instruction
+ ISB ; ISB to sync the change to the CacheSizeID reg
+ MRC p15, 1, R12, c0, c0, 0 ; reads current Cache Size ID register (CCSIDR)
+ AND R2, R12, #&7 ; extract the line length field
+ ADD R2, R2, #4 ; add 4 for the line length offset (log2 16 bytes)
+ LDR R4, =0x3FF
+ ANDS R4, R4, R12, LSR #3 ; R4 is the max number on the way size (right aligned)
+ CLZ R5, R4 ; R5 is the bit position of the way size increment
+ LDR R7, =0x00007FFF
+ ANDS R7, R7, R12, LSR #13 ; R7 is the max number of the index size (right aligned)
+
+Loop2
+ MOV R9, R4 ; R9 working copy of the max way size (right aligned)
+
+Loop3
+ ORR R0, R10, R9, LSL R5 ; factor in the way number and cache number into R0
+ ORR R0, R0, R7, LSL R2 ; factor in the index number
+
+ BLX R1
+
+ SUBS R9, R9, #1 ; decrement the way number
+ BGE Loop3
+ SUBS R7, R7, #1 ; decrement the index
+ BGE Loop2
+Skip
+ ADD R10, R10, #2 ; increment the cache number
+ CMP R3, R10
+ BGT Loop1
+
+Finished
+ LDMFD SP!, {r4-r12, lr}
+ BX LR
+
END
diff --git a/ArmPkg/Library/ArmLib/Common/ArmLibPrivate.h b/ArmPkg/Library/ArmLib/Common/ArmLibPrivate.h
index d1d2523947..b24d322785 100644
--- a/ArmPkg/Library/ArmLib/Common/ArmLibPrivate.h
+++ b/ArmPkg/Library/ArmLib/Common/ArmLibPrivate.h
@@ -56,6 +56,8 @@
#define CACHE_ARCHITECTURE_UNIFIED (0UL)
#define CACHE_ARCHITECTURE_SEPARATE (1UL)
+typedef VOID (*ARM_V7_CACHE_OPERATION)(UINT32);
+
VOID
CPSRMaskInsert (
IN UINT32 Mask,
@@ -67,4 +69,22 @@ CPSRRead (
VOID
);
+
+UINT32
+ReadCCSIDR (
+ IN UINT32 CSSELR
+ );
+
+
+UINT32
+ReadCLIDR (
+ VOID
+ );
+
+VOID
+ArmV7AllDataCachesOperation (
+ IN ARM_V7_CACHE_OPERATION DataCacheOperation
+ );
+
+
#endif // __ARM_LIB_PRIVATE_H__
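
The new declarations above are driven from ArmV7Lib.c. A usage sketch mirroring ArmV7DataCacheOperation() in this commit, assuming the set/way primitives are visible through the library headers; the function name is illustrative:

#include <Library/ArmLib.h>
#include "ArmLibPrivate.h"

//
// Usage sketch: sample the interrupt state, run the whole-cache set/way walk,
// drain the write buffer, then restore interrupts, as ArmV7Lib.c does.
//
VOID
CleanAllDataCachesSketch (
  VOID
  )
{
  BOOLEAN  SavedInterruptState;

  SavedInterruptState = ArmGetInterruptState ();

  // Each level/set/way tuple is passed to the callback in CP15 set/way format.
  ArmV7AllDataCachesOperation (ArmCleanDataCacheEntryBySetWay);

  ArmDrainWriteBuffer ();

  if (SavedInterruptState) {
    ArmEnableInterrupts ();
  }
}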