author     Ard Biesheuvel <ard.biesheuvel@linaro.org>    2016-05-11 10:38:47 +0200
committer  Ard Biesheuvel <ard.biesheuvel@linaro.org>    2016-05-12 13:53:08 +0200
commit     cf580da1bc4c16026cb1732f741a892b2d3d3d67
tree       d4b3b771dbfb0914e8af897c27b3c8668084e185
parent     14b2ebc30c8bc98f668fa78171b659e2cdc33aa5
ArmPkg/ArmLib: don't invalidate entire I-cache on range operation
Instead of cleaning the data cache to the PoU by virtual address and
subsequently invalidating the entire I-cache, invalidate only the
range that we just cleaned. This way, we don't invalidate other
cachelines unnecessarily.
Contributed-under: TianoCore Contribution Agreement 1.0
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Reviewed-by: Leif Lindholm <leif.lindholm@linaro.org>
-rw-r--r--  ArmPkg/Include/Library/ArmLib.h                                | 10
-rw-r--r--  ArmPkg/Library/ArmCacheMaintenanceLib/ArmCacheMaintenanceLib.c | 28
-rw-r--r--  ArmPkg/Library/ArmLib/AArch64/AArch64Support.S                 |  5
-rw-r--r--  ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S                     |  5
-rw-r--r--  ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm                   |  6
5 files changed, 43 insertions(+), 11 deletions(-)
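The net effect on InvalidateInstructionCacheRange () is easiest to see as a consolidated C sketch of the function after this change; the helper names are the ArmLib ones used by the hunks below, and the sketch simply stitches those hunks together rather than reproducing the file verbatim.

/*
 * Consolidated sketch of InvalidateInstructionCacheRange () after this
 * patch (see the ArmCacheMaintenanceLib.c hunks below). Previously the
 * function cleaned the range to the PoU and then invalidated the entire
 * I-cache; now only the affected I-cache lines are invalidated.
 */
VOID *
EFIAPI
InvalidateInstructionCacheRange (
  IN  VOID    *Address,
  IN  UINTN   Length
  )
{
  // Clean each D-cache line covering the range to the Point of Unification
  CacheRangeOperation (Address, Length, ArmCleanDataCacheEntryToPoUByMVA,
    ArmDataCacheLineLength ());

  // Invalidate only the I-cache lines covering the same range, using the
  // I-cache line size (which may differ from the D-cache line size)
  CacheRangeOperation (Address, Length,
    ArmInvalidateInstructionCacheEntryToPoUByMVA,
    ArmInstructionCacheLineLength ());

  // Make the updated instructions visible to subsequent instruction fetches
  ArmInstructionSynchronizationBarrier ();

  return Address;
}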
diff --git a/ArmPkg/Include/Library/ArmLib.h b/ArmPkg/Include/Library/ArmLib.h
index 1689f0072d..4608b0cccc 100644
--- a/ArmPkg/Include/Library/ArmLib.h
+++ b/ArmPkg/Include/Library/ArmLib.h
@@ -183,13 +183,19 @@ ArmInvalidateDataCacheEntryByMVA (
VOID
EFIAPI
-ArmCleanDataCacheEntryToPoUByMVA(
+ArmCleanDataCacheEntryToPoUByMVA (
IN UINTN Address
);
VOID
EFIAPI
-ArmCleanDataCacheEntryByMVA(
+ArmInvalidateInstructionCacheEntryToPoUByMVA (
+ IN UINTN Address
+ );
+
+VOID
+EFIAPI
+ArmCleanDataCacheEntryByMVA (
IN UINTN Address
);
diff --git a/ArmPkg/Library/ArmCacheMaintenanceLib/ArmCacheMaintenanceLib.c b/ArmPkg/Library/ArmCacheMaintenanceLib/ArmCacheMaintenanceLib.c
index 1045f9068f..0759e38cd4 100644
--- a/ArmPkg/Library/ArmCacheMaintenanceLib/ArmCacheMaintenanceLib.c
+++ b/ArmPkg/Library/ArmCacheMaintenanceLib/ArmCacheMaintenanceLib.c
@@ -17,15 +17,16 @@
#include <Library/DebugLib.h>
#include <Library/PcdLib.h>
+STATIC
VOID
CacheRangeOperation (
IN VOID *Start,
IN UINTN Length,
- IN LINE_OPERATION LineOperation
+ IN LINE_OPERATION LineOperation,
+ IN UINTN LineLength
)
{
- UINTN ArmCacheLineLength = ArmDataCacheLineLength();
- UINTN ArmCacheLineAlignmentMask = ArmCacheLineLength - 1;
+ UINTN ArmCacheLineAlignmentMask = LineLength - 1;
// Align address (rounding down)
UINTN AlignedAddress = (UINTN)Start - ((UINTN)Start & ArmCacheLineAlignmentMask);
@@ -34,7 +35,7 @@ CacheRangeOperation (
// Perform the line operation on an address in each cache line
while (AlignedAddress < EndAddress) {
LineOperation(AlignedAddress);
- AlignedAddress += ArmCacheLineLength;
+ AlignedAddress += LineLength;
}
ArmDataSynchronizationBarrier ();
}
@@ -64,8 +65,14 @@ InvalidateInstructionCacheRange (
IN UINTN Length
)
{
- CacheRangeOperation (Address, Length, ArmCleanDataCacheEntryToPoUByMVA);
- ArmInvalidateInstructionCache ();
+ CacheRangeOperation (Address, Length, ArmCleanDataCacheEntryToPoUByMVA,
+ ArmDataCacheLineLength ());
+ CacheRangeOperation (Address, Length,
+ ArmInvalidateInstructionCacheEntryToPoUByMVA,
+ ArmInstructionCacheLineLength ());
+
+ ArmInstructionSynchronizationBarrier ();
+
return Address;
}
@@ -85,7 +92,8 @@ WriteBackInvalidateDataCacheRange (
IN UINTN Length
)
{
- CacheRangeOperation(Address, Length, ArmCleanInvalidateDataCacheEntryByMVA);
+ CacheRangeOperation(Address, Length, ArmCleanInvalidateDataCacheEntryByMVA,
+ ArmDataCacheLineLength ());
return Address;
}
@@ -105,7 +113,8 @@ WriteBackDataCacheRange (
IN UINTN Length
)
{
- CacheRangeOperation(Address, Length, ArmCleanDataCacheEntryByMVA);
+ CacheRangeOperation(Address, Length, ArmCleanDataCacheEntryByMVA,
+ ArmDataCacheLineLength ());
return Address;
}
@@ -116,6 +125,7 @@ InvalidateDataCacheRange (
IN UINTN Length
)
{
- CacheRangeOperation(Address, Length, ArmInvalidateDataCacheEntryByMVA);
+ CacheRangeOperation(Address, Length, ArmInvalidateDataCacheEntryByMVA,
+ ArmDataCacheLineLength ());
return Address;
}
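A usage note on the range functions above: a caller that writes code into memory (for example, copying a trampoline into RAM) only needs the single range call, since the clean-to-PoU step is now part of InvalidateInstructionCacheRange (). A hypothetical example follows; Destination, Trampoline and TrampolineSize are illustrative names, not part of the patch.

// Hypothetical caller: copy code into place, then make it fetchable.
CopyMem (Destination, Trampoline, TrampolineSize);
InvalidateInstructionCacheRange (Destination, TrampolineSize);
// Only the cache lines spanning [Destination, Destination + TrampolineSize)
// are cleaned and invalidated; the rest of the I-cache is left untouched.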
diff --git a/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S b/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S
index 43f7a795ac..9441f47e30 100644
--- a/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S
+++ b/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S
@@ -23,6 +23,7 @@ GCC_ASM_EXPORT (ArmInvalidateInstructionCache)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryToPoUByMVA)
+GCC_ASM_EXPORT (ArmInvalidateInstructionCacheEntryToPoUByMVA)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryBySetWay)
@@ -80,6 +81,10 @@ ASM_PFX(ArmCleanDataCacheEntryToPoUByMVA):
dc cvau, x0 // Clean single data cache line to PoU
ret
+ASM_PFX(ArmInvalidateInstructionCacheEntryToPoUByMVA):
+ ic ivau, x0 // Invalidate single instruction cache line to PoU
+ ret
+
ASM_PFX(ArmCleanInvalidateDataCacheEntryByMVA):
dc civac, x0 // Clean and invalidate single data cache line
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S
index 50c760f335..c765032c9e 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S
@@ -18,6 +18,7 @@ GCC_ASM_EXPORT (ArmInvalidateInstructionCache)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryByMVA)
+GCC_ASM_EXPORT (ArmInvalidateInstructionCacheEntryToPoUByMVA)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryToPoUByMVA)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryByMVA)
@@ -74,6 +75,10 @@ ASM_PFX(ArmCleanDataCacheEntryToPoUByMVA):
mcr p15, 0, r0, c7, c11, 1 @clean single data cache line to PoU
bx lr
+ASM_PFX(ArmInvalidateInstructionCacheEntryToPoUByMVA):
+ mcr p15, 0, r0, c7, c5, 1 @Invalidate single instruction cache line to PoU
+ mcr p15, 0, r0, c7, c5, 7 @Invalidate branch predictor
+ bx lr
ASM_PFX(ArmCleanInvalidateDataCacheEntryByMVA):
mcr p15, 0, r0, c7, c14, 1 @clean and invalidate single data cache line
diff --git a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm
index a460bd2da7..2363ee4576 100644
--- a/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm
+++ b/ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.asm
@@ -34,6 +34,12 @@ CTRL_I_BIT EQU (1 << 12)
bx lr
+ RVCT_ASM_EXPORT ArmInvalidateInstructionCacheEntryToPoUByMVA
+ mcr p15, 0, r0, c7, c5, 1 ; invalidate single instruction cache line to PoU
+ mcr p15, 0, r0, c7, c5, 7 ; invalidate branch predictor
+ bx lr
+
+
RVCT_ASM_EXPORT ArmCleanDataCacheEntryToPoUByMVA
mcr p15, 0, r0, c7, c11, 1 ; clean single data cache line to PoU
bx lr
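For readers following along from C rather than assembly: the two ARMv7 coprocessor writes added above (ICIMVAU and BPIMVA) can also be expressed with GCC-style inline assembly. This is an illustrative sketch only, with a made-up helper name; the library's real entry points remain the assembly routines above, and the surrounding DSB/ISB sequencing is still provided by ArmCacheMaintenanceLib.

// Illustrative GCC inline-asm equivalent of the ARMv7 ops added above.
static inline void
IllustrativeIcacheLineInvalidateToPoU (unsigned long Address)
{
  // ICIMVAU: invalidate instruction cache line by MVA to PoU (c7, c5, 1)
  __asm__ volatile ("mcr p15, 0, %0, c7, c5, 1" : : "r" (Address) : "memory");
  // BPIMVA: invalidate branch predictor entry by MVA (c7, c5, 7)
  __asm__ volatile ("mcr p15, 0, %0, c7, c5, 7" : : "r" (Address) : "memory");
}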