path: root/src/arch/arm
author    Julius Werner <jwerner@chromium.org>    2014-01-24 16:23:08 -0800
committer Marc Jones <marc.jones@se-eng.com>      2014-11-13 06:49:41 +0100
commit    7c6e489b23476ad6149c173b6971327aa2eb7726 (patch)
tree      07cf2798f857b5455ac83989c774984c010a8470 /src/arch/arm
parent    168b0f9e380ababdc1189bac6cd8b121d1adf3b7 (diff)
download  coreboot-7c6e489b23476ad6149c173b6971327aa2eb7726.tar.xz
arm: Put assembly functions into separate sections
This patch changes the ENTRY() macro in asm.h to create a new section for
every assembler function, thus providing dcache_clean/invalidate_all and
friends with the same --gc-sections goodness that our C functions have. This
requires a few minor changes of moving around data (to make sure it ends up
in the right section) and changing some libgcc functions (which apparently
need to have two names?), but nothing serious.

(You may note that some of our assembly functions have data, sometimes even
writable, within the same .text section. This has been this way before and
I'm not looking to change it for now, although it's not totally clean. Since
we don't enforce read-only sections through paging, it doesn't really hurt.)

BUG=None
TEST=Nyan and Snow still boot. Confirm dcache_invalidate_all is not output
into any binary anymore since no one actually uses it.

Original-Change-Id: I247b29d6173ba516c8dff59126c93b66f7dc4b8d
Original-Signed-off-by: Julius Werner <jwerner@chromium.org>
Original-Reviewed-on: https://chromium-review.googlesource.com/183891
(cherry picked from commit 4a3f2e45e06cc8592d56c3577f41ff879f10e9cc)
Signed-off-by: Marc Jones <marc.jones@se-eng.com>
Change-Id: Ieaa4f2ea9d81c5b9e2b36a772ff9610bdf6446f9
Reviewed-on: http://review.coreboot.org/7451
Tested-by: build bot (Jenkins)
Reviewed-by: David Hendricks <dhendrix@chromium.org>
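For orientation, here is a minimal sketch (not part of the patch; the routine
name is made up) of what the new ENTRY() buys an assembly function. Each
ENTRY(name) now opens its own .text.<name> section, so linking with
--gc-sections can drop any routine nothing references, just as it already
does for C functions -- which is how dcache_invalidate_all disappears from
the binaries mentioned in the TEST line above.

#include <arch/asm.h>

/* hypothetical routine -- illustration only */
ENTRY(my_cache_helper)          /* after this patch, expands to roughly:               */
                                /*   .section .text.my_cache_helper, "ax", %progbits   */
                                /*   .global my_cache_helper; ALIGN; my_cache_helper:  */
	mov	pc, lr
ENDPROC(my_cache_helper)

/* With every function in its own section, a link step that passes
 * --gc-sections discards .text.my_cache_helper if no one calls it. */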
Diffstat (limited to 'src/arch/arm')
-rw-r--r--  src/arch/arm/armv4/bootblock.S       1
-rw-r--r--  src/arch/arm/armv7/bootblock.S       1
-rw-r--r--  src/arch/arm/armv7/exception_asm.S  16
-rw-r--r--  src/arch/arm/bootblock.ld            4
-rw-r--r--  src/arch/arm/include/arch/asm.h      6
-rw-r--r--  src/arch/arm/libgcc/ashldi3.S        6
-rw-r--r--  src/arch/arm/libgcc/lib1funcs.S     12
-rw-r--r--  src/arch/arm/libgcc/lshrdi3.S        6
-rw-r--r--  src/arch/arm/libgcc/muldi3.S         6
-rw-r--r--  src/arch/arm/memcpy.S                2
-rw-r--r--  src/arch/arm/memmove.S               2
-rw-r--r--  src/arch/arm/memset.S                3
12 files changed, 33 insertions, 32 deletions
diff --git a/src/arch/arm/armv4/bootblock.S b/src/arch/arm/armv4/bootblock.S
index 58a34cff7e..e4d43029fe 100644
--- a/src/arch/arm/armv4/bootblock.S
+++ b/src/arch/arm/armv4/bootblock.S
@@ -31,7 +31,6 @@
#include <arch/asm.h>
-.section ".start", "a", %progbits
ENTRY(_start)
/*
* Set the cpu to System mode with IRQ and FIQ disabled. Prefetch/Data
diff --git a/src/arch/arm/armv7/bootblock.S b/src/arch/arm/armv7/bootblock.S
index 9ed5d543aa..a15d1672ca 100644
--- a/src/arch/arm/armv7/bootblock.S
+++ b/src/arch/arm/armv7/bootblock.S
@@ -31,7 +31,6 @@
#include <arch/asm.h>
-.section ".start", "a", %progbits
.arm
ENTRY(_start)
/*
diff --git a/src/arch/arm/armv7/exception_asm.S b/src/arch/arm/armv7/exception_asm.S
index dd456363e9..1f369bcc05 100644
--- a/src/arch/arm/armv7/exception_asm.S
+++ b/src/arch/arm/armv7/exception_asm.S
@@ -27,13 +27,7 @@
* SUCH DAMAGE.
*/
- .global exception_stack_end
-exception_stack_end:
- .word 0
-
-exception_handler:
- .word 0
-
+ .text
.align 6
.arm
@@ -88,6 +82,7 @@ exception_common:
ldmfd sp!, { pc }^
+ .align 2
_undefined_instruction: .word exception_undefined_instruction
_software_interrupt: .word exception_software_interrupt
_prefetch_abort: .word exception_prefetch_abort
@@ -96,6 +91,13 @@ _not_used: .word exception_not_used
_irq: .word exception_irq
_fiq: .word exception_fiq
+ .global exception_stack_end
+exception_stack_end:
+ .word 0
+
+exception_handler:
+ .word 0
+
.thumb
.global set_vbar
.thumb_func
diff --git a/src/arch/arm/bootblock.ld b/src/arch/arm/bootblock.ld
index 2b04b22475..8d05c7b5a5 100644
--- a/src/arch/arm/bootblock.ld
+++ b/src/arch/arm/bootblock.ld
@@ -35,7 +35,7 @@ SECTIONS
. = CONFIG_BOOTBLOCK_BASE;
.bootblock . : {
- *(.start);
+ *(.text._start);
KEEP(*(.id));
*(.text);
*(.text.*);
@@ -56,4 +56,4 @@ SECTIONS
*(.note.*)
*(.ARM.*)
}
-}
\ No newline at end of file
+}
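A note on the bootblock.ld hunk above: _start used to be placed via its own
".start" input section, but with the new ENTRY() macro its code now arrives
in .text._start, so the script pulls that section in explicitly ahead of the
generic .text catch-alls to keep the entry point at CONFIG_BOOTBLOCK_BASE.
Reconstructed (and abbreviated) from the diff, the output section reads
roughly:

.bootblock . : {
	*(.text._start);      /* entry point first, emitted by ENTRY(_start) */
	KEEP(*(.id));
	*(.text);
	*(.text.*);
	/* ... remainder unchanged ... */
}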
diff --git a/src/arch/arm/include/arch/asm.h b/src/arch/arm/include/arch/asm.h
index a57ce4dafa..420fa602a6 100644
--- a/src/arch/arm/include/arch/asm.h
+++ b/src/arch/arm/include/arch/asm.h
@@ -43,16 +43,14 @@
END(name)
#define ENTRY(name) \
- .globl name; \
+ .section .text.name, "ax", %progbits; \
+ .global name; \
ALIGN; \
name:
#define END(name) \
.size name, .-name
-/* Everything should go into the text section by default. */
- .text
-
/* Thumb code uses the (new) unified assembly syntax. */
THUMB( .syntax unified )
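Read together, the asm.h hunk leaves the two macros looking roughly like this
(reconstructed from the context and '+' lines; whitespace approximate):

#define ENTRY(name) \
	.section .text.name, "ax", %progbits; \
	.global name; \
	ALIGN; \
	name:

#define END(name) \
	.size name, .-name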
diff --git a/src/arch/arm/libgcc/ashldi3.S b/src/arch/arm/libgcc/ashldi3.S
index bf7e7e75c4..3009f18b83 100644
--- a/src/arch/arm/libgcc/ashldi3.S
+++ b/src/arch/arm/libgcc/ashldi3.S
@@ -39,7 +39,8 @@ Boston, MA 02110-1301, USA. */
#endif
ENTRY(__ashldi3)
-ENTRY(__aeabi_llsl)
+.global __aeabi_llsl
+__aeabi_llsl:
subs r3, r2, #32
rsb ip, r2, #32
@@ -51,7 +52,8 @@ ENTRY(__aeabi_llsl)
mov al, al, lsl r2
mov pc, lr
+.type __aeabi_llsl, %function
+.size __aeabi_llsl, .-__aeabi_llsl
ENDPROC(__ashldi3)
-ENDPROC(__aeabi_llsl)
#endif
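The libgcc hunks here and below all follow one pattern, sketched with made-up
names: because ENTRY() now switches to a fresh .text.<name> section, a
routine that must also answer to an AEABI alias can no longer simply use
ENTRY() twice (the second one would move the body into a different section
than the first symbol). Instead the alias becomes a plain global label inside
the primary function's section, with its .type/.size bookkeeping written out
by hand:

ENTRY(__primary_name)           @ hypothetical primary symbol, owns the section
.global __alias_name            @ hypothetical AEABI alias, same section
__alias_name:
	mov	pc, lr          @ body shared by both names
.type __alias_name, %function
.size __alias_name, .-__alias_name
ENDPROC(__primary_name)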
diff --git a/src/arch/arm/libgcc/lib1funcs.S b/src/arch/arm/libgcc/lib1funcs.S
index 533c369d6e..f78a937afc 100644
--- a/src/arch/arm/libgcc/lib1funcs.S
+++ b/src/arch/arm/libgcc/lib1funcs.S
@@ -206,7 +206,8 @@ Boston, MA 02111-1307, USA. */
ENTRY(__udivsi3)
-ENTRY(__aeabi_uidiv)
+.global __aeabi_uidiv
+__aeabi_uidiv:
subs r2, r1, #1
moveq pc, lr
@@ -230,8 +231,9 @@ ENTRY(__aeabi_uidiv)
mov r0, r0, lsr r2
mov pc, lr
+.type __aeabi_uidiv, %function
+.size __aeabi_uidiv, .-__aeabi_uidiv
ENDPROC(__udivsi3)
-ENDPROC(__aeabi_uidiv)
ENTRY(__umodsi3)
@@ -250,7 +252,8 @@ ENTRY(__umodsi3)
ENDPROC(__umodsi3)
ENTRY(__divsi3)
-ENTRY(__aeabi_idiv)
+.global __aeabi_idiv
+__aeabi_idiv:
cmp r1, #0
eor ip, r0, r1 @ save the sign of the result.
@@ -287,8 +290,9 @@ ENTRY(__aeabi_idiv)
rsbmi r0, r0, #0
mov pc, lr
+.type __aeabi_idiv, %function
+.size __aeabi_idiv, .-__aeabi_idiv
ENDPROC(__divsi3)
-ENDPROC(__aeabi_idiv)
ENTRY(__modsi3)
diff --git a/src/arch/arm/libgcc/lshrdi3.S b/src/arch/arm/libgcc/lshrdi3.S
index 5a5809a17e..7af0c83123 100644
--- a/src/arch/arm/libgcc/lshrdi3.S
+++ b/src/arch/arm/libgcc/lshrdi3.S
@@ -39,7 +39,8 @@ Boston, MA 02110-1301, USA. */
#endif
ENTRY(__lshrdi3)
-ENTRY(__aeabi_llsr)
+.global __aeabi_llsr
+__aeabi_llsr:
subs r3, r2, #32
rsb ip, r2, #32
@@ -51,7 +52,8 @@ ENTRY(__aeabi_llsr)
mov ah, ah, lsr r2
mov pc, lr
+.type __aeabi_llsr, %function
+.size __aeabi_llsr, .-__aeabi_llsr
ENDPROC(__lshrdi3)
-ENDPROC(__aeabi_llsr)
#endif
diff --git a/src/arch/arm/libgcc/muldi3.S b/src/arch/arm/libgcc/muldi3.S
index f6a40a8dc2..652dacab26 100644
--- a/src/arch/arm/libgcc/muldi3.S
+++ b/src/arch/arm/libgcc/muldi3.S
@@ -27,7 +27,8 @@
#endif
ENTRY(__muldi3)
-ENTRY(__aeabi_lmul)
+.global __aeabi_lmul
+__aeabi_lmul:
mul xh, yl, xh
mla xh, xl, yh, xh
@@ -45,7 +46,8 @@ ENTRY(__aeabi_lmul)
adc xh, xh, ip, lsr #16
mov pc, lr
+.type __aeabi_lmul, %function
+.size __aeabi_lmul, .-__aeabi_lmul
ENDPROC(__muldi3)
-ENDPROC(__aeabi_lmul)
#endif
diff --git a/src/arch/arm/memcpy.S b/src/arch/arm/memcpy.S
index b8f857bb56..1388d05ad9 100644
--- a/src/arch/arm/memcpy.S
+++ b/src/arch/arm/memcpy.S
@@ -52,8 +52,6 @@
ldmfd sp!, {r0, \reg1, \reg2}
.endm
- .text
-
/* Prototype: void *memcpy(void *dest, const void *src, size_t n); */
ENTRY(memcpy)
diff --git a/src/arch/arm/memmove.S b/src/arch/arm/memmove.S
index dc29f7458c..bd5f8f1ac5 100644
--- a/src/arch/arm/memmove.S
+++ b/src/arch/arm/memmove.S
@@ -13,8 +13,6 @@
#include <arch/asm.h>
#include "asmlib.h"
- .text
-
/*
* Prototype: void *memmove(void *dest, const void *src, size_t n);
*
diff --git a/src/arch/arm/memset.S b/src/arch/arm/memset.S
index 945767c599..0c1102deb4 100644
--- a/src/arch/arm/memset.S
+++ b/src/arch/arm/memset.S
@@ -13,9 +13,6 @@
#include <arch/asm.h>
#include "asmlib.h"
- .text
- .align 5
-
ENTRY(memset)
ands r3, r0, #3 @ 1 unaligned?
mov ip, r0 @ preserve r0 as return value