blob: e6246c39da32d6a65123fb7eee5c306037d6ddcd (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
|
/* SPDX-License-Identifier: GPL-2.0-only */
#ifndef __ARM_ARM64_ASM_H
#define __ARM_ARM64_ASM_H
/*
 * Close an assembly routine opened with ENTRY(): mark `name` as a
 * function-type symbol and emit its size via END().
 */
#define ENDPROC(name) \
.type name, %function; \
END(name)
/*
 * Open a routine: place it in its own .text.<name> section (allowing
 * the linker to garbage-collect unused routines), export the symbol,
 * and align the entry point to 2^bits bytes before defining the label.
 */
#define ENTRY_WITH_ALIGN(name, bits) \
.section .text.name, "ax", %progbits; \
.global name; \
.align bits; \
name:
/* Standard entry point: 2^2 = 4-byte alignment (the AArch64 instruction size). */
#define ENTRY(name) ENTRY_WITH_ALIGN(name, 2)
/* Record the symbol's size as the distance from its label to here. */
#define END(name) \
.size name, .-name
/*
 * Certain SoCs have an alignment requirement for the CPU reset vector.
 * Align to a 64 byte (2^6) typical cacheline for now.
 */
#define CPU_RESET_ENTRY(name) ENTRY_WITH_ALIGN(name, 6)
/*
 * Like ENTRY(), but additionally mark the symbol weak so a strong
 * definition of `name` elsewhere can override it at link time.
 *
 * Note: the original ended with a trailing `\` after `.weak name`,
 * which splices the NEXT physical source line into the macro body
 * (line continuation happens before directive parsing) — here it
 * would absorb the closing `#endif`. The dangling continuation is
 * dropped; the macro expansion is otherwise unchanged.
 */
#define ENTRY_WEAK(name) \
ENTRY(name) \
.weak name
#endif /* __ARM_ARM64_ASM_H */
|