| field | value | timestamp |
|---|---|---|
| author | Aaron Durbin <adurbin@chromium.org> | 2018-09-13 02:10:45 -0600 |
| committer | Aaron Durbin <adurbin@chromium.org> | 2018-09-14 08:16:37 +0000 |
| commit | 75a62e76486f63f6dadb5492c205570ace81e9d5 (patch) | |
| tree | c3338d2ddd7b2f9f51f35432a24087fc289999fb /src/include/cpu/x86/msr.h | |
| parent | cf9ea55473cde8b9a2b9494eca452df7783376e5 (diff) | |
| download | coreboot-75a62e76486f63f6dadb5492c205570ace81e9d5.tar.xz | |
compiler.h: add __always_inline and use it in code base
Add a __always_inline macro that wraps __attribute__((always_inline))
and replace current users with the macro, excluding files under
src/vendorcode.
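
For readers unfamiliar with the pattern, a minimal sketch of what such a wrapper macro typically looks like; the actual definition lives in src/include/compiler.h, which is outside the diff shown below, so treat this as an illustration rather than the committed code:

```c
/* Sketch only: the real macro is defined in src/include/compiler.h,
 * which is not part of this diff. The guard lets toolchain headers
 * that already provide __always_inline take precedence. */
#ifndef __always_inline
#define __always_inline inline __attribute__((always_inline))
#endif

/* Call sites then shrink from the open-coded attribute to: */
static __always_inline int add_one(int x) { return x + 1; }
```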
Change-Id: Ic57e474c1d2ca7cc0405ac677869f78a28d3e529
Signed-off-by: Aaron Durbin <adurbin@chromium.org>
Reviewed-on: https://review.coreboot.org/28587
Tested-by: build bot (Jenkins) <no-reply@coreboot.org>
Reviewed-by: Julius Werner <jwerner@google.com>
Diffstat (limited to 'src/include/cpu/x86/msr.h')
-rw-r--r-- | src/include/cpu/x86/msr.h | 16 |
1 file changed, 8 insertions, 8 deletions
```diff
diff --git a/src/include/cpu/x86/msr.h b/src/include/cpu/x86/msr.h
index 74c2521af5..290c54a499 100644
--- a/src/include/cpu/x86/msr.h
+++ b/src/include/cpu/x86/msr.h
@@ -1,6 +1,8 @@
 #ifndef CPU_X86_MSR_H
 #define CPU_X86_MSR_H
 
+#include <compiler.h>
+
 /* Intel SDM: Table 2-1
  * IA-32 architectural MSR: Extended Feature Enable Register
  */
@@ -50,19 +52,18 @@ msr_t soc_msr_read(unsigned int index);
 void soc_msr_write(unsigned int index, msr_t msr);
 
 /* Handle MSR references in the other source code */
-static inline __attribute__((always_inline)) msr_t rdmsr(unsigned int index)
+static __always_inline msr_t rdmsr(unsigned int index)
 {
         return soc_msr_read(index);
 }
 
-static inline __attribute__((always_inline)) void wrmsr(unsigned int index,
-        msr_t msr)
+static __always_inline void wrmsr(unsigned int index, msr_t msr)
 {
         soc_msr_write(index, msr);
 }
 #else /* CONFIG_SOC_SETS_MSRS */
 
-/* The following functions require the always_inline due to AMD
+/* The following functions require the __always_inline due to AMD
  * function STOP_CAR_AND_CPU that disables cache as
  * RAM, the cache as RAM stack can no longer be used. Called
  * functions must be inlined to avoid stack usage. Also, the
@@ -70,9 +71,9 @@ static inline __attribute__((always_inline)) void wrmsr(unsigned int index,
  * allocated them from the stack. With gcc 4.5.0, some functions
  * declared as inline are not being inlined. This patch forces
  * these functions to always be inlined by adding the qualifier
- * __attribute__((always_inline)) to their declaration.
+ * __always_inline to their declaration.
  */
-static inline __attribute__((always_inline)) msr_t rdmsr(unsigned int index)
+static __always_inline msr_t rdmsr(unsigned int index)
 {
         msr_t result;
         __asm__ __volatile__ (
@@ -83,8 +84,7 @@ static inline __attribute__((always_inline)) msr_t rdmsr(unsigned int index)
         return result;
 }
 
-static inline __attribute__((always_inline)) void wrmsr(unsigned int index,
-        msr_t msr)
+static __always_inline void wrmsr(unsigned int index, msr_t msr)
 {
         __asm__ __volatile__ (
                 "wrmsr"
```
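
For context, a brief usage sketch of the helpers touched above, assuming the usual coreboot msr_t layout with 32-bit lo/hi halves; the ILLUSTRATIVE_IA32_EFER name is hypothetical and is used only to show the call pattern (the 0xC0000080 index comes from the Intel SDM reference in the header comment):

```c
#include <cpu/x86/msr.h>

#define ILLUSTRATIVE_IA32_EFER 0xC0000080	/* hypothetical name for this sketch */

/* Read-modify-write through the forced-inline helpers. Because both
 * rdmsr() and wrmsr() are __always_inline, this compiles down to the
 * raw instructions with no call frame, which is what the cache-as-RAM
 * teardown path described in the comment above relies on. */
static __always_inline void efer_set_bit(unsigned int bit)
{
	msr_t msr = rdmsr(ILLUSTRATIVE_IA32_EFER);
	msr.lo |= (1u << bit);
	wrmsr(ILLUSTRATIVE_IA32_EFER, msr);
}
```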