author     Alexandru Gagniuc <mr.nuke.me@gmail.com>  2015-09-30 20:23:09 -0700
committer  Alexandru Gagniuc <mr.nuke.me@gmail.com>  2015-10-15 03:52:49 +0000
commit     86091f94b6ca58f4b8795503b274492d6a935c15 (patch)
tree       db6e5f77dc57850b25574aed5063743ca4bc4d48 /src/cpu/amd
parent     58562405c8c416a415652516b8af31b204b4ff0d (diff)
download   coreboot-86091f94b6ca58f4b8795503b274492d6a935c15.tar.xz
cpu/mtrr.h: Fix macro names for MTRR registers
We use UNDERSCORE_CASE. For the MTRR macros that refer to an MSR,
we also remove the _MSR suffix, as they are, by definition, MSRs.
Change-Id: Id4483a75d62cf1b478a9105ee98a8f55140ce0ef
Signed-off-by: Alexandru Gagniuc <mr.nuke.me@gmail.com>
Reviewed-on: http://review.coreboot.org/11761
Reviewed-by: Aaron Durbin <adurbin@chromium.org>
Tested-by: build bot (Jenkins)
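For orientation, the rename is purely mechanical: each macro still expands to the same architectural MSR number. The header change itself lives in cpu/mtrr.h and is outside this diffstat (limited to src/cpu/amd), so the sketch below is an assumption based on the standard x86 MTRR MSR layout, not the literal contents of that header:

/* Hypothetical sketch of the renamed macros; the values are the architectural MSR numbers. */
#define MTRR_PHYS_BASE(reg)   (0x200 + 2 * (reg))        /* was MTRRphysBase_MSR(reg) */
#define MTRR_PHYS_MASK(reg)   (MTRR_PHYS_BASE(reg) + 1)  /* was MTRRphysMask_MSR(reg) */
#define MTRR_DEF_TYPE_MSR     0x2ff                      /* was MTRRdefType_MSR; retains _MSR in this change */
#define MTRR_DEF_TYPE_EN      (1 << 11)                  /* was MTRRdefTypeEn */
#define MTRR_DEF_TYPE_FIX_EN  (1 << 10)                  /* was MTRRdefTypeFixEn */
#define MTRR_PHYS_MASK_VALID  (1 << 11)                  /* was MTRRphysMaskValid */
#define MTRR_FIX_64K_00000    0x250                      /* was MTRRfix64K_00000_MSR */
#define MTRR_FIX_16K_80000    0x258
#define MTRR_FIX_16K_A0000    0x259
#define MTRR_FIX_4K_C0000     0x268                      /* 4K fixed ranges run through 0x26f */
#define MTRR_FIX_4K_C8000     0x269
#define MTRR_FIX_4K_D0000     0x26a
#define MTRR_FIX_4K_D8000     0x26b
#define MTRR_FIX_4K_E0000     0x26c
#define MTRR_FIX_4K_E8000     0x26d
#define MTRR_FIX_4K_F0000     0x26e
#define MTRR_FIX_4K_F8000     0x26f

Callers therefore change from wrmsr(MTRRphysBase_MSR(0), msr) to wrmsr(MTRR_PHYS_BASE(0), msr), as seen throughout the hunks below.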
Diffstat (limited to 'src/cpu/amd')
-rw-r--r--  src/cpu/amd/agesa/s3_resume.c            8
-rw-r--r--  src/cpu/amd/car/cache_as_ram.inc        76
-rw-r--r--  src/cpu/amd/car/disable_cache_as_ram.c  10
-rw-r--r--  src/cpu/amd/model_fxx/model_fxx_init.c  22
-rw-r--r--  src/cpu/amd/pi/s3_resume.c               8
-rw-r--r--  src/cpu/amd/smm/smm_init.c               6
6 files changed, 65 insertions, 65 deletions
diff --git a/src/cpu/amd/agesa/s3_resume.c b/src/cpu/amd/agesa/s3_resume.c
index 98671f47d5..17364de5e0 100644
--- a/src/cpu/amd/agesa/s3_resume.c
+++ b/src/cpu/amd/agesa/s3_resume.c
@@ -81,15 +81,15 @@ static void set_resume_cache(void)
 	/* Enable caching for 0 - coreboot ram using variable mtrr */
 	msr.lo = 0 | MTRR_TYPE_WRBACK;
 	msr.hi = 0;
-	wrmsr(MTRRphysBase_MSR(0), msr);
-	msr.lo = ~(CONFIG_RAMTOP - 1) | MTRRphysMaskValid;
+	wrmsr(MTRR_PHYS_BASE(0), msr);
+	msr.lo = ~(CONFIG_RAMTOP - 1) | MTRR_PHYS_MASK_VALID;
 	msr.hi = (1 << (CONFIG_CPU_ADDR_BITS - 32)) - 1;
-	wrmsr(MTRRphysMask_MSR(0), msr);
+	wrmsr(MTRR_PHYS_MASK(0), msr);
 
 	/* Set the default memory type and disable fixed and enable variable MTRRs */
 	msr.hi = 0;
 	msr.lo = (1 << 11);
-	wrmsr(MTRRdefType_MSR, msr);
+	wrmsr(MTRR_DEF_TYPE_MSR, msr);
 
 	enable_cache();
 }
diff --git a/src/cpu/amd/car/cache_as_ram.inc b/src/cpu/amd/car/cache_as_ram.inc
index 133daace3d..0b2bc60bea 100644
--- a/src/cpu/amd/car/cache_as_ram.inc
+++ b/src/cpu/amd/car/cache_as_ram.inc
@@ -76,9 +76,9 @@ cache_as_ram_setup:
 	cvtsd2si	%xmm3, %ebx
 
 	/* Check if cpu_init_detected. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
 	rdmsr
-	andl	$MTRRdefTypeEn, %eax
+	andl	$MTRR_DEF_TYPE_EN, %eax
 	movl	%eax, %ebx	/* We store the status. */
 
 	jmp_if_k8(CAR_FAM10_out_post_errata)
@@ -270,27 +270,27 @@ clear_fixed_var_mtrr_out:
 
 #if CacheSize > 0x8000
 	/* Enable caching for 32K-64K using fixed MTRR. */
-	movl	$MTRRfix4K_C0000_MSR, %ecx
+	movl	$MTRR_FIX_4K_C0000, %ecx
 	simplemask CacheSize, 0x8000
 	wrmsr
 #endif
 
 #if CacheSize > 0x10000
 	/* Enable caching for 64K-96K using fixed MTRR. */
-	movl	$MTRRfix4K_D0000_MSR, %ecx
+	movl	$MTRR_FIX_4K_D0000, %ecx
 	simplemask CacheSize, 0x10000
 	wrmsr
 #endif
 
 #if CacheSize > 0x18000
 	/* Enable caching for 96K-128K using fixed MTRR. */
-	movl	$MTRRfix4K_D8000_MSR, %ecx
+	movl	$MTRR_FIX_4K_D8000, %ecx
 	simplemask CacheSize, 0x18000
 	wrmsr
 #endif
 
 	/* Enable caching for 0-32K using fixed MTRR. */
-	movl	$MTRRfix4K_C8000_MSR, %ecx
+	movl	$MTRR_FIX_4K_C8000, %ecx
 	simplemask CacheSize, 0
 	wrmsr
 
@@ -305,7 +305,7 @@ clear_fixed_var_mtrr_out:
 	/* Enable write base caching so we can do execute in place (XIP)
 	 * on the flash ROM.
 	 */
-	movl	$MTRRphysBase_MSR(1), %ecx
+	movl	$MTRR_PHYS_BASE(1), %ecx
 	xorl	%edx, %edx
 	/*
 	 * IMPORTANT: The following calculation _must_ be done at runtime. See
@@ -316,19 +316,19 @@ clear_fixed_var_mtrr_out:
 	orl	$MTRR_TYPE_WRBACK, %eax
 	wrmsr
 
-	movl	$MTRRphysMask_MSR(1), %ecx
+	movl	$MTRR_PHYS_MASK(1), %ecx
 	movl	$0xff, %edx /* (1 << (CONFIG_CPU_ADDR_BITS - 32)) - 1 for K8 (CONFIG_CPU_ADDR_BITS = 40) */
 	jmp_if_k8(wbcache_post_fam10_setup)
 	movl	$0xffff, %edx /* (1 << (CONFIG_CPU_ADDR_BITS - 32)) - 1 for FAM10 (CONFIG_CPU_ADDR_BITS = 48) */
 wbcache_post_fam10_setup:
-	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
 	wrmsr
 #endif /* CONFIG_XIP_ROM_SIZE */
 
 	/* Set the default memory type and enable fixed and variable MTRRs. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
 	xorl	%edx, %edx
-	movl	$(MTRRdefTypeEn | MTRRdefTypeFixEn), %eax
+	movl	$(MTRR_DEF_TYPE_EN | MTRR_DEF_TYPE_FIX_EN), %eax
 	wrmsr
 
 	/* Enable the MTRRs and IORRs in SYSCFG. */
@@ -462,35 +462,35 @@ cache_as_ram_switch_stack:
 
 all_mtrr_msrs:
 	/* fixed MTRR MSRs */
-	.long	MTRRfix64K_00000_MSR
-	.long	MTRRfix16K_80000_MSR
-	.long	MTRRfix16K_A0000_MSR
-	.long	MTRRfix4K_C0000_MSR
-	.long	MTRRfix4K_C8000_MSR
-	.long	MTRRfix4K_D0000_MSR
-	.long	MTRRfix4K_D8000_MSR
-	.long	MTRRfix4K_E0000_MSR
-	.long	MTRRfix4K_E8000_MSR
-	.long	MTRRfix4K_F0000_MSR
-	.long	MTRRfix4K_F8000_MSR
+	.long	MTRR_FIX_64K_00000
+	.long	MTRR_FIX_16K_80000
+	.long	MTRR_FIX_16K_A0000
+	.long	MTRR_FIX_4K_C0000
+	.long	MTRR_FIX_4K_C8000
+	.long	MTRR_FIX_4K_D0000
+	.long	MTRR_FIX_4K_D8000
+	.long	MTRR_FIX_4K_E0000
+	.long	MTRR_FIX_4K_E8000
+	.long	MTRR_FIX_4K_F0000
+	.long	MTRR_FIX_4K_F8000
 
 	/* var MTRR MSRs */
-	.long	MTRRphysBase_MSR(0)
-	.long	MTRRphysMask_MSR(0)
-	.long	MTRRphysBase_MSR(1)
-	.long	MTRRphysMask_MSR(1)
-	.long	MTRRphysBase_MSR(2)
-	.long	MTRRphysMask_MSR(2)
-	.long	MTRRphysBase_MSR(3)
-	.long	MTRRphysMask_MSR(3)
-	.long	MTRRphysBase_MSR(4)
-	.long	MTRRphysMask_MSR(4)
-	.long	MTRRphysBase_MSR(5)
-	.long	MTRRphysMask_MSR(5)
-	.long	MTRRphysBase_MSR(6)
-	.long	MTRRphysMask_MSR(6)
-	.long	MTRRphysBase_MSR(7)
-	.long	MTRRphysMask_MSR(7)
+	.long	MTRR_PHYS_BASE(0)
+	.long	MTRR_PHYS_MASK(0)
+	.long	MTRR_PHYS_BASE(1)
+	.long	MTRR_PHYS_MASK(1)
+	.long	MTRR_PHYS_BASE(2)
+	.long	MTRR_PHYS_MASK(2)
+	.long	MTRR_PHYS_BASE(3)
+	.long	MTRR_PHYS_MASK(3)
+	.long	MTRR_PHYS_BASE(4)
+	.long	MTRR_PHYS_MASK(4)
+	.long	MTRR_PHYS_BASE(5)
+	.long	MTRR_PHYS_MASK(5)
+	.long	MTRR_PHYS_BASE(6)
+	.long	MTRR_PHYS_MASK(6)
+	.long	MTRR_PHYS_BASE(7)
+	.long	MTRR_PHYS_MASK(7)
 
 	/* Variable IORR MTRR MSRs */
 	.long	IORRBase_MSR(0)
diff --git a/src/cpu/amd/car/disable_cache_as_ram.c b/src/cpu/amd/car/disable_cache_as_ram.c
index d3a381210e..3b464b8776 100644
--- a/src/cpu/amd/car/disable_cache_as_ram.c
+++ b/src/cpu/amd/car/disable_cache_as_ram.c
@@ -33,15 +33,15 @@ static inline __attribute__((always_inline)) void disable_cache_as_ram(void)
 
 	msr.lo = 0;
 	msr.hi = 0;
-	wrmsr(MTRRfix4K_C8000_MSR, msr);
+	wrmsr(MTRR_FIX_4K_C8000, msr);
 #if CONFIG_DCACHE_RAM_SIZE > 0x8000
-	wrmsr(MTRRfix4K_C0000_MSR, msr);
+	wrmsr(MTRR_FIX_4K_C0000, msr);
 #endif
 #if CONFIG_DCACHE_RAM_SIZE > 0x10000
-	wrmsr(MTRRfix4K_D0000_MSR, msr);
+	wrmsr(MTRR_FIX_4K_D0000, msr);
 #endif
 #if CONFIG_DCACHE_RAM_SIZE > 0x18000
-	wrmsr(MTRRfix4K_D8000_MSR, msr);
+	wrmsr(MTRR_FIX_4K_D8000, msr);
 #endif
 
 	/* disable fixed mtrr from now on, it will be enabled by ramstage again*/
@@ -53,7 +53,7 @@ static inline __attribute__((always_inline)) void disable_cache_as_ram(void)
 
 	msr.hi = 0;
 	msr.lo = (1 << 11);
-	wrmsr(MTRRdefType_MSR, msr);
+	wrmsr(MTRR_DEF_TYPE_MSR, msr);
 
 	enable_cache();
 }
diff --git a/src/cpu/amd/model_fxx/model_fxx_init.c b/src/cpu/amd/model_fxx/model_fxx_init.c
index a6561ee560..268f1b2dad 100644
--- a/src/cpu/amd/model_fxx/model_fxx_init.c
+++ b/src/cpu/amd/model_fxx/model_fxx_init.c
@@ -105,12 +105,12 @@ static void save_mtrr_state(struct mtrr_state *state)
 {
 	int i;
 	for (i = 0; i < MTRR_COUNT; i++) {
-		state->mtrrs[i].base = rdmsr(MTRRphysBase_MSR(i));
-		state->mtrrs[i].mask = rdmsr(MTRRphysMask_MSR(i));
+		state->mtrrs[i].base = rdmsr(MTRR_PHYS_BASE(i));
+		state->mtrrs[i].mask = rdmsr(MTRR_PHYS_MASK(i));
 	}
 	state->top_mem = rdmsr(TOP_MEM);
 	state->top_mem2 = rdmsr(TOP_MEM2);
-	state->def_type = rdmsr(MTRRdefType_MSR);
+	state->def_type = rdmsr(MTRR_DEF_TYPE_MSR);
 }
 
 static void restore_mtrr_state(struct mtrr_state *state)
@@ -119,12 +119,12 @@ static void restore_mtrr_state(struct mtrr_state *state)
 	disable_cache();
 
 	for (i = 0; i < MTRR_COUNT; i++) {
-		wrmsr(MTRRphysBase_MSR(i), state->mtrrs[i].base);
-		wrmsr(MTRRphysMask_MSR(i), state->mtrrs[i].mask);
+		wrmsr(MTRR_PHYS_BASE(i), state->mtrrs[i].base);
+		wrmsr(MTRR_PHYS_MASK(i), state->mtrrs[i].mask);
 	}
 	wrmsr(TOP_MEM, state->top_mem);
 	wrmsr(TOP_MEM2, state->top_mem2);
-	wrmsr(MTRRdefType_MSR, state->def_type);
+	wrmsr(MTRR_DEF_TYPE_MSR, state->def_type);
 
 	enable_cache();
 }
@@ -158,22 +158,22 @@ static void set_init_ecc_mtrrs(void)
 	for (i = 0; i < MTRR_COUNT; i++) {
 		msr_t zero;
 		zero.lo = zero.hi = 0;
-		wrmsr(MTRRphysBase_MSR(i), zero);
-		wrmsr(MTRRphysMask_MSR(i), zero);
+		wrmsr(MTRR_PHYS_BASE(i), zero);
+		wrmsr(MTRR_PHYS_MASK(i), zero);
 	}
 
 	/* Write back cache the first 1MB */
 	msr.hi = 0x00000000;
 	msr.lo = 0x00000000 | MTRR_TYPE_WRBACK;
-	wrmsr(MTRRphysBase_MSR(0), msr);
+	wrmsr(MTRR_PHYS_BASE(0), msr);
 	msr.hi = 0x000000ff;
 	msr.lo = ~((CONFIG_RAMTOP) - 1) | 0x800;
-	wrmsr(MTRRphysMask_MSR(0), msr);
+	wrmsr(MTRR_PHYS_MASK(0), msr);
 
 	/* Set the default type to write combining */
 	msr.hi = 0x00000000;
 	msr.lo = 0xc00 | MTRR_TYPE_WRCOMB;
-	wrmsr(MTRRdefType_MSR, msr);
+	wrmsr(MTRR_DEF_TYPE_MSR, msr);
 
 	/* Set TOP_MEM to 4G */
 	msr.hi = 0x00000001;
diff --git a/src/cpu/amd/pi/s3_resume.c b/src/cpu/amd/pi/s3_resume.c
index 943fd976e7..88b5713b3f 100644
--- a/src/cpu/amd/pi/s3_resume.c
+++ b/src/cpu/amd/pi/s3_resume.c
@@ -271,15 +271,15 @@ static void set_resume_cache(void)
 	/* Enable caching for 0 - coreboot ram using variable mtrr */
 	msr.lo = 0 | MTRR_TYPE_WRBACK;
 	msr.hi = 0;
-	wrmsr(MTRRphysBase_MSR(0), msr);
-	msr.lo = ~(CONFIG_RAMTOP - 1) | MTRRphysMaskValid;
+	wrmsr(MTRR_PHYS_BASE(0), msr);
+	msr.lo = ~(CONFIG_RAMTOP - 1) | MTRR_PHYS_MASK_VALID;
 	msr.hi = (1 << (CONFIG_CPU_ADDR_BITS - 32)) - 1;
-	wrmsr(MTRRphysMask_MSR(0), msr);
+	wrmsr(MTRR_PHYS_MASK(0), msr);
 
 	/* Set the default memory type and disable fixed and enable variable MTRRs */
 	msr.hi = 0;
 	msr.lo = (1 << 11);
-	wrmsr(MTRRdefType_MSR, msr);
+	wrmsr(MTRR_DEF_TYPE_MSR, msr);
 
 	enable_cache();
 }
diff --git a/src/cpu/amd/smm/smm_init.c b/src/cpu/amd/smm/smm_init.c
index 2e9a4c9ba3..e13f24f03a 100644
--- a/src/cpu/amd/smm/smm_init.c
+++ b/src/cpu/amd/smm/smm_init.c
@@ -39,7 +39,7 @@ void smm_init(void)
 
 	/* Back up MSRs for later restore */
 	syscfg_orig = rdmsr(SYSCFG_MSR);
-	mtrr_aseg_orig = rdmsr(MTRRfix16K_A0000_MSR);
+	mtrr_aseg_orig = rdmsr(MTRR_FIX_16K_A0000);
 
 	/* MTRR changes don't like an enabled cache */
 	disable_cache();
@@ -57,7 +57,7 @@ void smm_init(void)
 		/* set DRAM access to 0xa0000 */
 		msr.lo = 0x18181818;
 		msr.hi = 0x18181818;
-		wrmsr(MTRRfix16K_A0000_MSR, msr);
+		wrmsr(MTRR_FIX_16K_A0000, msr);
 
 		/* enable the extended features */
 		msr = syscfg_orig;
@@ -73,7 +73,7 @@ void smm_init(void)
 
 		/* Restore SYSCFG and MTRR */
 		wrmsr(SYSCFG_MSR, syscfg_orig);
-		wrmsr(MTRRfix16K_A0000_MSR, mtrr_aseg_orig);
+		wrmsr(MTRR_FIX_16K_A0000, mtrr_aseg_orig);
 		enable_cache();
 
 		/* CPU MSR are set in CPU init */