author:    Elyes HAOUAS <ehaouas@noos.fr>  2016-08-25 21:07:59 +0200
committer: Martin Roth <martinroth@google.com>  2016-08-31 20:09:42 +0200
commit:    4a83f1cf24b793db40606febb8e27cee90452590 (patch)
tree:      74943b190d2aa7c97da72d59f816157d4bc947e9 /src/soc/broadcom
parent:    3c80408fc8aa7b4099493acd7420f8d62ce65a48 (diff)
download:  coreboot-4a83f1cf24b793db40606febb8e27cee90452590.tar.xz
src/soc: Add required space before opening parenthesis '('
Change-Id: Ifc47f103492a2cd6c818dfd64be971d34afbe0a4
Signed-off-by: Elyes HAOUAS <ehaouas@noos.fr>
Reviewed-on: https://review.coreboot.org/16324
Tested-by: build bot (Jenkins)
Reviewed-by: Martin Roth <martinroth@google.com>
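The change mechanically inserts a space between C keywords (if, for, while, do ... while, switch) and the opening parenthesis, per coreboot's coding style; function calls such as reg32_read() are left untouched. A minimal before/after sketch of the pattern (illustrative only, mirroring the hunks below):

-	if(status)
+	if (status)
 		goto done;
-	while(count--)
+	while (count--)
 		__udelay(2);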
Diffstat (limited to 'src/soc/broadcom')
-rw-r--r--  src/soc/broadcom/cygnus/ddr_init.c     | 112
-rw-r--r--  src/soc/broadcom/cygnus/shmoo_and28.c  | 530
-rw-r--r--  src/soc/broadcom/cygnus/ydc_ddr_bist.c |  12
3 files changed, 327 insertions, 327 deletions
diff --git a/src/soc/broadcom/cygnus/ddr_init.c b/src/soc/broadcom/cygnus/ddr_init.c
index 7fa2a568b1..c31635c955 100644
--- a/src/soc/broadcom/cygnus/ddr_init.c
+++ b/src/soc/broadcom/cygnus/ddr_init.c
@@ -315,7 +315,7 @@ int cygnus_phy_powerup(void)
data = reg32_read((volatile uint32_t *)CRMU_DDR_PHY_AON_CTRL);
- if(reg32_read((volatile uint32_t *)CRMU_IHOST_POR_WAKEUP_FLAG)==0)
+ if (reg32_read((volatile uint32_t *)CRMU_IHOST_POR_WAKEUP_FLAG)==0)
{
/* Step 1: POWRON */
data = reg32_read((volatile uint32_t *)CRMU_DDR_PHY_AON_CTRL);
@@ -328,7 +328,7 @@ int cygnus_phy_powerup(void)
data |= 0x10;// assert power OK
reg32_write((volatile uint32_t *)CRMU_DDR_PHY_AON_CTRL, data);
- while(count--)
+ while (count--)
__udelay(2);
}
@@ -350,10 +350,10 @@ int cygnus_phy_powerup(void)
reg32_write((volatile uint32_t *)CRMU_DDR_PHY_AON_CTRL, data);
count = 20;
- while(count--)
+ while (count--)
__udelay(2);
- if(reg32_read((volatile uint32_t *)CRMU_IHOST_POR_WAKEUP_FLAG)==0)
+ if (reg32_read((volatile uint32_t *)CRMU_IHOST_POR_WAKEUP_FLAG)==0)
{
/* Step 5: release reset */
data |= 0x20;// de-assert reset
@@ -363,7 +363,7 @@ int cygnus_phy_powerup(void)
{
printk(BIOS_INFO, "DeepSleep wakeup: ddr phy init bypassed 2\n");
}
- while((reg32_read((volatile uint32_t *)DDR_S1_IDM_IO_STATUS) & 0x08) != 0x08) {
+ while ((reg32_read((volatile uint32_t *)DDR_S1_IDM_IO_STATUS) & 0x08) != 0x08) {
//poll DDR_S1_IDM_IO_STATUS__o_phy_pwrup_rsb
}
@@ -381,14 +381,14 @@ void dump_phy_regs(void)
{
int i;
printk(BIOS_DEBUG, "\n PHY register dump: Control registers\n");
- for(i = 0; i <= 0x94; i+=4)
+ for (i = 0; i <= 0x94; i+=4)
{
printk(BIOS_DEBUG, "0x%03x,\t0x%08x,\n", i,
*(volatile uint32_t *)(DDR_PHY_CONTROL_REGS_REVISION + i));
}
printk(BIOS_DEBUG, "\n PHY register dump: Wordlane0 registers\n");
- for(i = 0; i <= 0xc5; i+=4)
+ for (i = 0; i <= 0xc5; i+=4)
{
printk(BIOS_DEBUG, "0x%03x,\t0x%08x,\n", i,
*(volatile uint32_t *)(DDR_PHY_BYTE_LANE_0_VDL_CONTROL_WR_DQS_P + i));
@@ -402,7 +402,7 @@ void ddr_init_regs(unsigned int * tblptr)
unsigned int offset = *tblptr;
unsigned int *addr = (unsigned int *)DDR_DENALI_CTL_00;
- while(offset != 0xffffffff) {
+ while (offset != 0xffffffff) {
++tblptr;
#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
addr[offset] = *tblptr;
@@ -420,7 +420,7 @@ void ddr_phy_ctl_regs_ovrd(unsigned int * tblptr)
unsigned int *addr = (unsigned int *)DDR_PHY_CONTROL_REGS_REVISION;
unsigned int val;
- while(offset != 0xffffffff) {
+ while (offset != 0xffffffff) {
++tblptr;
#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
addr[offset/4] = *tblptr;
@@ -459,7 +459,7 @@ int ReWriteModeRegisters( void )
break;
}
--j;
- } while( j );
+ } while ( j );
if ( j == 0 && (reg32_read( (volatile uint32_t *)DDR_DENALI_CTL_89) & (1 << 18) ) == 0 ) {
printk(BIOS_ERR, "Error: DRAM mode registers write failed\n");
@@ -512,7 +512,7 @@ static int simple_memory_test(void *start, uint32_t len)
paddr = (volatile uint32_t *)start;
rand_c_value = RAND_C_INIT;
rand_t_value = RAND_T_INIT;
- for(i=0; i<len; i++, paddr++) {
+ for (i=0; i<len; i++, paddr++) {
rand_c_value *= RAND_MAGIC_3;
rand_t_value ^= rand_t_value >> 15;
rand_t_value ^= rand_t_value << 17;
@@ -523,7 +523,7 @@ static int simple_memory_test(void *start, uint32_t len)
paddr = (volatile uint32_t *)start;
rand_c_value = RAND_C_INIT;
rand_t_value = RAND_T_INIT;
- for(i=0; i<len; i++, paddr++) {
+ for (i=0; i<len; i++, paddr++) {
rand_c_value *= RAND_MAGIC_3;
rand_t_value ^= rand_t_value >> 15;
rand_t_value ^= rand_t_value << 17;
@@ -726,7 +726,7 @@ static int write_shmoo_to_flash(void *buf, int length)
/* Check if erasing is required */
flptr = (volatile uint32_t *)(IPROC_QSPI_MEM_BASE + offset / 4 * 4);
j = (length - 1) / 4 + 1;
- for(i=0; i<j; i++, flptr++) {
+ for (i=0; i<j; i++, flptr++) {
if (*flptr != 0xFFFFFFFF) {
erase = 1;
break;
@@ -785,7 +785,7 @@ static int write_shmoo_to_flash(void *buf, int length)
flptr = (volatile uint32_t *)(IPROC_NOR_MEM_BASE + offset / 4 * 4);
shmoo_start = flptr;
j = (length - 1) / 4 + 1;
- for(i=0; i<j; i++, flptr++) {
+ for (i=0; i<j; i++, flptr++) {
if (*flptr != 0xFFFFFFFF) {
erase = 1;
break;
@@ -902,7 +902,7 @@ static volatile uint32_t *validate_flash_shmoo_values(struct shmoo_signature *ps
return NULL;
}
chksum = 0;
- for(i=0; i<length * 2; i++, ptr++) {
+ for (i=0; i<length * 2; i++, ptr++) {
chksum += *ptr;
}
if (chksum != checksum) {
@@ -937,13 +937,13 @@ static int try_restore_shmoo(void)
unsigned long start;
printk(BIOS_INFO, "Press Ctrl-C to run Shmoo ..... ");
start = get_timer(0);
- while(get_timer(start) <= CONFIG_SHMOO_REUSE_DELAY_MSECS) {
+ while (get_timer(start) <= CONFIG_SHMOO_REUSE_DELAY_MSECS) {
if (tstc()) {
c = getc();
if (c == 0x03) {
printk(BIOS_INFO, "Pressed.\n");
printk(BIOS_INFO, "Do you want to run the Shmoo? [y/N] ");
- for(;;) {
+ for (;;) {
c = getc();
if (c == 'y' || c == 'Y') {
printk(BIOS_INFO, "Y\n");
@@ -972,10 +972,10 @@ static int try_restore_shmoo(void)
/* Restore values from flash */
printk(BIOS_INFO, "Restoring Shmoo parameters from flash ..... ");
flptr += 5;
- for(i=0; i<pairs; i++) {
+ for (i=0; i<pairs; i++) {
reg = (uint32_t *)(*flptr++);
val = (uint32_t *)(*flptr++);
- if( (((uint32_t)reg >= DDR_PHY_WORD_LANE_0_VDL_OVRIDE_BYTE_RD_EN) && ((uint32_t)reg <= (DDR_PHY_WORD_LANE_0_VDL_OVRIDE_BYTE_RD_EN + 0x114)))
+ if ( (((uint32_t)reg >= DDR_PHY_WORD_LANE_0_VDL_OVRIDE_BYTE_RD_EN) && ((uint32_t)reg <= (DDR_PHY_WORD_LANE_0_VDL_OVRIDE_BYTE_RD_EN + 0x114)))
#if (CONFIG_CYGNUS_SHMOO_REUSE_DDR_32BIT || defined(CONFIG_NS_PLUS))
|| (((uint32_t)reg >= DDR_PHY_WORD_LANE_1_VDL_OVRIDE_BYTE_RD_EN) && ((uint32_t)reg <= (DDR_PHY_WORD_LANE_1_VDL_OVRIDE_BYTE_RD_EN + 0x114)))
#endif
@@ -1026,7 +1026,7 @@ void iproc_save_shmoo_values(void)
if (flptr != NULL) {
/* Check if the flash data are the same as current DDR PHY values */
flptr += 5;
- for(i=0; i<pairs; i++) {
+ for (i=0; i<pairs; i++) {
reg = *flptr++;
val = *flptr++;
if (val != reg32_read(reg)) {
@@ -1060,7 +1060,7 @@ void iproc_save_shmoo_values(void)
/* Copy registers and values to buffer */
chksum = 0;
- for(i=0; i<sizeof(ddr_phy_ctl_regs) / sizeof(ddr_phy_ctl_regs[0]); i++) {
+ for (i=0; i<sizeof(ddr_phy_ctl_regs) / sizeof(ddr_phy_ctl_regs[0]); i++) {
reg = (uint32_t)DDR_PHY_CONTROL_REGS_REVISION + ddr_phy_ctl_regs[i];
*ptr++ = reg;
chksum += reg;
@@ -1069,7 +1069,7 @@ void iproc_save_shmoo_values(void)
*ptr++ = val;
chksum += val;
}
- for(i=0; i<sizeof(ddr_phy_wl_regs) / sizeof(ddr_phy_wl_regs[0]); i++) {
+ for (i=0; i<sizeof(ddr_phy_wl_regs) / sizeof(ddr_phy_wl_regs[0]); i++) {
reg = (uint32_t)DDR_PHY_WORD_LANE_0_VDL_OVRIDE_BYTE_RD_EN + ddr_phy_wl_regs[i];
*ptr++ = reg;
chksum += reg;
@@ -1080,7 +1080,7 @@ void iproc_save_shmoo_values(void)
}
#if (CONFIG_CYGNUS_SHMOO_REUSE_DDR_32BIT || defined(CONFIG_NS_PLUS))
if (is_ddr_32bit()) {
- for(i=0; i<sizeof(ddr_phy_wl_regs) / sizeof(ddr_phy_wl_regs[0]); i++) {
+ for (i=0; i<sizeof(ddr_phy_wl_regs) / sizeof(ddr_phy_wl_regs[0]); i++) {
reg = (uint32_t)DDR_PHY_WORD_LANE_1_VDL_OVRIDE_BYTE_RD_EN + ddr_phy_wl_regs[i];
*ptr++ = reg;
chksum += reg;
@@ -1092,7 +1092,7 @@ void iproc_save_shmoo_values(void)
}
#endif /* (CONFIG_CYGNUS_SHMOO_REUSE_DDR_32BIT || defined(CONFIG_NS_PLUS)) */
#ifdef CONFIG_IPROC_DDR_ECC
- for(i=0; i<sizeof(ddr_phy_eccl_regs) / sizeof(ddr_phy_eccl_regs[0]); i++) {
+ for (i=0; i<sizeof(ddr_phy_eccl_regs) / sizeof(ddr_phy_eccl_regs[0]); i++) {
reg = (uint32_t)DDR_DENALI_CTL_00 + ddr_phy_eccl_regs[i];
*ptr++ = reg;
chksum += reg;
@@ -1128,7 +1128,7 @@ static int clear_ddr(uint32_t offset, uint32_t size)
reg32_write((uint32_t *)DDR_BistConfig,reg32_read((uint32_t *)DDR_BistConfig) & ~0x1);
- for( i = 0; i < 1000; i++);
+ for ( i = 0; i < 1000; i++);
#if !defined(CONFIG_IPROC_P7)
reg32_write((volatile uint32_t *)DDR_DENALI_CTL_213, 0x00FFFFFF);
@@ -1163,21 +1163,21 @@ static int clear_ddr(uint32_t offset, uint32_t size)
reg32_set_bits((volatile uint32_t *)DDR_BistConfigurations, 1 << DDR_BistConfigurations__BistEn);
start = get_timer(0);
- while(get_timer(start) <= 10000) {
- if(reg32_read((volatile uint32_t *)DDR_BistStatuses) & (1 << DDR_BistStatuses__BistFinished))
+ while (get_timer(start) <= 10000) {
+ if (reg32_read((volatile uint32_t *)DDR_BistStatuses) & (1 << DDR_BistStatuses__BistFinished))
break;
}
/* Clear BIST_EN bit */
reg32_clear_bits((volatile uint32_t *)DDR_BistConfigurations, 1 << DDR_BistConfigurations__BistEn);
- if((get_timer(start) <= 10000) &&
+ if ((get_timer(start) <= 10000) &&
(!reg32_read((volatile uint32_t *)DDR_BistErrorOccurred)))
{
printk(BIOS_INFO, "clear_ddr: OK\n");
return(0);
}
printk(BIOS_INFO, "clear_ddr: Failed: 0x%lx\n", get_timer(start));
- if(reg32_read((volatile uint32_t *)DDR_BistErrorOccurred))
+ if (reg32_read((volatile uint32_t *)DDR_BistErrorOccurred))
printk(BIOS_ERR, "clear_ddr: Error occurred\n");
return(1);
}
@@ -1198,14 +1198,14 @@ static int simple_ddr_crc32_check(void)
printk(BIOS_INFO, "Checking simple DDR CRC, word start 0x%p, len 0x%08x...\n", buf, len);
- for(offset=0; offset<len; offset++)
+ for (offset=0; offset<len; offset++)
{
crc ^= *buf++;
}
crc_mcu = reg32_read((volatile uint32_t *)0x03012A00);
- if(crc != crc_mcu)
+ if (crc != crc_mcu)
{
printk(BIOS_ERR, "DDR CRC NOT match, old=0x%08x, new=0x%08x!\n", crc_mcu, crc);
return -1;
@@ -1230,11 +1230,11 @@ void ddr_init2(void)
uint32_t pwrctli0 = reg32_read((volatile uint32_t *)IHOST_SCU_POWER_STATUS) & 0x3;
skip_shmoo = reg32_read((volatile uint32_t *)CRMU_IHOST_POR_WAKEUP_FLAG) & 0x1;
- if(pwrctli0==2)
+ if (pwrctli0==2)
{
goto wakeup;
}
- else if(pwrctli0==3)
+ else if (pwrctli0==3)
{
skip_shmoo = 1;
reg32_write((volatile uint32_t *)IHOST_GTIM_GLOB_CTRL, reg32_read((volatile uint32_t *)IHOST_GTIM_GLOB_CTRL)| 0x1);
@@ -1260,7 +1260,7 @@ void ddr_init2(void)
sku_id = (reg32_read((volatile uint32_t *)ROM_S0_IDM_IO_STATUS) >> 2) & 0x03;
#endif
/* See if it is KATANA2, KATANA2 doesn't have right chip ID in ChipcommonA_ChipID */
- if(((sku_id & 0xfff0) == 0xa450) || ((sku_id & 0xfff0) == 0xb450) || sku_id == 0xb248) {
+ if (((sku_id & 0xfff0) == 0xa450) || ((sku_id & 0xfff0) == 0xb450) || sku_id == 0xb248) {
dev_id = 56450; /* KATANA2 */
}
@@ -1290,7 +1290,7 @@ void ddr_init2(void)
printk(BIOS_INFO, "MEMC 0 DDR speed = %dMHz\n", ddr_clk);
status = change_ddr_clock(ddr_clk);
- if(status) {
+ if (status) {
printk(BIOS_INFO, "CRU LCPLL configuratioin failed\n");
goto done;
}
@@ -1326,12 +1326,12 @@ void ddr_init2(void)
reg32_write((volatile uint32_t *)CRU_ddrphy_pwr_ctrl, val);
/* Wait for PHY power up */
- for(i=0; i < 0x19000; i++) {
+ for (i=0; i < 0x19000; i++) {
val = reg32_read((volatile uint32_t *)DDR_S1_IDM_IO_STATUS);
- if((val & (1 << DDR_S1_IDM_IO_STATUS__o_phy_pwrup_rsb)))
+ if ((val & (1 << DDR_S1_IDM_IO_STATUS__o_phy_pwrup_rsb)))
break;
}
- if(i == 0x19000) {
+ if (i == 0x19000) {
printk(BIOS_ERR, "DDR PHY not power up\n");
goto done;
}
@@ -1347,7 +1347,7 @@ void ddr_init2(void)
/* Set the ddr_ck to 400 MHz, 2x memc clock */
reg32_write_masked((volatile uint32_t *)DDR_S1_IDM_IO_CONTROL_DIRECT, 0xfff << 16, /*ddr_clk*/ 0x190 << 16);
- if(pwrctli0==3)
+ if (pwrctli0==3)
{
printk(BIOS_INFO, "\n PRE_SRX call\n");
PRE_SRX();
@@ -1369,13 +1369,13 @@ void ddr_init2(void)
}
/* Wait for PHY ready */
- for(i=0; i < 0x19000; i++) {
+ for (i=0; i < 0x19000; i++) {
val = reg32_read((volatile uint32_t *)DDR_S1_IDM_IO_STATUS);
- if((val & (1 << DDR_S1_IDM_IO_STATUS__o_phy_ready)))
+ if ((val & (1 << DDR_S1_IDM_IO_STATUS__o_phy_ready)))
break; /* DDR PHY is up */
}
- if(i == 0x19000) {
+ if (i == 0x19000) {
printk(BIOS_ERR, "DDR PLL not locked\n");
goto done;
}
@@ -1385,21 +1385,21 @@ void ddr_init2(void)
#endif /* defined(CONFIG_IPROC_P7) */
/* Wait for DDR PHY up */
- for(i=0; i < 0x19000; i++) {
+ for (i=0; i < 0x19000; i++) {
val = reg32_read((volatile uint32_t *)DDR_PHY_CONTROL_REGS_REVISION);
- if( val != 0) {
+ if ( val != 0) {
printk(BIOS_INFO, "PHY revision version: 0x%08x\n", val);
break; /* DDR PHY is up */
}
}
- if(i == 0x19000) {
+ if (i == 0x19000) {
printk(BIOS_ERR, "DDR PHY is not up\n");
return;
}
#if IS_ENABLED(CONFIG_SOC_BROADCOM_CYGNUS)
- if(!skip_shmoo)
+ if (!skip_shmoo)
{
printk(BIOS_INFO, "ddr_init2: Calling soc_and28_shmoo_dram_info_set\n");
/* Cygnus clock speed:
@@ -1434,10 +1434,10 @@ void ddr_init2(void)
#endif
#if IS_ENABLED(CONFIG_SOC_BROADCOM_CYGNUS)
- if(!skip_shmoo)
+ if (!skip_shmoo)
{
printk(BIOS_INFO, "ddr_init2: Calling soc_and28_shmoo_phy_init\n");
- if(soc_and28_shmoo_phy_init(unit, 0) != SOC_E_NONE) {
+ if (soc_and28_shmoo_phy_init(unit, 0) != SOC_E_NONE) {
printk(BIOS_ERR, "DDR PHY initialization failed\n");
goto done;
@@ -1458,7 +1458,7 @@ void ddr_init2(void)
ddr_init_regs(ddr_init_tab);
ddr_type = 1;
- if(ddr_type) {
+ if (ddr_type) {
/* DDR3 */
switch(ddr_clk) {
#ifdef CONFIG_DDR333
@@ -1515,9 +1515,9 @@ void ddr_init2(void)
reg32_set_bits((volatile uint32_t *)DDR_DENALI_CTL_00, 0x01);
#if IS_ENABLED(CONFIG_SOC_BROADCOM_CYGNUS)
- if(!skip_shmoo)
+ if (!skip_shmoo)
{
- while(!(reg32_read((volatile uint32_t *)DDR_DENALI_CTL_175) & 0x100));
+ while (!(reg32_read((volatile uint32_t *)DDR_DENALI_CTL_175) & 0x100));
printk(BIOS_INFO, "ddr_init2: MemC initialization complete\n");
reg32_set_bits((unsigned int *)DDR_DENALI_CTL_177, 0x00100);
@@ -1607,7 +1607,7 @@ void ddr_init2(void)
#if IS_ENABLED(CONFIG_SOC_BROADCOM_CYGNUS)
/* SRX */
- if(skip_shmoo)
+ if (skip_shmoo)
{
// Enter Self refresh (dummy) , to keep Denali happy
reg32_write((unsigned int *)DDR_DENALI_CTL_56, 0x0a050505);
@@ -1629,19 +1629,19 @@ void ddr_init2(void)
// iproc_dump_ddr_regs();
- if(pwrctli0==0)
+ if (pwrctli0==0)
goto done;
wakeup:
printk(BIOS_INFO, "Wakeup from %s\n", pwrctli0==2 ? "SLEEP":"DEEPSLEEP");
- if(pwrctli0==3)
+ if (pwrctli0==3)
{
__udelay(10000);
- if(simple_ddr_crc32_check()<0)
+ if (simple_ddr_crc32_check()<0)
{
printk(BIOS_INFO, "Die...\n");
- while(1);
+ while (1);
}
}
diff --git a/src/soc/broadcom/cygnus/shmoo_and28.c b/src/soc/broadcom/cygnus/shmoo_and28.c
index aba3cf3990..7038a310a4 100644
--- a/src/soc/broadcom/cygnus/shmoo_and28.c
+++ b/src/soc/broadcom/cygnus/shmoo_and28.c
@@ -153,7 +153,7 @@ _run_bist(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, and28_shmoo_err
case SHMOO_AND28_RD_EN:
SOC_IF_ERROR_RETURN(soc_ydc_ddr_bist_run(unit, phy_ndx, &be));
- if(shmoo_dram_info_ptr->interface_bitwidth == 16)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 16)
{
(*seaPtr)[0] = (((be.bist_err_occur) >> 16) | (be.bist_err_occur)) & 0xFFFF;
}
@@ -165,7 +165,7 @@ _run_bist(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, and28_shmoo_err
case SHMOO_AND28_RD_EXTENDED:
SOC_IF_ERROR_RETURN(soc_ydc_ddr_bist_run(unit, phy_ndx, &be));
- if(shmoo_dram_info_ptr->interface_bitwidth == 16)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 16)
{
(*seaPtr)[0] = (((be.bist_err_occur) >> 16) | (be.bist_err_occur)) & 0xFFFF;
}
@@ -177,7 +177,7 @@ _run_bist(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, and28_shmoo_err
case SHMOO_AND28_WR_EXTENDED:
SOC_IF_ERROR_RETURN(soc_ydc_ddr_bist_run(unit, phy_ndx, &be));
- if(shmoo_dram_info_ptr->interface_bitwidth == 16)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 16)
{
(*seaPtr)[0] = (((be.bist_err_occur) >> 16) | (be.bist_err_occur)) & 0xFFFF;
}
@@ -189,7 +189,7 @@ _run_bist(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, and28_shmoo_err
case SHMOO_AND28_ADDR_EXTENDED:
SOC_IF_ERROR_RETURN(soc_ydc_ddr_bist_run(unit, phy_ndx, &be));
- if(shmoo_dram_info_ptr->interface_bitwidth == 16)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 16)
{
(*seaPtr)[0] = (((be.bist_err_occur) >> 16) | (be.bist_err_occur)) & 0xFFFF;
}
@@ -201,7 +201,7 @@ _run_bist(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, and28_shmoo_err
case SHMOO_AND28_CTRL_EXTENDED:
SOC_IF_ERROR_RETURN(soc_ydc_ddr_bist_run(unit, phy_ndx, &be));
- if(shmoo_dram_info_ptr->interface_bitwidth == 16)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 16)
{
(*seaPtr)[0] = (((be.bist_err_occur) >> 16) | (be.bist_err_occur)) & 0xFFFF;
}
@@ -242,7 +242,7 @@ _shmoo_and28_rd_en(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
_initialize_bist(unit, phy_ndx, -1, scPtr);
- for(y = yCapMin; y < yCapMax; y++)
+ for (y = yCapMin; y < yCapMax; y++)
{
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_DQ0P, FORCE, 1);
@@ -292,8 +292,8 @@ _shmoo_and28_rd_en(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EDCN, data);
#endif
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_DQ0P, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_DQ0N, data);
@@ -342,7 +342,7 @@ _shmoo_and28_rd_en(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
}
#endif
- for(x = 0; x < (*scPtr).sizeX; x++)
+ for (x = 0; x < (*scPtr).sizeX; x++)
{
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
@@ -353,8 +353,8 @@ _shmoo_and28_rd_en(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_VDL_CONTROL_RD_EN_CS1, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS1, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_EN_CS0, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_VDL_CONTROL_RD_EN_CS0, data);
@@ -368,8 +368,8 @@ _shmoo_and28_rd_en(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_READ_FIFO_CLEAR, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_READ_FIFO_CLEAR, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_READ_FIFO_CLEAR, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_READ_FIFO_CLEAR, data);
@@ -404,7 +404,7 @@ _shmoo_and28_wr_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
_initialize_bist(unit, phy_ndx, -1, scPtr);
- for(x = 0; x < (*scPtr).sizeX; x++)
+ for (x = 0; x < (*scPtr).sizeX; x++)
{
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_WR_DQ0, FORCE, 1);
@@ -435,8 +435,8 @@ _shmoo_and28_wr_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_WR_EDC, data);
#endif
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_WR_DQ0, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_WR_DQ1, data);
@@ -470,8 +470,8 @@ _shmoo_and28_wr_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_READ_FIFO_CLEAR, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_READ_FIFO_CLEAR, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_READ_FIFO_CLEAR, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_READ_FIFO_CLEAR, data);
@@ -499,7 +499,7 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
uint32 data, temp;
uint32 rd_dqs_pos0, rd_dqs_pos1, rd_en_pos0, rd_en_pos1, rd_dqs_delta0, rd_dqs_delta1;
uint32 rd_dq_fail_count0, rd_dq_fail_count1;
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
uint32 rd_dqs_pos2, rd_dqs_pos3, rd_en_pos2, rd_en_pos3, rd_dqs_delta2, rd_dqs_delta3;
uint32 rd_dq_fail_count2, rd_dq_fail_count3;
#endif
@@ -522,8 +522,8 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_READ(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_DQSP, &data);
rd_dqs_pos1 = DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_DQSP, VDL_STEP);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_READ(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_DQSP, &data);
rd_dqs_pos2 = DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_DQSP, VDL_STEP);
@@ -539,8 +539,8 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_READ(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS0, &data);
rd_en_pos1 = DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_EN_CS0, VDL_STEP);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_READ(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_EN_CS0, &data);
rd_en_pos2 = DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, VDL_STEP);
@@ -562,8 +562,8 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_DQSP, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_DQSN, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_DQSP, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_DQSN, data);
@@ -575,8 +575,8 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
rd_dqs_delta0 = x - rd_dqs_pos0;
rd_dqs_delta1 = x - rd_dqs_pos1;
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
rd_dqs_delta2 = x - rd_dqs_pos2;
rd_dqs_delta3 = x - rd_dqs_pos3;
@@ -586,11 +586,11 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos0 + rd_dqs_delta0;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -604,11 +604,11 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos1 + rd_dqs_delta1;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -619,17 +619,17 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS0, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS1, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
temp = rd_en_pos2 + rd_dqs_delta2;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -643,11 +643,11 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos3 + rd_dqs_delta3;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_3, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_3, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_3, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -662,20 +662,20 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
rd_dq_fail_count0 = 0;
rd_dq_fail_count1 = 0;
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
rd_dq_fail_count2 = 0;
rd_dq_fail_count3 = 0;
}
#endif
- for(x = 0; x < (*scPtr).sizeX; x++)
+ for (x = 0; x < (*scPtr).sizeX; x++)
{
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_DQ0P, FORCE, 1);
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_DQ0P, VDL_STEP, x);
- if(rd_dq_fail_count0 <= SHMOO_AND28_RD_DQ_FAIL_CAP)
+ if (rd_dq_fail_count0 <= SHMOO_AND28_RD_DQ_FAIL_CAP)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_VDL_CONTROL_RD_DQ0P, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_VDL_CONTROL_RD_DQ0N, data);
@@ -700,7 +700,7 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_VDL_CONTROL_RD_EDCN, data);
#endif
}
- if(rd_dq_fail_count1 <= SHMOO_AND28_RD_DQ_FAIL_CAP)
+ if (rd_dq_fail_count1 <= SHMOO_AND28_RD_DQ_FAIL_CAP)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_DQ0P, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_DQ0N, data);
@@ -726,10 +726,10 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
#endif
}
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
- if(rd_dq_fail_count2 <= SHMOO_AND28_RD_DQ_FAIL_CAP)
+ if (rd_dq_fail_count2 <= SHMOO_AND28_RD_DQ_FAIL_CAP)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_DQ0P, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_DQ0N, data);
@@ -754,7 +754,7 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_EDCN, data);
#endif
}
- if(rd_dq_fail_count3 <= SHMOO_AND28_RD_DQ_FAIL_CAP)
+ if (rd_dq_fail_count3 <= SHMOO_AND28_RD_DQ_FAIL_CAP)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_VDL_CONTROL_RD_DQ0P, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_VDL_CONTROL_RD_DQ0N, data);
@@ -787,8 +787,8 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_READ_FIFO_CLEAR, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_READ_FIFO_CLEAR, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_READ_FIFO_CLEAR, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_READ_FIFO_CLEAR, data);
@@ -799,35 +799,35 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
_run_bist(unit, phy_ndx, scPtr, &sea);
- if((sea[0] & 0x000000FF) == 0x000000FF)
+ if ((sea[0] & 0x000000FF) == 0x000000FF)
{
rd_dq_fail_count0++;
}
- if((sea[0] & 0x0000FF00) == 0x0000FF00)
+ if ((sea[0] & 0x0000FF00) == 0x0000FF00)
{
rd_dq_fail_count1++;
}
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
- if((sea[0] & 0x00FF0000) == 0x00FF0000)
+ if ((sea[0] & 0x00FF0000) == 0x00FF0000)
{
rd_dq_fail_count2++;
}
- if((sea[0] & 0xFF000000) == 0xFF000000)
+ if ((sea[0] & 0xFF000000) == 0xFF000000)
{
rd_dq_fail_count3++;
}
}
#endif
- if((rd_dq_fail_count0 > SHMOO_AND28_RD_DQ_FAIL_CAP) && (rd_dq_fail_count1 > SHMOO_AND28_RD_DQ_FAIL_CAP))
+ if ((rd_dq_fail_count0 > SHMOO_AND28_RD_DQ_FAIL_CAP) && (rd_dq_fail_count1 > SHMOO_AND28_RD_DQ_FAIL_CAP))
{
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
- if((rd_dq_fail_count2 > SHMOO_AND28_RD_DQ_FAIL_CAP) && (rd_dq_fail_count3 > SHMOO_AND28_RD_DQ_FAIL_CAP))
+ if ((rd_dq_fail_count2 > SHMOO_AND28_RD_DQ_FAIL_CAP) && (rd_dq_fail_count3 > SHMOO_AND28_RD_DQ_FAIL_CAP))
{
break;
}
@@ -842,14 +842,14 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
}
}
- for(y = yCapMin; y < yCapMax; y++)
+ for (y = yCapMin; y < yCapMax; y++)
{
DDR_PHY_REG_READ(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_CONTROL_REGS_VREF_DAC_CONTROL, &data);
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, VREF_DAC_CONTROL, DAC0, y);
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, VREF_DAC_CONTROL, DAC1, y);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_CONTROL_REGS_VREF_DAC_CONTROL, data);
- for(x = 0; x < (*scPtr).sizeX; x++)
+ for (x = 0; x < (*scPtr).sizeX; x++)
{
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_DQSP, FORCE, 1);
@@ -860,8 +860,8 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_DQSP, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_DQSN, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_DQSP, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_DQSN, data);
@@ -873,8 +873,8 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
rd_dqs_delta0 = x - rd_dqs_pos0;
rd_dqs_delta1 = x - rd_dqs_pos1;
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
rd_dqs_delta2 = x - rd_dqs_pos2;
rd_dqs_delta3 = x - rd_dqs_pos3;
@@ -884,11 +884,11 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos0 + rd_dqs_delta0;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -902,11 +902,11 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos1 + rd_dqs_delta1;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -917,17 +917,17 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS0, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS1, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
temp = rd_en_pos2 + rd_dqs_delta2;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -941,11 +941,11 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos3 + rd_dqs_delta3;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_3, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_3, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_3, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -963,8 +963,8 @@ _shmoo_and28_rd_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_READ_FIFO_CLEAR, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_READ_FIFO_CLEAR, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_READ_FIFO_CLEAR, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_READ_FIFO_CLEAR, data);
@@ -999,7 +999,7 @@ _shmoo_and28_addr_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr
_initialize_bist(unit, phy_ndx, -1, scPtr);
- for(x = 0; x < (*scPtr).sizeX; x++)
+ for (x = 0; x < (*scPtr).sizeX; x++)
{
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, VDL_CONTROL_AD00, FORCE, 1);
@@ -1021,8 +1021,8 @@ _shmoo_and28_addr_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_READ_FIFO_CLEAR, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_READ_FIFO_CLEAR, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_READ_FIFO_CLEAR, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_READ_FIFO_CLEAR, data);
@@ -1054,7 +1054,7 @@ _shmoo_and28_ctrl_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr
_initialize_bist(unit, phy_ndx, -1, scPtr);
- for(x = 0; x < (*scPtr).sizeX; x++)
+ for (x = 0; x < (*scPtr).sizeX; x++)
{
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, VDL_CONTROL_AD00, FORCE, 1);
@@ -1087,8 +1087,8 @@ _shmoo_and28_ctrl_extended(int unit, int phy_ndx, and28_shmoo_container_t *scPtr
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_READ_FIFO_CLEAR, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_READ_FIFO_CLEAR, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_READ_FIFO_CLEAR, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_READ_FIFO_CLEAR, data);
@@ -1118,7 +1118,7 @@ _shmoo_and28_do(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
case SHMOO_AND28_ADDR_EXTENDED:
return _shmoo_and28_addr_extended(unit, phy_ndx, scPtr);
case SHMOO_AND28_CTRL_EXTENDED:
- if(!SHMOO_AND28_QUICK_SHMOO_CTRL_EXTENDED)
+ if (!SHMOO_AND28_QUICK_SHMOO_CTRL_EXTENDED)
{
return _shmoo_and28_ctrl_extended(unit, phy_ndx, scPtr);
}
@@ -1187,15 +1187,15 @@ _calib_2D(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32 calibMod
return SOC_E_FAIL;
}
- for(i = 0; i < iter; i++)
+ for (i = 0; i < iter; i++)
{
(*scPtr).resultData[i] = 0;
maxPassLengthArray[i] = 0;
}
- for(y = yCapMin; y < yCapMax; y++)
+ for (y = yCapMin; y < yCapMax; y++)
{
- for(i = 0; i < iter; i++)
+ for (i = 0; i < iter; i++)
{
passStart = -1;
failStart = -1;
@@ -1205,20 +1205,20 @@ _calib_2D(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32 calibMod
maxPassStart = -2;
maxPassLength = -2;
maxMidPointX = -2;
- for(x = calibStart; x < sizeX; x++)
+ for (x = calibStart; x < sizeX; x++)
{
- if(((*scPtr).result2D[xStart + x] >> (i << shiftAmount)) & dataMask)
+ if (((*scPtr).result2D[xStart + x] >> (i << shiftAmount)) & dataMask)
{ /* FAIL */
- if(failStart < 0) {
+ if (failStart < 0) {
failStart = x;
- if(maxPassLength < passLength)
+ if (maxPassLength < passLength)
{
maxPassStart = passStart;
maxPassLength = passLength;
}
passStart = -1;
passLength = -1;
- if((failStartSeen < 0) && (maxPassLength > 0))
+ if ((failStartSeen < 0) && (maxPassLength > 0))
{
failStartSeen = x;
}
@@ -1226,12 +1226,12 @@ _calib_2D(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32 calibMod
}
else
{ /* PASS */
- if(passStart < 0)
+ if (passStart < 0)
{
passStart = x;
passLength = 1;
failStart = -1;
- if((passStartSeen < 0) && (passLength < x))
+ if ((passStartSeen < 0) && (passLength < x))
{
passStartSeen = x;
}
@@ -1241,9 +1241,9 @@ _calib_2D(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32 calibMod
passLength++;
}
- if(x == sizeX - 1)
+ if (x == sizeX - 1)
{
- if(maxPassLength < passLength)
+ if (maxPassLength < passLength)
{
maxPassStart = passStart;
maxPassLength = passLength;
@@ -1255,7 +1255,7 @@ _calib_2D(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32 calibMod
switch (calibPos) {
case SHMOO_AND28_CALIB_FAIL_START:
case SHMOO_AND28_CALIB_RISING_EDGE:
- if(failStartSeen > 0)
+ if (failStartSeen > 0)
{
maxMidPointX = failStartSeen;
(*scPtr).resultData[i] = (y << 16) | (maxMidPointX & 0xFFFF);
@@ -1263,14 +1263,14 @@ _calib_2D(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32 calibMod
break;
case SHMOO_AND28_CALIB_PASS_START:
case SHMOO_AND28_CALIB_FALLING_EDGE:
- if(passStartSeen > 0)
+ if (passStartSeen > 0)
{
maxMidPointX = passStartSeen;
(*scPtr).resultData[i] = (y << 16) | (maxMidPointX & 0xFFFF);
}
break;
case SHMOO_AND28_CALIB_CENTER_PASS:
- if((maxPassLength > 0) && (maxPassLengthArray[i] < maxPassLength))
+ if ((maxPassLength > 0) && (maxPassLengthArray[i] < maxPassLength))
{
maxMidPointX = (maxPassStart + maxPassStart + maxPassLength) >> 1;
(*scPtr).resultData[i] = (y << 16) | (maxMidPointX & 0xFFFF);
@@ -1308,7 +1308,7 @@ _shmoo_and28_calib_2D(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
case SHMOO_AND28_ADDR_EXTENDED:
return _calib_2D(unit, phy_ndx, scPtr, SHMOO_AND28_WORD, SHMOO_AND28_CALIB_CENTER_PASS);
case SHMOO_AND28_CTRL_EXTENDED:
- if(!SHMOO_AND28_QUICK_SHMOO_CTRL_EXTENDED)
+ if (!SHMOO_AND28_QUICK_SHMOO_CTRL_EXTENDED)
{
return _calib_2D(unit, phy_ndx, scPtr, SHMOO_AND28_WORD, SHMOO_AND28_CALIB_CENTER_PASS);
}
@@ -1328,7 +1328,7 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
uint32 val, yVal;
uint32 data, temp;
uint32 rd_dqs_pos0, rd_dqs_pos1, rd_en_pos0, rd_en_pos1, rd_dqs_delta0, rd_dqs_delta1;
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
uint32 rd_dqs_pos2, rd_dqs_pos3, rd_en_pos2, rd_en_pos3, rd_dqs_delta2, rd_dqs_delta3;
#endif
@@ -1357,8 +1357,8 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS0, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS1, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
@@ -1388,8 +1388,8 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS0, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS1, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
@@ -1414,8 +1414,8 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS0, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS1, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_EN_CS0, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_EN_CS1, data);
@@ -1438,8 +1438,8 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_READ(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_DQSP, &data);
rd_dqs_pos1 = DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_DQSP, VDL_STEP);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_READ(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_DQSP, &data);
rd_dqs_pos2 = DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_DQSP, VDL_STEP);
@@ -1455,8 +1455,8 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_READ(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS0, &data);
rd_en_pos1 = DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_EN_CS0, VDL_STEP);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_READ(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_EN_CS0, &data);
rd_en_pos2 = DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, VDL_STEP);
@@ -1484,11 +1484,11 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos0 + rd_dqs_delta0;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -1511,11 +1511,11 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos1 + rd_dqs_delta1;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -1526,8 +1526,8 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS0, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS1, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
val = (*scPtr).resultData[2] & 0xFFFF;
rd_dqs_delta2 = val - rd_dqs_pos2;
@@ -1541,11 +1541,11 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos2 + rd_dqs_delta2;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -1568,11 +1568,11 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos3 + rd_dqs_delta3;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_3, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_3, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_3, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -1585,8 +1585,8 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
}
#endif
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
yVal = ((((*scPtr).resultData[0] >> 16) & 0xFFFF) + (((*scPtr).resultData[1] >> 16) & 0xFFFF)
+ (((*scPtr).resultData[2] >> 16) & 0xFFFF) + (((*scPtr).resultData[3] >> 16) & 0xFFFF)) >> 2;
@@ -1621,11 +1621,11 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos0 + rd_dqs_delta0;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -1639,11 +1639,11 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos1 + rd_dqs_delta1;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -1654,8 +1654,8 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS0, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS1, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
val = (*scPtr).resultData[1] & 0xFFFF;
rd_dqs_delta2 = val - rd_dqs_pos2;
@@ -1672,11 +1672,11 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos2 + rd_dqs_delta2;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -1690,11 +1690,11 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos3 + rd_dqs_delta3;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_3, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_3, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_3, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -1707,8 +1707,8 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
}
#endif
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
yVal = ((((*scPtr).resultData[0] >> 16) & 0xFFFF) + (((*scPtr).resultData[1] >> 16) & 0xFFFF)) >> 1;
}
@@ -1742,11 +1742,11 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos0 + rd_dqs_delta0;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -1760,11 +1760,11 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos1 + rd_dqs_delta1;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_1, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -1775,8 +1775,8 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS0, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS1, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
rd_dqs_delta2 = val - rd_dqs_pos2;
rd_dqs_delta3 = val - rd_dqs_pos3;
@@ -1789,11 +1789,11 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos2 + rd_dqs_delta2;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -1807,11 +1807,11 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
temp = rd_en_pos3 + rd_dqs_delta3;
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_3, VDL_CONTROL_RD_EN_CS0, FORCE, 1);
- if(temp & 0x80000000)
+ if (temp & 0x80000000)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_3, VDL_CONTROL_RD_EN_CS0, VDL_STEP, 0);
}
- else if(temp >= SHMOO_AND28_MAX_VDL_LENGTH)
+ else if (temp >= SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_3, VDL_CONTROL_RD_EN_CS0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -1878,8 +1878,8 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_WR_EDC, data);
#endif
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_WR_DQ0, FORCE, 1);
@@ -1949,8 +1949,8 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_WR_EDC, data);
#endif
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_2, VDL_CONTROL_WR_DQ0, FORCE, 1);
@@ -2015,8 +2015,8 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_WR_EDC, data);
#endif
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_WR_DQ0, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_WR_DQ1, data);
@@ -2087,7 +2087,7 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
}
break;
case SHMOO_AND28_CTRL_EXTENDED:
- if(SHMOO_AND28_QUICK_SHMOO_CTRL_EXTENDED)
+ if (SHMOO_AND28_QUICK_SHMOO_CTRL_EXTENDED)
{
DDR_PHY_REG_READ(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_CONTROL_REGS_VDL_CONTROL_AD00, &data);
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, VDL_CONTROL_AD00, FORCE, 1);
@@ -2172,8 +2172,8 @@ _shmoo_and28_set_new_step(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_READ_FIFO_CLEAR, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_READ_FIFO_CLEAR, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_READ_FIFO_CLEAR, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_READ_FIFO_CLEAR, data);
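
The #if (SHMOO_AND28_PHY_BITWIDTH_IS_32) / if (shmoo_dram_info_ptr->interface_bitwidth == 32) pairs reformatted throughout this file guard byte lanes 2 and 3 twice: the preprocessor test removes the code from builds whose PHY can never be 32 bits wide, and the runtime test skips it when a 32-bit-capable build drives a 16-bit interface. A hedged sketch of the same double guard with made-up names (PHY_SUPPORTS_32BIT, lane_write):

    #include <stdint.h>
    #include <stdio.h>

    #define PHY_SUPPORTS_32BIT 1   /* assumed compile-time capability switch */

    struct dram_info {
        uint32_t interface_bitwidth;   /* 16 or 32, known at runtime */
    };

    static void lane_write(int lane, uint32_t data)
    {
        /* Stand-in for the per-byte-lane DDR_PHY_REG_WRITE() calls. */
        printf("lane %d <- 0x%08x\n", lane, (unsigned)data);
    }

    static void clear_read_fifos(const struct dram_info *info, uint32_t data)
    {
        /* Lanes 0 and 1 exist on every interface width. */
        lane_write(0, data);
        lane_write(1, data);

    #if (PHY_SUPPORTS_32BIT)
        /* Lanes 2 and 3 are touched only when 32 data bits are really wired. */
        if (info->interface_bitwidth == 32) {
            lane_write(2, data);
            lane_write(3, data);
        }
    #endif
    }

    int main(void)
    {
        struct dram_info info = { .interface_bitwidth = 32 };
        clear_read_fifos(&info, 1);   /* assert the FIFO-clear bit on all lanes */
        return 0;
    }
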
@@ -2380,13 +2380,13 @@ _plot(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32 plotMode)
return SOC_E_FAIL;
}
/*
- if(engageUIshift)
+ if (engageUIshift)
{ */
ui = 0;
- for(x = 0; x < sizeX; x++)
+ for (x = 0; x < sizeX; x++)
{
- if((ui < SHMOO_AND28_MAX_VISIBLE_UI_COUNT) && (x > (*scPtr).endUI[ui]))
+ if ((ui < SHMOO_AND28_MAX_VISIBLE_UI_COUNT) && (x > (*scPtr).endUI[ui]))
{
str0[x] = ' ';
str1[x] = ' ';
@@ -2403,7 +2403,7 @@ _plot(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32 plotMode)
/* }
else
{
- for(x = 0; x < sizeX; x++)
+ for (x = 0; x < sizeX; x++)
{
str0[x] = '0' + (x / 100);
str1[x] = '0' + ((x % 100) / 10);
@@ -2415,13 +2415,13 @@ _plot(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32 plotMode)
str1[x] = 0;
str2[x] = 0;
- for(i = 0; i < iter; i++)
+ for (i = 0; i < iter; i++)
{
xStart = 0;
maxMidPointX = (*scPtr).resultData[i >> calibShiftAmount] & 0xFFFF;
maxMidPointY = ((*scPtr).resultData[i >> calibShiftAmount] >> 16) & 0xFFFF;
- if((sizeY > 1) || (i == 0))
+ if ((sizeY > 1) || (i == 0))
{
printf("***** Interface.......: %3d\n", phy_ndx);
printf(" **** VDL step size...: %3u.%03u ps\n", (step1000 / 1000), (step1000 % 1000));
@@ -2450,7 +2450,7 @@ _plot(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32 plotMode)
return SOC_E_FAIL;
}
- if(engageUIshift)
+ if (engageUIshift)
{
printf(" *** UI shift........: On\n");
}
@@ -2460,7 +2460,7 @@ _plot(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32 plotMode)
}
}
- if(sizeY > 1)
+ if (sizeY > 1)
{
switch(calibMode)
{
@@ -2510,26 +2510,26 @@ _plot(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32 plotMode)
printf(" %s\n", str1);
printf(" %s\n", str2);
- for(y = yCapMin; y < yCapMax; y++)
+ for (y = yCapMin; y < yCapMax; y++)
{
printf(" %03u ", y << yJump);
- for(x = 0; x < calibStart; x++)
+ for (x = 0; x < calibStart; x++)
{
printf("%s", outOfSearch);
}
- for(x = calibStart; x < sizeX; x++)
+ for (x = calibStart; x < sizeX; x++)
{
- if(((*scPtr).result2D[xStart + x] >> (i << shiftAmount)) & dataMask)
+ if (((*scPtr).result2D[xStart + x] >> (i << shiftAmount)) & dataMask)
{ /* FAIL - RISING EDGE */
- if(x != maxMidPointX)
+ if (x != maxMidPointX)
{ /* REGULAR FAIL */
printf("%s", fail_high);
}
else
{ /* FAIL - RISING EDGE */
- if((calibPos == SHMOO_AND28_CALIB_RISING_EDGE) || (calibPos == SHMOO_AND28_CALIB_FAIL_START))
+ if ((calibPos == SHMOO_AND28_CALIB_RISING_EDGE) || (calibPos == SHMOO_AND28_CALIB_FAIL_START))
{ /* RISING EDGE */
printf("X");
}
@@ -2541,13 +2541,13 @@ _plot(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32 plotMode)
}
else
{ /* PASS - MIDPOINT - FALLING EDGE */
- if(x != maxMidPointX)
+ if (x != maxMidPointX)
{ /* REGULAR PASS */
printf("%s", pass_low);
}
else
{ /* POTENTIAL MIDPOINT - FALLING EDGE */
- if(y == maxMidPointY)
+ if (y == maxMidPointY)
{ /* MID POINT - FALLING EDGE */
printf("X");
}
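
The _plot() hunks walk every (x, y) cell of the 2-D result array and print one character per VDL step: a fail character where the current bit's result is set, a pass character where it is clear, and an 'X' at the calibrated midpoint. A simplified, self-contained rendering loop in the same spirit (the result2D layout, characters, and midpoint here are illustrative, not the driver's):

    #include <stdint.h>
    #include <stdio.h>

    #define SIZE_X 32   /* VDL steps per row, kept tiny for the example */
    #define SIZE_Y 4    /* Vref steps */

    int main(void)
    {
        /* One bit per X position; a set bit means that step failed. */
        uint32_t result2D[SIZE_Y] = {
            0xFFFF000Fu, 0xFFF8001Fu, 0xFFF0003Fu, 0xFFE0007Fu
        };
        int mid_x = 10, mid_y = 2;   /* midpoint picked by the calibration pass */

        for (int y = 0; y < SIZE_Y; y++) {
            printf(" %03d ", y);
            for (int x = 0; x < SIZE_X; x++) {
                int fail = (result2D[y] >> x) & 1;

                if (x == mid_x && y == mid_y)
                    putchar('X');              /* calibrated midpoint */
                else
                    putchar(fail ? '-' : '+'); /* fail_high / pass_low stand-ins */
            }
            putchar('\n');
        }
        return 0;
    }
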
@@ -2565,7 +2565,7 @@ _plot(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32 plotMode)
}
else
{
- if(i == 0)
+ if (i == 0)
{
switch(calibMode)
{
@@ -2611,22 +2611,22 @@ _plot(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32 plotMode)
printf(" %03u ", i);
- for(x = 0; x < calibStart; x++)
+ for (x = 0; x < calibStart; x++)
{
printf("%s", outOfSearch);
}
- for(x = calibStart; x < sizeX; x++)
+ for (x = calibStart; x < sizeX; x++)
{
- if(((*scPtr).result2D[x] >> (i << shiftAmount)) & dataMask)
+ if (((*scPtr).result2D[x] >> (i << shiftAmount)) & dataMask)
{ /* FAIL - RISING EDGE */
- if(x != maxMidPointX)
+ if (x != maxMidPointX)
{ /* REGULAR FAIL */
printf("%s", fail_high);
}
else
{ /* FAIL - RISING EDGE */
- if((calibPos == SHMOO_AND28_CALIB_RISING_EDGE) || (calibPos == SHMOO_AND28_CALIB_FAIL_START))
+ if ((calibPos == SHMOO_AND28_CALIB_RISING_EDGE) || (calibPos == SHMOO_AND28_CALIB_FAIL_START))
{ /* RISING EDGE */
printf("X");
}
@@ -2638,7 +2638,7 @@ _plot(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32 plotMode)
}
else
{ /* PASS - MIDPOINT - FALLING EDGE */
- if(x != maxMidPointX)
+ if (x != maxMidPointX)
{ /* REGULAR PASS */
printf("%s", pass_low);
}
@@ -2669,7 +2669,7 @@ _shmoo_and28_plot(int unit, int phy_ndx, and28_shmoo_container_t *scPtr)
case SHMOO_AND28_ADDR_EXTENDED:
return _plot(unit, phy_ndx, scPtr, SHMOO_AND28_WORD);
case SHMOO_AND28_CTRL_EXTENDED:
- if(!SHMOO_AND28_QUICK_SHMOO_CTRL_EXTENDED)
+ if (!SHMOO_AND28_QUICK_SHMOO_CTRL_EXTENDED)
{
return _plot(unit, phy_ndx, scPtr, SHMOO_AND28_WORD);
}
@@ -2700,7 +2700,7 @@ _and28_calculate_step_size(int unit, int phy_ndx, and28_step_size_t *ssPtr)
uint32 data;
uint32 timeout;
- if(shmoo_dram_info_ptr->sim_system_mode)
+ if (shmoo_dram_info_ptr->sim_system_mode)
{
(*ssPtr).step1000 = 8000;
(*ssPtr).size1000UI = 78125;
@@ -2723,7 +2723,7 @@ _and28_calculate_step_size(int unit, int phy_ndx, and28_step_size_t *ssPtr)
{
DDR_PHY_REG_READ(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_CONTROL_REGS_VDL_CALIB_STATUS1, &data);
- if(DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, VDL_CALIB_STATUS1, CALIB_IDLE))
+ if (DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, VDL_CALIB_STATUS1, CALIB_IDLE))
{
/* printf(" VDL calibration complete.\n"); */
break;
@@ -2738,9 +2738,9 @@ _and28_calculate_step_size(int unit, int phy_ndx, and28_step_size_t *ssPtr)
timeout--;
sal_usleep(SHMOO_AND28_SHORT_SLEEP);
}
- while(TRUE);
+ while (TRUE);
- if(DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, VDL_CALIB_STATUS1, CALIB_LOCK_4B) == 0)
+ if (DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, VDL_CALIB_STATUS1, CALIB_LOCK_4B) == 0)
{
printf(" VDL calibration failed!!! (No lock)\n");
return SOC_E_FAIL;
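
The calibration hunk above is a bounded poll: read VDL_CALIB_STATUS1 inside a do/while (TRUE) loop, break on CALIB_IDLE, give up after a fixed number of iterations, then verify the lock bit separately. A generic sketch of that pattern with placeholder register accessors (read_status, STATUS_IDLE, STATUS_LOCKED and POLL_LIMIT are inventions for the example):

    #include <stdint.h>
    #include <stdio.h>

    #define STATUS_IDLE   (1u << 0)
    #define STATUS_LOCKED (1u << 1)
    #define POLL_LIMIT    1000

    /* Fake status source: pretends the hardware goes idle and locked on read 5. */
    static uint32_t read_status(void)
    {
        static int reads;
        return (++reads >= 5) ? (STATUS_IDLE | STATUS_LOCKED) : 0;
    }

    static int wait_for_calibration(void)
    {
        uint32_t data;
        int timeout = POLL_LIMIT;

        do {
            data = read_status();
            if (data & STATUS_IDLE)
                break;                       /* calibration finished */
            if (timeout == 0) {
                printf("calibration timed out\n");
                return -1;
            }
            timeout--;
            /* the driver sleeps SHMOO_AND28_SHORT_SLEEP between reads */
        } while (1);

        if (!(data & STATUS_LOCKED)) {       /* finished but did not lock */
            printf("calibration failed (no lock)\n");
            return -1;
        }
        return 0;
    }

    int main(void)
    {
        return wait_for_calibration() ? 1 : 0;
    }
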
@@ -2785,21 +2785,21 @@ _and28_zq_calibration(int unit, int phy_ndx)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_CONTROL_REGS_ZQ_CAL, data);
sal_usleep(SHMOO_AND28_SHORT_SLEEP);
- for(i = 0; i < SHMOO_AND28_MAX_ZQ_CAL_RANGE; i++)
+ for (i = 0; i < SHMOO_AND28_MAX_ZQ_CAL_RANGE; i++)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, ZQ_CAL, ZQ_DRIVE_P, i);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_CONTROL_REGS_ZQ_CAL, data);
sal_usleep(SHMOO_AND28_SHORT_SLEEP);
DDR_PHY_REG_READ(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_CONTROL_REGS_ZQ_CAL, &data);
- if(DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, ZQ_CAL, ZQ_PCOMP_STATUS))
+ if (DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, ZQ_CAL, ZQ_PCOMP_STATUS))
{
p_drive = i;
break;
}
}
- if(i == SHMOO_AND28_MAX_ZQ_CAL_RANGE)
+ if (i == SHMOO_AND28_MAX_ZQ_CAL_RANGE)
{
printf(" WARNING: ZQ calibration error (P) - Manual IO programming required for correct operation\n");
/* return SOC_E_FAIL; */
@@ -2820,21 +2820,21 @@ _and28_zq_calibration(int unit, int phy_ndx)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_CONTROL_REGS_ZQ_CAL, data);
sal_usleep(SHMOO_AND28_SHORT_SLEEP);
- for(i = 0; i < SHMOO_AND28_MAX_ZQ_CAL_RANGE; i++)
+ for (i = 0; i < SHMOO_AND28_MAX_ZQ_CAL_RANGE; i++)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, ZQ_CAL, ZQ_DRIVE_N, i);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_CONTROL_REGS_ZQ_CAL, data);
sal_usleep(SHMOO_AND28_SHORT_SLEEP);
DDR_PHY_REG_READ(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_CONTROL_REGS_ZQ_CAL, &data);
- if(DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, ZQ_CAL, ZQ_NCOMP_STATUS))
+ if (DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, ZQ_CAL, ZQ_NCOMP_STATUS))
{
n_drive = i;
break;
}
}
- if(i == SHMOO_AND28_MAX_ZQ_CAL_RANGE)
+ if (i == SHMOO_AND28_MAX_ZQ_CAL_RANGE)
{
printf(" WARNING: ZQ calibration error (N) - Manual IO programming required for correct operation\n");
/* return SOC_E_FAIL; */
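
The two ZQ hunks sweep ZQ_DRIVE_P and ZQ_DRIVE_N upward from 0, re-reading ZQ_CAL after each step until the matching comparator status bit asserts, and only warn rather than fail when the whole range is exhausted. A reduced sketch of that search, with a simulated comparator standing in for the pad hardware:

    #include <stdio.h>

    #define MAX_ZQ_CAL_RANGE 32   /* assumed stand-in for SHMOO_AND28_MAX_ZQ_CAL_RANGE */

    /* Simulated comparator: trips once the drive code reaches a threshold. */
    static int comparator_tripped(int drive_code)
    {
        return drive_code >= 11;
    }

    /* Returns the first drive code that trips the comparator, or -1 on exhaustion. */
    static int zq_sweep(void)
    {
        int i;

        for (i = 0; i < MAX_ZQ_CAL_RANGE; i++) {
            /* The driver writes ZQ_DRIVE_P/N = i and sleeps before reading back. */
            if (comparator_tripped(i))
                return i;
        }

        /* Like the code above, treat exhaustion as a warning, not a hard error. */
        printf("WARNING: ZQ calibration did not converge\n");
        return -1;
    }

    int main(void)
    {
        int p_drive = zq_sweep();

        if (p_drive >= 0)
            printf("p_drive = %d\n", p_drive);
        return 0;
    }
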
@@ -2879,8 +2879,8 @@ _and28_zq_calibration(int unit, int phy_ndx)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_DRIVE_PAD_CTL, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_DRIVE_PAD_CTL, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_DRIVE_PAD_CTL, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_DRIVE_PAD_CTL, data);
@@ -2899,8 +2899,8 @@ _and28_zq_calibration(int unit, int phy_ndx)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_DQSP_DRIVE_PAD_CTL, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_DQSN_DRIVE_PAD_CTL, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_DQSP_DRIVE_PAD_CTL, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_DQSN_DRIVE_PAD_CTL, data);
@@ -2918,8 +2918,8 @@ _and28_zq_calibration(int unit, int phy_ndx)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_ALERT_DRIVE_PAD_CTL, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_ALERT_DRIVE_PAD_CTL, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_ALERT_DRIVE_PAD_CTL, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_ALERT_DRIVE_PAD_CTL, data);
@@ -2938,8 +2938,8 @@ _and28_zq_calibration(int unit, int phy_ndx)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_RD_EN_DRIVE_PAD_CTL, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_RD_EN_DRIVE_PAD_CTL, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_RD_EN_DRIVE_PAD_CTL, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_RD_EN_DRIVE_PAD_CTL, data);
@@ -2979,7 +2979,7 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
sal_usleep(SHMOO_AND28_SHORT_SLEEP);
/*A08*/ printf("R08. ZQ calibration\n");
- /*R08*/ if(shmoo_dram_info_ptr->sim_system_mode)
+ /*R08*/ if (shmoo_dram_info_ptr->sim_system_mode)
{
printf(" Skipped for emulation\n");
@@ -2994,7 +2994,7 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
(*scPtr).step1000 = ss.step1000;
(*scPtr).size1000UI = ss.size1000UI;
temp = (ss.size1000UI * 3) / 1000; /* 300% */
- if(temp > SHMOO_AND28_MAX_VDL_LENGTH)
+ if (temp > SHMOO_AND28_MAX_VDL_LENGTH)
{
(*scPtr).sizeX = SHMOO_AND28_MAX_VDL_LENGTH;
}
@@ -3006,7 +3006,7 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
temp = (ss.size1000UI * 125) / 100000; /* 125% */
(*scPtr).yJump = 2;
temp = temp >> (*scPtr).yJump;
- if(temp > SHMOO_AND28_MAX_VREF_RANGE)
+ if (temp > SHMOO_AND28_MAX_VREF_RANGE)
{
(*scPtr).sizeY = SHMOO_AND28_MAX_VREF_RANGE;
}
@@ -3015,7 +3015,7 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
(*scPtr).sizeY = temp;
}
- for(i = 0; i < SHMOO_AND28_MAX_VISIBLE_UI_COUNT; i++)
+ for (i = 0; i < SHMOO_AND28_MAX_VISIBLE_UI_COUNT; i++)
{
(*scPtr).endUI[i] = ((i + 1) * (ss.size1000UI)) / 1000;
}
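
The entry hunks above derive the shmoo window from the measured step size: size1000UI is one unit interval expressed in thousandths of a VDL step, sizeX is a percentage of that UI clamped to SHMOO_AND28_MAX_VDL_LENGTH, sizeY comes from a 125% span coarsened by yJump and clamped to SHMOO_AND28_MAX_VREF_RANGE, and endUI[] records each UI boundary in whole steps. A small worked version of that arithmetic (the constants are illustrative, not the driver's):

    #include <stdint.h>
    #include <stdio.h>

    #define MAX_VDL_LENGTH       256u
    #define MAX_VREF_RANGE       64u
    #define MAX_VISIBLE_UI_COUNT 4u

    int main(void)
    {
        uint32_t size1000UI = 78125;  /* one UI = 78.125 VDL steps (example value) */
        uint32_t yJump = 2;
        uint32_t endUI[MAX_VISIBLE_UI_COUNT];
        uint32_t sizeX, sizeY, temp, i;

        temp = (size1000UI * 3) / 1000;                 /* 300% of a UI, in steps */
        sizeX = (temp > MAX_VDL_LENGTH) ? MAX_VDL_LENGTH : temp;

        temp = (size1000UI * 125) / 100000;             /* 125% of a UI */
        temp >>= yJump;                                 /* coarser Vref granularity */
        sizeY = (temp > MAX_VREF_RANGE) ? MAX_VREF_RANGE : temp;

        for (i = 0; i < MAX_VISIBLE_UI_COUNT; i++)
            endUI[i] = ((i + 1) * size1000UI) / 1000;   /* UI boundaries in steps */

        printf("sizeX=%u sizeY=%u endUI[0]=%u endUI[%u]=%u\n",
               (unsigned)sizeX, (unsigned)sizeY, (unsigned)endUI[0],
               (unsigned)(MAX_VISIBLE_UI_COUNT - 1),
               (unsigned)endUI[MAX_VISIBLE_UI_COUNT - 1]);
        return 0;
    }
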
@@ -3026,8 +3026,8 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_READ_CONTROL, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_READ_CONTROL, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_READ_CONTROL, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_READ_CONTROL, data);
@@ -3041,8 +3041,8 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_RD_EN_DLY_CYC, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_RD_EN_DLY_CYC, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_RD_EN_DLY_CYC, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_RD_EN_DLY_CYC, data);
@@ -3055,8 +3055,8 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_VDL_CONTROL_RD_EN_CS0, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS0, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_EN_CS0, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_VDL_CONTROL_RD_EN_CS0, data);
@@ -3069,8 +3069,8 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_VDL_CONTROL_RD_EN_CS1, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EN_CS1, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_EN_CS1, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_VDL_CONTROL_RD_EN_CS1, data);
@@ -3125,8 +3125,8 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_EDCN, data);
#endif
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_DQ0P, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_DQ0N, data);
@@ -3178,7 +3178,7 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_DQSP, FORCE, 1);
temp = (((*scPtr).size1000UI * 3) / 4000) + SHMOO_AND28_RD_DQS_VDL_OFFSET; /* 75% + Offset */
- if(temp > SHMOO_AND28_MAX_VDL_LENGTH)
+ if (temp > SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_RD_DQSP, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -3191,8 +3191,8 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_DQSP, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_RD_DQSN, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_DQSP, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_RD_DQSN, data);
@@ -3207,8 +3207,8 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_WR_CHAN_DLY_CYC, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_WR_CHAN_DLY_CYC, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_WR_CHAN_DLY_CYC, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_WR_CHAN_DLY_CYC, data);
@@ -3218,7 +3218,7 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_WR_DQ0, FORCE, 1);
temp = (*scPtr).size1000UI / 2000;
- if(temp > SHMOO_AND28_MAX_VDL_LENGTH)
+ if (temp > SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, VDL_CONTROL_WR_DQ0, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -3251,8 +3251,8 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_VDL_CONTROL_WR_EDC, data);
#endif
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_WR_DQ0, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_WR_DQ1, data);
@@ -3288,7 +3288,7 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
(*scPtr).step1000 = ss.step1000;
(*scPtr).size1000UI = ss.size1000UI;
temp = (ss.size1000UI * 125) / 100000; /* 125% */
- if(temp > SHMOO_AND28_MAX_VDL_LENGTH)
+ if (temp > SHMOO_AND28_MAX_VDL_LENGTH)
{
(*scPtr).sizeX = SHMOO_AND28_MAX_VDL_LENGTH;
}
@@ -3297,7 +3297,7 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
(*scPtr).sizeX = temp;
}
- for(i = 0; i < SHMOO_AND28_MAX_VISIBLE_UI_COUNT; i++)
+ for (i = 0; i < SHMOO_AND28_MAX_VISIBLE_UI_COUNT; i++)
{
(*scPtr).endUI[i] = ((i + 1) * (ss.size1000UI)) / 1000;
}
@@ -3307,7 +3307,7 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
(*scPtr).step1000 = ss.step1000;
(*scPtr).size1000UI = ss.size1000UI;
temp = (ss.size1000UI * 125) / 100000; /* 125% */
- if(temp > SHMOO_AND28_MAX_VDL_LENGTH)
+ if (temp > SHMOO_AND28_MAX_VDL_LENGTH)
{
(*scPtr).sizeX = SHMOO_AND28_MAX_VDL_LENGTH;
}
@@ -3316,7 +3316,7 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
(*scPtr).sizeX = temp;
}
- for(i = 0; i < SHMOO_AND28_MAX_VISIBLE_UI_COUNT; i++)
+ for (i = 0; i < SHMOO_AND28_MAX_VISIBLE_UI_COUNT; i++)
{
(*scPtr).endUI[i] = ((i + 1) * (ss.size1000UI)) / 1000;
}
@@ -3326,7 +3326,7 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
(*scPtr).step1000 = ss.step1000;
(*scPtr).size1000UI = ss.size1000UI;
temp = (ss.size1000UI * 25) / 10000; /* 250% */
- if(temp > SHMOO_AND28_MAX_VDL_LENGTH)
+ if (temp > SHMOO_AND28_MAX_VDL_LENGTH)
{
(*scPtr).sizeX = SHMOO_AND28_MAX_VDL_LENGTH;
}
@@ -3335,7 +3335,7 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
(*scPtr).sizeX = temp;
}
- for(i = 0; i < SHMOO_AND28_MAX_VISIBLE_UI_COUNT; i++)
+ for (i = 0; i < SHMOO_AND28_MAX_VISIBLE_UI_COUNT; i++)
{
(*scPtr).endUI[i] = ((i + 1) * (ss.size1000UI)) / 1000;
}
@@ -3345,7 +3345,7 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
(*scPtr).step1000 = ss.step1000;
(*scPtr).size1000UI = ss.size1000UI;
temp = (ss.size1000UI * 25) / 10000; /* 250% */
- if(temp > SHMOO_AND28_MAX_VDL_LENGTH)
+ if (temp > SHMOO_AND28_MAX_VDL_LENGTH)
{
(*scPtr).sizeX = SHMOO_AND28_MAX_VDL_LENGTH;
}
@@ -3354,7 +3354,7 @@ _shmoo_and28_entry(int unit, int phy_ndx, and28_shmoo_container_t *scPtr, uint32
(*scPtr).sizeX = temp;
}
- for(i = 0; i < SHMOO_AND28_MAX_VISIBLE_UI_COUNT; i++)
+ for (i = 0; i < SHMOO_AND28_MAX_VISIBLE_UI_COUNT; i++)
{
(*scPtr).endUI[i] = ((i + 1) * (ss.size1000UI)) / 1000;
}
@@ -3630,8 +3630,8 @@ _shmoo_and28_save(int unit, int phy_ndx, and28_shmoo_config_param_t *config_para
DDR_PHY_REG_READ(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_READ_CONTROL, &data);
(*config_param).rd_control[1] = (uint8) data;
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
#if (defined(PHY_AND28_E0) || defined(PHY_AND28_E2))
DDR_PHY_REG_READ(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_VDL_CONTROL_WR_DQS_P, &data);
@@ -4044,8 +4044,8 @@ _shmoo_and28_restore(int unit, int phy_ndx, and28_shmoo_config_param_t *config_p
data = SET_RD_CONTROL((uint32) (*config_param).rd_control[1]);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_READ_CONTROL, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
data = SET_WR_VDL_FORCE((uint32) (*config_param).wr_vdl_dqsp[2]);
#if (defined(PHY_AND28_E0) || defined(PHY_AND28_E2))
@@ -4224,8 +4224,8 @@ _shmoo_and28_restore(int unit, int phy_ndx, and28_shmoo_config_param_t *config_p
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_READ_FIFO_CLEAR, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_READ_FIFO_CLEAR, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_READ_FIFO_CLEAR, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_READ_FIFO_CLEAR, data);
@@ -4270,16 +4270,16 @@ soc_and28_shmoo_ctl(int unit, int phy_ndx, int shmoo_type, int stat, int plot, i
dramType = shmoo_dram_info_ptr->dram_type;
ctlType = shmoo_dram_info_ptr->ctl_type;
- if(!stat)
+ if (!stat)
{
scPtr = &shmoo_container;
- if(scPtr == NULL)
+ if (scPtr == NULL)
{
return SOC_E_MEMORY;
}
sal_memset(scPtr, 0, sizeof(and28_shmoo_container_t));
- if(phy_ndx != SHMOO_AND28_INTERFACE_RSVP)
+ if (phy_ndx != SHMOO_AND28_INTERFACE_RSVP)
{
ndx = phy_ndx;
ndxEnd = phy_ndx + 1;
@@ -4290,13 +4290,13 @@ soc_and28_shmoo_ctl(int unit, int phy_ndx, int shmoo_type, int stat, int plot, i
ndxEnd = SHMOO_AND28_MAX_INTERFACES;
}
- for(; ndx < ndxEnd; ndx++)
+ for (; ndx < ndxEnd; ndx++)
{
- if(!_shmoo_and28_check_dram(ndx)) {
+ if (!_shmoo_and28_check_dram(ndx)) {
continue;
}
- if(action == SHMOO_AND28_ACTION_RESTORE)
+ if (action == SHMOO_AND28_ACTION_RESTORE)
{
switch(ctlType)
{
@@ -4306,7 +4306,7 @@ soc_and28_shmoo_ctl(int unit, int phy_ndx, int shmoo_type, int stat, int plot, i
_shmoo_and28_restore(unit, phy_ndx, config_param);
break;
default:
- if(scPtr != NULL)
+ if (scPtr != NULL)
{
/* sal_free(scPtr); */
scPtr = NULL;
@@ -4316,7 +4316,7 @@ soc_and28_shmoo_ctl(int unit, int phy_ndx, int shmoo_type, int stat, int plot, i
return SOC_E_FAIL;
}
}
- else if((action == SHMOO_AND28_ACTION_RUN) || (action == SHMOO_AND28_ACTION_RUN_AND_SAVE))
+ else if ((action == SHMOO_AND28_ACTION_RUN) || (action == SHMOO_AND28_ACTION_RUN_AND_SAVE))
{
switch(ctlType)
{
@@ -4345,7 +4345,7 @@ soc_and28_shmoo_ctl(int unit, int phy_ndx, int shmoo_type, int stat, int plot, i
(*scPtr).dramType = dramType;
(*scPtr).ctlType = ctlType;
- if(shmoo_type != SHMOO_AND28_SHMOO_RSVP)
+ if (shmoo_type != SHMOO_AND28_SHMOO_RSVP)
{
(*scPtr).shmooType = shmoo_type;
_shmoo_and28_entry(unit, ndx, scPtr, SHMOO_AND28_SINGLE);
@@ -4353,7 +4353,7 @@ soc_and28_shmoo_ctl(int unit, int phy_ndx, int shmoo_type, int stat, int plot, i
_shmoo_and28_calib_2D(unit, ndx, scPtr);
_shmoo_and28_set_new_step(unit, ndx, scPtr);
#ifdef PLOT_SUPPORT
- if(plot)
+ if (plot)
{
_shmoo_and28_plot(unit, ndx, scPtr);
}
@@ -4362,7 +4362,7 @@ soc_and28_shmoo_ctl(int unit, int phy_ndx, int shmoo_type, int stat, int plot, i
}
else
{
- for(i = 0; i < seqCount; i++)
+ for (i = 0; i < seqCount; i++)
{
(*scPtr).shmooType = seqPtr[i];
_shmoo_and28_entry(unit, ndx, scPtr, SHMOO_AND28_SEQUENTIAL);
@@ -4370,7 +4370,7 @@ soc_and28_shmoo_ctl(int unit, int phy_ndx, int shmoo_type, int stat, int plot, i
_shmoo_and28_calib_2D(unit, ndx, scPtr);
_shmoo_and28_set_new_step(unit, ndx, scPtr);
#ifdef PLOT_SUPPORT
- if(plot)
+ if (plot)
{
_shmoo_and28_plot(unit, ndx, scPtr);
}
@@ -4381,7 +4381,7 @@ soc_and28_shmoo_ctl(int unit, int phy_ndx, int shmoo_type, int stat, int plot, i
break;
default:
- if(scPtr != NULL)
+ if (scPtr != NULL)
{
/* sal_free(scPtr); */
scPtr = NULL;
@@ -4392,13 +4392,13 @@ soc_and28_shmoo_ctl(int unit, int phy_ndx, int shmoo_type, int stat, int plot, i
}
}
- if((action == SHMOO_AND28_ACTION_RUN_AND_SAVE) || (action == SHMOO_AND28_ACTION_SAVE))
+ if ((action == SHMOO_AND28_ACTION_RUN_AND_SAVE) || (action == SHMOO_AND28_ACTION_SAVE))
{
_shmoo_and28_save(unit, phy_ndx, config_param);
}
}
- if(scPtr != NULL)
+ if (scPtr != NULL)
{
/* sal_free(scPtr); */
scPtr = NULL;
@@ -4428,7 +4428,7 @@ int
soc_and28_shmoo_dram_info_set(int unit, and28_shmoo_dram_info_t *sdi)
{
-#if(!SHMOO_AND28_PHY_CONSTANT_CONFIG)
+#if (!SHMOO_AND28_PHY_CONSTANT_CONFIG)
shmoo_dram_info_ptr = &shmoo_dram_info;
shmoo_dram_info_ptr->ctl_type = (*sdi).ctl_type;
shmoo_dram_info_ptr->dram_type = (*sdi).dram_type;
@@ -4458,7 +4458,7 @@ _soc_and28_shmoo_phy_cfg_pll(int unit, int phy_ndx)
uint32 pll_dividers;
uint32 pll_frac_divider;
- if(shmoo_dram_info_ptr->ref_clk_mhz != 50)
+ if (shmoo_dram_info_ptr->ref_clk_mhz != 50)
{
printf(" Unsupported reference flock frequency: %4d MHz\n", shmoo_dram_info_ptr->ref_clk_mhz);
return SOC_E_FAIL;
@@ -4499,7 +4499,7 @@ _soc_and28_shmoo_phy_cfg_pll(int unit, int phy_ndx)
printf(" Fref.............: %4d MHz\n", shmoo_dram_info_ptr->ref_clk_mhz);
printf(" Data rate........: %4d Mbps\n", shmoo_dram_info_ptr->data_rate_mbps);
- if(phy_ndx != SHMOO_AND28_INTERFACE_RSVP)
+ if (phy_ndx != SHMOO_AND28_INTERFACE_RSVP)
{
ndx = phy_ndx;
ndxEnd = phy_ndx + 1;
@@ -4510,9 +4510,9 @@ _soc_and28_shmoo_phy_cfg_pll(int unit, int phy_ndx)
ndxEnd = SHMOO_AND28_MAX_INTERFACES;
}
- for(; ndx < ndxEnd; ndx++)
+ for (; ndx < ndxEnd; ndx++)
{
- if(!_shmoo_and28_check_dram(ndx))
+ if (!_shmoo_and28_check_dram(ndx))
{
continue;
}
@@ -4531,7 +4531,7 @@ _soc_and28_shmoo_phy_cfg_pll(int unit, int phy_ndx)
{
DDR_PHY_REG_READ(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_CONTROL_REGS_PLL_STATUS, &data);
- if(DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, PLL_STATUS, LOCK))
+ if (DDR_PHY_GET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, PLL_STATUS, LOCK))
{
printf(" PLL locked.\n");
break;
@@ -4546,7 +4546,7 @@ _soc_and28_shmoo_phy_cfg_pll(int unit, int phy_ndx)
timeout--;
sal_usleep(SHMOO_AND28_SHORT_SLEEP);
}
- while(TRUE);
+ while (TRUE);
DDR_PHY_REG_READ(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_CONTROL_REGS_PLL_CONFIG, &data);
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, PLL_CONFIG, RESET_POST_DIV, 0);
@@ -4570,7 +4570,7 @@ soc_and28_shmoo_phy_init(int unit, int phy_ndx)
uint32 size1000UI, sizeUI;
and28_step_size_t ss;
- if(phy_ndx != SHMOO_AND28_INTERFACE_RSVP)
+ if (phy_ndx != SHMOO_AND28_INTERFACE_RSVP)
{
ndx = phy_ndx;
ndxEnd = phy_ndx + 1;
@@ -4581,9 +4581,9 @@ soc_and28_shmoo_phy_init(int unit, int phy_ndx)
ndxEnd = SHMOO_AND28_MAX_INTERFACES;
}
- for(; ndx < ndxEnd; ndx++)
+ for (; ndx < ndxEnd; ndx++)
{
- if(!_shmoo_and28_check_dram(ndx))
+ if (!_shmoo_and28_check_dram(ndx))
{
continue;
}
@@ -4605,7 +4605,7 @@ soc_and28_shmoo_phy_init(int unit, int phy_ndx)
sal_usleep(SHMOO_AND28_SHORT_SLEEP);
/*A02*/ printf("A02. Configure timing parameters\n");
- if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
{
switch(shmoo_dram_info_ptr->data_rate_mbps)
{
@@ -4742,7 +4742,7 @@ soc_and28_shmoo_phy_init(int unit, int phy_ndx)
/*A06*/ printf("A06. Configure ADDR/CTRL VDLs\n");
data = 0;
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, VDL_CONTROL_AD00, FORCE, 1);
- if(sizeUI > SHMOO_AND28_MAX_VDL_LENGTH)
+ if (sizeUI > SHMOO_AND28_MAX_VDL_LENGTH)
{
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_CONTROL_REGS, VDL_CONTROL_AD00, VDL_STEP, SHMOO_AND28_MAX_VDL_LENGTH - 1);
}
@@ -4790,7 +4790,7 @@ soc_and28_shmoo_phy_init(int unit, int phy_ndx)
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_CONTROL_REGS_VIRTUAL_VTT_CONTROL, data);
/*A08*/ printf("A08. ZQ calibration\n");
-/*R08*/ if(shmoo_dram_info_ptr->sim_system_mode)
+/*R08*/ if (shmoo_dram_info_ptr->sim_system_mode)
{
printf(" Skipped for emulation\n");
@@ -4830,8 +4830,8 @@ soc_and28_shmoo_phy_init(int unit, int phy_ndx)
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, ODT_CONTROL, ODT_PRE_LENGTH, 4);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_ODT_CONTROL, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_ODT_CONTROL, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_ODT_CONTROL, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_ODT_CONTROL, data);
@@ -4848,8 +4848,8 @@ soc_and28_shmoo_phy_init(int unit, int phy_ndx)
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, WR_PREAMBLE_MODE, DQS_PREAM_BITS, 2);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_WR_PREAMBLE_MODE, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_WR_PREAMBLE_MODE, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_WR_PREAMBLE_MODE, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_WR_PREAMBLE_MODE, data);
@@ -4863,8 +4863,8 @@ soc_and28_shmoo_phy_init(int unit, int phy_ndx)
DDR_PHY_SET_FIELD(data, DDR34_CORE_PHY_BYTE_LANE_0, IDLE_PAD_CONTROL, AUTO_DQ_IDDQ_MODE, 3);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_0_IDLE_PAD_CONTROL, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_1_IDLE_PAD_CONTROL, data);
- #if(SHMOO_AND28_PHY_BITWIDTH_IS_32)
- if(shmoo_dram_info_ptr->interface_bitwidth == 32)
+ #if (SHMOO_AND28_PHY_BITWIDTH_IS_32)
+ if (shmoo_dram_info_ptr->interface_bitwidth == 32)
{
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_2_IDLE_PAD_CONTROL, data);
DDR_PHY_REG_WRITE(unit, SHMOO_AND28_PHY_REG_BASE, 0, DDR34_CORE_PHY_BYTE_LANE_3_IDLE_PAD_CONTROL, data);
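
Taken together, every hunk in this file applies one checkpatch-style rule: control-flow keywords and preprocessor conditionals (if, else if, for, while, switch, #if) get a space before the opening parenthesis, while function and function-like macro calls do not. A compact illustration of the convention on generic code (not taken from the driver):

    /* Discouraged: keyword glued to the parenthesis.
     *   if(temp & 0x80000000)
     *   while(count--)
     *   #if(FEATURE_ENABLED)
     * Preferred, as applied throughout this patch:
     */
    #include <stdio.h>

    #define FEATURE_ENABLED 1

    int main(void)
    {
        int count = 3;

        while (count--) {                 /* space after the keyword... */
            if (count & 1)
                printf("odd\n");          /* ...but none after the function name */
        }

    #if (FEATURE_ENABLED)
        printf("feature on\n");
    #endif
        return 0;
    }
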
diff --git a/src/soc/broadcom/cygnus/ydc_ddr_bist.c b/src/soc/broadcom/cygnus/ydc_ddr_bist.c
index 73f8c55bd8..5eaa2219d6 100644
--- a/src/soc/broadcom/cygnus/ydc_ddr_bist.c
+++ b/src/soc/broadcom/cygnus/ydc_ddr_bist.c
@@ -61,11 +61,11 @@ soc_ydc_ddr_bist_config_set(int unit, int phy_ndx, ydc_ddr_bist_info_t *bist_inf
YDC_DDR_BIST_SET_FIELD(data, YDC_DDR_BIST, CONFIGURATIONS, IND_WR_RD_ADDR_MODE, 1);
WRITE_YDC_DDR_BIST_CONFIGURATIONSr(0, YDC_DDR_BIST_REG_BASE, data);
- if(!((*bist_info).prbs_mode))
+ if (!((*bist_info).prbs_mode))
{
- if((*bist_info).mpr_mode)
+ if ((*bist_info).mpr_mode)
{
- if(YDC_DDR_BIST_PHY_BITWITDH_IS_32)
+ if (YDC_DDR_BIST_PHY_BITWITDH_IS_32)
{
WRITE_YDC_DDR_BIST_PATTERN_WORD_0r(0, YDC_DDR_BIST_REG_BASE, 0xFFFFFFFF);
WRITE_YDC_DDR_BIST_PATTERN_WORD_1r(0, YDC_DDR_BIST_REG_BASE, 0x00000000);
@@ -128,11 +128,11 @@ soc_ydc_ddr_bist_run(int unit, int phy_ndx, ydc_ddr_bist_err_cnt_t *error_count)
poll_count = 0;
- while(TRUE)
+ while (TRUE)
{
READ_YDC_DDR_BIST_STATUSESr(0, YDC_DDR_BIST_REG_BASE, &data);
- if(YDC_DDR_BIST_GET_FIELD(data, YDC_DDR_BIST, STATUSES, BIST_FINISHED))
+ if (YDC_DDR_BIST_GET_FIELD(data, YDC_DDR_BIST, STATUSES, BIST_FINISHED))
{
READ_YDC_DDR_BIST_CONFIGURATIONSr(0, YDC_DDR_BIST_REG_BASE, &data);
YDC_DDR_BIST_SET_FIELD(data, YDC_DDR_BIST, CONFIGURATIONS, BIST_EN, 0);
@@ -140,7 +140,7 @@ soc_ydc_ddr_bist_run(int unit, int phy_ndx, ydc_ddr_bist_err_cnt_t *error_count)
break;
}
- if(poll_count > YDC_DDR_BIST_POLL_COUNT_LIMIT)
+ if (poll_count > YDC_DDR_BIST_POLL_COUNT_LIMIT)
{
READ_YDC_DDR_BIST_CONFIGURATIONSr(0, YDC_DDR_BIST_REG_BASE, &data);
YDC_DDR_BIST_SET_FIELD(data, YDC_DDR_BIST, CONFIGURATIONS, BIST_EN, 0);