      1 /************************************************************************
      2  *
      3  * Copyright (c) 2013-2015 Intel Corporation.
      4  *
      5 * This program and the accompanying materials
      6 * are licensed and made available under the terms and conditions of the BSD License
      7 * which accompanies this distribution.  The full text of the license may be found at
      8 * http://opensource.org/licenses/bsd-license.php
      9 *
     10 * THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
     11 * WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
     12  *
     13  * This file contains all of the Cat Mountain Memory Reference Code (MRC).
     14  *
     15  * These functions are generic and should work for any Cat Mountain config.
     16  *
     17  * MRC requires two data structures to be passed in, both of which are initialised by "PreMemInit()".
     18  *
     19  * The basic flow is as follows (the corresponding routines are noted just after this header):
     20  * 01) Check for supported DDR speed configuration
     21  * 02) Set up MEMORY_MANAGER buffer as pass-through (POR)
     22  * 03) Set Channel Interleaving Mode and Channel Stride to the most aggressive setting possible
     23  * 04) Set up the MCU logic
     24  * 05) Set up the DDR_PHY logic
     25  * 06) Initialise the DRAMs (JEDEC)
     26  * 07) Perform the Receive Enable Calibration algorithm
     27  * 08) Perform the Write Leveling algorithm
     28  * 09) Perform the Read Training algorithm (includes internal Vref)
     29  * 10) Perform the Write Training algorithm
     30  * 11) Set Channel Interleaving Mode and Channel Stride to the desired settings
     31  *
     32  * Dunit configuration based on Valleyview MRC.
     33  *
     34  ***************************************************************************/
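
// For orientation, steps 04) to 07) above are implemented by routines defined
// later in this file (POST code major value in parentheses); the remaining
// steps are handled by the other MRC training routines:
//   04) Set up the MCU logic           -> prog_ddr_timing_control()  (0x02)
//   05) Set up the DDR_PHY logic       -> ddrphy_init()              (0x03)
//   06) Initialise the DRAMs (JEDEC)   -> jedec_init()               (0x04)
//   07) Receive Enable Calibration     -> rcvn_cal()                 (0x05)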
     35 
     36 #include "mrc.h"
     37 #include "memory_options.h"
     38 
     39 #include "meminit.h"
     40 #include "meminit_utils.h"
     41 #include "hte.h"
     42 #include "io.h"
     43 
     44 // Override ODT to off state if requested
     45 #define DRMC_DEFAULT    (mrc_params->rd_odt_value==0?BIT12:0)
     46 
     47 
     48 // tRFC values (in picoseconds) per density
     49 const uint32_t tRFC[5] =
     50 {
     51     90000,  // 512Mb
     52     110000, // 1Gb
     53     160000, // 2Gb
     54     300000, // 4Gb
     55     350000, // 8Gb
     56     };
     57 
     58 // tCK clock period in picoseconds per speed index (0 = 800, 1 = 1066, 2 = 1333)
     59 const uint32_t tCK[3] =
     60 {
     61     2500,
     62     1875,
     63     1500
     64 };
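
// Worked example of how these tables are used (assuming MCEIL(a, b) rounds
// a/b up to the nearest whole number of clocks, as in
// prog_ddr_timing_control() and jedec_init() below): for a hypothetical 2Gb
// device at DDR3-1066, tCK[1] = 1875ps and tRFC[2] = 160000ps, giving
//   TRFC = MCEIL(160000, 1875) = 86 DRAM clocks
//   TWR  = MCEIL(15000, 1875)  = 8 DRAM clocks  (JEDEC tWR = 15ns)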
     65 
     66 #ifdef SIM
     67 // Select static timings specific to simulation environment
     68 #define PLATFORM_ID    0
     69 #else
     70 // Select static timings specific to ClantonPeek platform
     71 #define PLATFORM_ID    1
     72 #endif
     73 
     74 
     75 // Global platform-specific timing tables (indexed by PLATFORM_ID)
     76 const uint16_t ddr_wclk[] =
     77     {193, 158};
     78 
     79 const uint16_t ddr_wctl[] =
     80     {  1, 217};
     81 
     82 const uint16_t ddr_wcmd[] =
     83     {  1, 220};
     84 
     85 
     86 #ifdef BACKUP_RCVN
     87 const uint16_t ddr_rcvn[] =
     88     {129, 498};
     89 #endif // BACKUP_RCVN
     90 
     91 #ifdef BACKUP_WDQS
     92 const uint16_t ddr_wdqs[] =
     93     { 65, 289};
     94 #endif // BACKUP_WDQS
     95 
     96 #ifdef BACKUP_RDQS
     97 const uint8_t ddr_rdqs[] =
     98     { 32,  24};
     99 #endif // BACKUP_RDQS
    100 
    101 #ifdef BACKUP_WDQ
    102 const uint16_t ddr_wdq[] =
    103     { 32, 257};
    104 #endif // BACKUP_WDQ
    105 
    106 
    107 
    108 // Select MEMORY_MANAGER as the source for PRI interface
    109 static void select_memory_manager(
    110     MRCParams_t *mrc_params)
    111 {
    112   RegDCO Dco;
    113 
    114   ENTERFN();
    115 
    116   Dco.raw = isbR32m(MCU, DCO);
    117   Dco.field.PMICTL = 0;          //0 - PRI owned by MEMORY_MANAGER
    118   isbW32m(MCU, DCO, Dco.raw);
    119 
    120   LEAVEFN();
    121 }
    122 
    123 // Select HTE as the source for PRI interface
    124 void select_hte(
    125     MRCParams_t *mrc_params)
    126 {
    127   RegDCO Dco;
    128 
    129   ENTERFN();
    130 
    131   Dco.raw = isbR32m(MCU, DCO);
    132   Dco.field.PMICTL = 1;          //1 - PRI owned by HTE
    133   isbW32m(MCU, DCO, Dco.raw);
    134 
    135   LEAVEFN();
    136 }
    137 
    138 // Send a DRAM command; the data should be formatted
    139 // using a DCMD_Xxxx macro or an emrsXCommand structure.
    140 static void dram_init_command(
    141     uint32_t data)
    142 {
    143   Wr32(DCMD, 0, data);
    144 }
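
// Typical usage, as seen in jedec_init() below: pass either a DCMD_Xxxx
// opcode macro or the raw value of a pre-filled MRS command structure,
// for example:
//
//   dram_init_command(DCMD_NOP(Rank));      // opcode macro form
//
//   mrs0Command.field.rankSelect = Rank;
//   dram_init_command(mrs0Command.raw);     // command structure form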
    145 
    146 // Send DRAM wake command using special MCU side-band WAKE opcode
    147 static void dram_wake_command(
    148     void)
    149 {
    150   ENTERFN();
    151 
    152   Wr32(MMIO, PCIADDR(0,0,0,SB_PACKET_REG),
    153       (uint32_t) SB_COMMAND(SB_WAKE_CMND_OPCODE, MCU, 0));
    154 
    155   LEAVEFN();
    156 }
    157 
    158 // Stop self refresh driven by MCU
    159 static void clear_self_refresh(
    160     MRCParams_t *mrc_params)
    161 {
    162   ENTERFN();
    163 
    164   // clear the PMSTS Channel Self Refresh bits
    165   isbM32m(MCU, PMSTS, BIT0, BIT0);
    166 
    167   LEAVEFN();
    168 }
    169 
    170 // Configure MCU before jedec init sequence
    171 static void prog_decode_before_jedec(
    172     MRCParams_t *mrc_params)
    173 {
    174   RegDRP Drp;
    175   RegDRCF Drfc;
    176   RegDCAL Dcal;
    177   RegDSCH Dsch;
    178   RegDPMC0 Dpmc0;
    179 
    180   ENTERFN();
    181 
    182   // Disable power saving features
    183   Dpmc0.raw = isbR32m(MCU, DPMC0);
    184   Dpmc0.field.CLKGTDIS = 1;
    185   Dpmc0.field.DISPWRDN = 1;
    186   Dpmc0.field.DYNSREN = 0;
    187   Dpmc0.field.PCLSTO = 0;
    188   isbW32m(MCU, DPMC0, Dpmc0.raw);
    189 
    190   // Disable out of order transactions
    191   Dsch.raw = isbR32m(MCU, DSCH);
    192   Dsch.field.OOODIS = 1;
    193   Dsch.field.NEWBYPDIS = 1;
    194   isbW32m(MCU, DSCH, Dsch.raw);
    195 
    196   // Disable issuing the REF command
    197   Drfc.raw = isbR32m(MCU, DRFC);
    198   Drfc.field.tREFI = 0;
    199   isbW32m(MCU, DRFC, Drfc.raw);
    200 
    201   // Disable ZQ calibration short
    202   Dcal.raw = isbR32m(MCU, DCAL);
    203   Dcal.field.ZQCINT = 0;
    204   Dcal.field.SRXZQCL = 0;
    205   isbW32m(MCU, DCAL, Dcal.raw);
    206 
    207   // Training is performed in address mode 0. Rank population has limited impact here,
    208   // but the simulator complains if a non-existent rank is enabled.
    209   Drp.raw = 0;
    210   if (mrc_params->rank_enables & 1)
    211     Drp.field.rank0Enabled = 1;
    212   if (mrc_params->rank_enables & 2)
    213     Drp.field.rank1Enabled = 1;
    214   isbW32m(MCU, DRP, Drp.raw);
    215 
    216   LEAVEFN();
    217 }
    218 
    219 // After Cold Reset, BIOS should set COLDWAKE bit to 1 before
    220 // sending the WAKE message to the Dunit.
    221 // For Standby Exit, or any other mode in which the DRAM is in
    222 // SR, this bit must be set to 0.
    223 static void perform_ddr_reset(
    224     MRCParams_t *mrc_params)
    225 {
    226   ENTERFN();
    227 
    228   // Set COLDWAKE bit before sending the WAKE message
    229   isbM32m(MCU, DRMC, BIT16, BIT16);
    230 
    231   // Send wake command to DUNIT (MUST be done before JEDEC)
    232   dram_wake_command();
    233 
    234   // Set default value
    235   isbW32m(MCU, DRMC, DRMC_DEFAULT);
    236 
    237   LEAVEFN();
    238 }
    239 
    240 // Dunit Initialisation Complete.
    241 // Indicates that initialisation of the Dunit has completed.
    242 // Memory accesses are permitted and maintenance operations
    243 // begin. Until this bit is set to a 1, the memory controller will
    244 // not accept DRAM requests from the MEMORY_MANAGER or HTE.
    245 static void set_ddr_init_complete(
    246     MRCParams_t *mrc_params)
    247 {
    248   RegDCO Dco;
    249 
    250   ENTERFN();
    251 
    252   Dco.raw = isbR32m(MCU, DCO);
    253   Dco.field.PMICTL = 0;          //0 - PRI owned by MEMORY_MANAGER
    254   Dco.field.IC = 1;              //1 - initialisation complete
    255   isbW32m(MCU, DCO, Dco.raw);
    256 
    257   LEAVEFN();
    258 }
    259 
    260 static void prog_page_ctrl(
    261     MRCParams_t *mrc_params)
    262 {
    263   RegDPMC0 Dpmc0;
    264 
    265   ENTERFN();
    266 
    267   Dpmc0.raw = isbR32m(MCU, DPMC0);
    268 
    269   Dpmc0.field.PCLSTO = 0x4;
    270   Dpmc0.field.PREAPWDEN = 1;
    271 
    272   isbW32m(MCU, DPMC0, Dpmc0.raw);
    273 }
    274 
    275 // Configure MCU Power Management Control Register
    276 // and Scheduler Control Register.
    277 static void prog_ddr_control(
    278     MRCParams_t *mrc_params)
    279 {
    280   RegDSCH Dsch;
    281   RegDPMC0 Dpmc0;
    282 
    283   ENTERFN();
    284 
    285   Dpmc0.raw = isbR32m(MCU, DPMC0);
    286   Dsch.raw = isbR32m(MCU, DSCH);
    287 
    288   Dpmc0.field.DISPWRDN = mrc_params->power_down_disable;
    289   Dpmc0.field.CLKGTDIS = 0;
    290   Dpmc0.field.PCLSTO = 4;
    291   Dpmc0.field.PREAPWDEN = 1;
    292 
    293   Dsch.field.OOODIS = 0;
    294   Dsch.field.OOOST3DIS = 0;
    295   Dsch.field.NEWBYPDIS = 0;
    296 
    297   isbW32m(MCU, DSCH, Dsch.raw);
    298   isbW32m(MCU, DPMC0, Dpmc0.raw);
    299 
    300   // CMDTRIST = 2h - CMD/ADDR are tristated when no valid command
    301   isbM32m(MCU, DPMC1, 2 << 4, BIT5|BIT4);
    302 
    303   LEAVEFN();
    304 }
    305 
    306 // After training completes, configure the MCU Rank Population Register
    307 // specifying: ranks enabled, device width, density, address mode.
    308 static void prog_dra_drb(
    309     MRCParams_t *mrc_params)
    310 {
    311   RegDRP Drp;
    312   RegDCO Dco;
    313 
    314   ENTERFN();
    315 
    316   Dco.raw = isbR32m(MCU, DCO);
    317   Dco.field.IC = 0;
    318   isbW32m(MCU, DCO, Dco.raw);
    319 
    320   Drp.raw = 0;
    321   if (mrc_params->rank_enables & 1)
    322     Drp.field.rank0Enabled = 1;
    323   if (mrc_params->rank_enables & 2)
    324     Drp.field.rank1Enabled = 1;
    325   if (mrc_params->dram_width == x16)
    326   {
    327     Drp.field.dimm0DevWidth = 1;
    328     Drp.field.dimm1DevWidth = 1;
    329   }
    330   // The density encoding in DRAMParams_t (0=512Mb, 1=1Gb, 2=2Gb, 3=4Gb)
    331   // has to be mapped to the RANKDENSx encoding (which starts at 0=1Gb).
    332   Drp.field.dimm0DevDensity = mrc_params->params.DENSITY - 1;
    333   Drp.field.dimm1DevDensity = mrc_params->params.DENSITY - 1;
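  // For example, DENSITY == 2 (2Gb) programs RANKDENSx = 1, one step above
  // the 1Gb base of the RANKDENSx scale.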
    334 
    335   // Address mode can be overwritten if ECC enabled
    336   Drp.field.addressMap = mrc_params->address_mode;
    337 
    338   isbW32m(MCU, DRP, Drp.raw);
    339 
    340   Dco.field.PMICTL = 0;          //0 - PRI owned by MEMORY_MANAGER
    341   Dco.field.IC = 1;              //1 - initialisation complete
    342   isbW32m(MCU, DCO, Dco.raw);
    343 
    344   LEAVEFN();
    345 }
    346 
    347 // Configure refresh rate and short ZQ calibration interval.
    348 // Activate dynamic self refresh.
    349 static void change_refresh_period(
    350     MRCParams_t *mrc_params)
    351 {
    352   RegDRCF Drfc;
    353   RegDCAL Dcal;
    354   RegDPMC0 Dpmc0;
    355 
    356   ENTERFN();
    357 
    358   Drfc.raw = isbR32m(MCU, DRFC);
    359   Drfc.field.tREFI = mrc_params->refresh_rate;
    360   Drfc.field.REFDBTCLR = 1;
    361   isbW32m(MCU, DRFC, Drfc.raw);
    362 
    363   Dcal.raw = isbR32m(MCU, DCAL);
    364   Dcal.field.ZQCINT = 3; // 63ms
    365   isbW32m(MCU, DCAL, Dcal.raw);
    366 
    367   Dpmc0.raw = isbR32m(MCU, DPMC0);
    368   Dpmc0.field.ENPHYCLKGATE = 1;
    369   Dpmc0.field.DYNSREN = 1;
    370   isbW32m(MCU, DPMC0, Dpmc0.raw);
    371 
    372   LEAVEFN();
    373 }
    374 
    375 // Send DRAM wake command
    376 static void perform_wake(
    377     MRCParams_t *mrc_params)
    378 {
    379   ENTERFN();
    380 
    381   dram_wake_command();
    382 
    383   LEAVEFN();
    384 }
    385 
    386 // prog_ddr_timing_control (aka mcu_init):
    387 // POST_CODE[major] == 0x02
    388 //
    389 // It will initialise timing registers in the MCU (DTR0..DTR4).
    390 static void prog_ddr_timing_control(
    391     MRCParams_t *mrc_params)
    392 {
    393   uint8_t TCL, WL;
    394   uint8_t TRP, TRCD, TRAS, TRFC, TWR, TWTR, TRRD, TRTP, TFAW;
    395   uint32_t TCK;
    396 
    397   RegDTR0 Dtr0;
    398   RegDTR1 Dtr1;
    399   RegDTR2 Dtr2;
    400   RegDTR3 Dtr3;
    401   RegDTR4 Dtr4;
    402 
    403   ENTERFN();
    404 
    405   // mcu_init starts
    406   post_code(0x02, 0x00);
    407 
    408   Dtr0.raw = isbR32m(MCU, DTR0);
    409   Dtr1.raw = isbR32m(MCU, DTR1);
    410   Dtr2.raw = isbR32m(MCU, DTR2);
    411   Dtr3.raw = isbR32m(MCU, DTR3);
    412   Dtr4.raw = isbR32m(MCU, DTR4);
    413 
    414   TCK = tCK[mrc_params->ddr_speed];  // Clock in picoseconds
    415   TCL = mrc_params->params.tCL;      // CAS latency in clocks
    416   TRP = TCL;  // Per CAT MRC
    417   TRCD = TCL;  // Per CAT MRC
    418   TRAS = MCEIL(mrc_params->params.tRAS, TCK);
    419   TRFC = MCEIL(tRFC[mrc_params->params.DENSITY], TCK);
    420   TWR = MCEIL(15000, TCK);   // Per JEDEC: tWR=15000ps DDR2/3 from 800-1600
    421 
    422   TWTR = MCEIL(mrc_params->params.tWTR, TCK);
    423   TRRD = MCEIL(mrc_params->params.tRRD, TCK);
    424   TRTP = 4;  // Valid for 800 and 1066, use 5 for 1333
    425   TFAW = MCEIL(mrc_params->params.tFAW, TCK);
    426 
    427   WL = 5 + mrc_params->ddr_speed;
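
  // Worked example for a hypothetical DDR3-1066 CL7 part (ddr_speed == 1,
  // tRAS = 37.5ns): TCK = 1875ps, TCL = TRP = TRCD = 7, WL = 6,
  // TRAS = MCEIL(37500, 1875) = 20, TWR = MCEIL(15000, 1875) = 8.
  // The assignments below then encode these as register fields, e.g.
  //   tCL field = TCL - 5 = 2,  tWCL field = WL - 3 = 3,
  //   tWTP field = WL + 4 + TWR - 14 = 4,  tRAS field = TRAS - 14 = 6.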
    428 
    429   Dtr0.field.dramFrequency = mrc_params->ddr_speed;
    430 
    431   Dtr0.field.tCL = TCL - 5;            //Convert from TCL (DRAM clocks) to VLV index
    432   Dtr0.field.tRP = TRP - 5;            //5 bit DRAM Clock
    433   Dtr0.field.tRCD = TRCD - 5;          //5 bit DRAM Clock
    434 
    435   Dtr1.field.tWCL = WL - 3;            //Convert from WL (DRAM clocks) to VLV index
    436   Dtr1.field.tWTP = WL + 4 + TWR - 14;  //Change to tWTP
    437   Dtr1.field.tRTP = MMAX(TRTP, 4) - 3;  //4 bit DRAM Clock
    438   Dtr1.field.tRRD = TRRD - 4;        //4 bit DRAM Clock
    439   Dtr1.field.tCMD = 1;             //2N
    440   Dtr1.field.tRAS = TRAS - 14;      //6 bit DRAM Clock
    441 
    442   Dtr1.field.tFAW = ((TFAW + 1) >> 1) - 5;    //4 bit DRAM Clock
    443   Dtr1.field.tCCD = 0;                        //Set 4 Clock CAS to CAS delay (multi-burst)
    444   Dtr2.field.tRRDR = 1;
    445   Dtr2.field.tWWDR = 2;
    446   Dtr2.field.tRWDR = 2;
    447   Dtr3.field.tWRDR = 2;
    448   Dtr3.field.tWRDD = 2;
    449 
    450   if (mrc_params->ddr_speed == DDRFREQ_800)
    451   {
    452      // Extended RW delay (+1)
    453      Dtr3.field.tRWSR = TCL - 5 + 1;
    454   }
    455   else if(mrc_params->ddr_speed == DDRFREQ_1066)
    456   {
    457      // Extended RW delay (+1)
    458      Dtr3.field.tRWSR = TCL - 5 + 1;
    459   }
    460 
    461   Dtr3.field.tWRSR = 4 + WL + TWTR - 11;
    462 
    463   if (mrc_params->ddr_speed == DDRFREQ_800)
    464   {
    465     Dtr3.field.tXP = MMAX(0, 1 - Dtr1.field.tCMD);
    466   }
    467   else
    468   {
    469     Dtr3.field.tXP = MMAX(0, 2 - Dtr1.field.tCMD);
    470   }
    471 
    472   Dtr4.field.WRODTSTRT = Dtr1.field.tCMD;
    473   Dtr4.field.WRODTSTOP = Dtr1.field.tCMD;
    474   Dtr4.field.RDODTSTRT = Dtr1.field.tCMD + Dtr0.field.tCL - Dtr1.field.tWCL + 2; //Convert from WL (DRAM clocks) to VLV index
    475   Dtr4.field.RDODTSTOP = Dtr1.field.tCMD + Dtr0.field.tCL - Dtr1.field.tWCL + 2;
    476   Dtr4.field.TRGSTRDIS = 0;
    477   Dtr4.field.ODTDIS = 0;
    478 
    479   isbW32m(MCU, DTR0, Dtr0.raw);
    480   isbW32m(MCU, DTR1, Dtr1.raw);
    481   isbW32m(MCU, DTR2, Dtr2.raw);
    482   isbW32m(MCU, DTR3, Dtr3.raw);
    483   isbW32m(MCU, DTR4, Dtr4.raw);
    484 
    485   LEAVEFN();
    486 }
    487 
    488 // ddrphy_init:
    489 // POST_CODE[major] == 0x03
    490 //
    491 // This function performs some initialisation on the DDRIO unit.
    492 // This function is dependent on BOARD_ID, DDR_SPEED, and CHANNEL_ENABLES.
    493 static void ddrphy_init(MRCParams_t *mrc_params)
    494 {
    495   uint32_t tempD; // temporary DWORD
    496   uint8_t channel_i; // channel counter
    497   uint8_t rank_i; // rank counter
    498   uint8_t bl_grp_i; // byte lane group counter (2 BLs per module)
    499 
    500   uint8_t bl_divisor = /*(mrc_params->channel_width==x16)?2:*/1; // byte lane divisor
    501   uint8_t speed = mrc_params->ddr_speed & (BIT1|BIT0); // For DDR3 --> 0 == 800, 1 == 1066, 2 == 1333
    502   uint8_t tCAS;
    503   uint8_t tCWL;
    504 
    505   ENTERFN();
    506 
    507   tCAS = mrc_params->params.tCL;
    508   tCWL = 5 + mrc_params->ddr_speed;
    509 
    510   // ddrphy_init starts
    511   post_code(0x03, 0x00);
    512 
    513   // HSD#231531
    514   // Make sure IOBUFACT is deasserted before initialising the DDR PHY.
    515   // HSD#234845
    516   // Make sure WRPTRENABLE is deasserted before initialising the DDR PHY.
    517   for (channel_i=0; channel_i<NUM_CHANNELS; channel_i++) {
    518     if (mrc_params->channel_enables & (1<<channel_i)) {
    519       // Deassert DDRPHY Initialisation Complete
    520       isbM32m(DDRPHY, (CMDPMCONFIG0 + (channel_i * DDRIOCCC_CH_OFFSET)), ~BIT20, BIT20); // SPID_INIT_COMPLETE=0
    521       // Deassert IOBUFACT
    522       isbM32m(DDRPHY, (CMDCFGREG0 + (channel_i * DDRIOCCC_CH_OFFSET)), ~BIT2, BIT2); // IOBUFACTRST_N=0
    523       // Disable WRPTR
    524       isbM32m(DDRPHY, (CMDPTRREG + (channel_i * DDRIOCCC_CH_OFFSET)), ~BIT0, BIT0); // WRPTRENABLE=0
    525     } // if channel enabled
    526   } // channel_i loop
    527 
    528   // Put PHY in reset
    529   isbM32m(DDRPHY, MASTERRSTN, 0, BIT0); // PHYRSTN=0
    530 
    531   // Initialise DQ01,DQ23,CMD,CLK-CTL,COMP modules
    532   // STEP0:
    533   post_code(0x03, 0x10);
    534   for (channel_i=0; channel_i<NUM_CHANNELS; channel_i++) {
    535     if (mrc_params->channel_enables & (1<<channel_i)) {
    536 
    537       // DQ01-DQ23
    538       for (bl_grp_i=0; bl_grp_i<((NUM_BYTE_LANES/bl_divisor)/2); bl_grp_i++) {
    539         isbM32m(DDRPHY, (DQOBSCKEBBCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), ((bl_grp_i) ? (0x00) : (BIT22)), (BIT22)); // Analog MUX select - IO2xCLKSEL
    540 
    541         // ODT Strength
    542         switch (mrc_params->rd_odt_value) {
    543           case 1: tempD = 0x3; break; // 60 ohm
    544           case 2: tempD = 0x3; break; // 120 ohm
    545           case 3: tempD = 0x3; break; // 180 ohm
    546           default: tempD = 0x3; break; // 120 ohm
    547         }
    548         isbM32m(DDRPHY, (B0RXIOBUFCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), (tempD<<5), (BIT6|BIT5)); // ODT strength
    549         isbM32m(DDRPHY, (B1RXIOBUFCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), (tempD<<5), (BIT6|BIT5)); // ODT strength
    550         // Dynamic ODT/DIFFAMP
    551         tempD = (((tCAS)<<24)|((tCAS)<<16)|((tCAS)<<8)|((tCAS)<<0));
    552         switch (speed) {
    553           case 0: tempD -= 0x01010101; break; // 800
    554           case 1: tempD -= 0x02020202; break; // 1066
    555           case 2: tempD -= 0x03030303; break; // 1333
    556           case 3: tempD -= 0x04040404; break; // 1600
    557         }
    558         isbM32m(DDRPHY, (B01LATCTL1 + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), tempD, ((BIT28|BIT27|BIT26|BIT25|BIT24)|(BIT20|BIT19|BIT18|BIT17|BIT16)|(BIT12|BIT11|BIT10|BIT9|BIT8)|(BIT4|BIT3|BIT2|BIT1|BIT0))); // Launch Time: ODT, DIFFAMP, ODT, DIFFAMP
    559         switch (speed) {
    560           // HSD#234715
    561           case 0: tempD = ((0x06<<16)|(0x07<<8)); break; // 800
    562           case 1: tempD = ((0x07<<16)|(0x08<<8)); break; // 1066
    563           case 2: tempD = ((0x09<<16)|(0x0A<<8)); break; // 1333
    564           case 3: tempD = ((0x0A<<16)|(0x0B<<8)); break; // 1600
    565         }
    566         isbM32m(DDRPHY, (B0ONDURCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), tempD, ((BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)|(BIT13|BIT12|BIT11|BIT10|BIT9|BIT8))); // On Duration: ODT, DIFFAMP
    567         isbM32m(DDRPHY, (B1ONDURCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), tempD, ((BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)|(BIT13|BIT12|BIT11|BIT10|BIT9|BIT8))); // On Duration: ODT, DIFFAMP
    568 
    569         switch (mrc_params->rd_odt_value) {
    570           case 0:  tempD = ((0x3F<<16)|(0x3f<<10)); break; // override DIFFAMP=on, ODT=off
    571           default: tempD = ((0x3F<<16)|(0x2A<<10)); break; // override DIFFAMP=on, ODT=on
    572         }
    573         isbM32m(DDRPHY, (B0OVRCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), tempD, ((BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)|(BIT15|BIT14|BIT13|BIT12|BIT11|BIT10))); // Override: DIFFAMP, ODT
    574         isbM32m(DDRPHY, (B1OVRCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), tempD, ((BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)|(BIT15|BIT14|BIT13|BIT12|BIT11|BIT10))); // Override: DIFFAMP, ODT
    575 
    576         // DLL Setup
    577         // 1xCLK Domain Timings: tEDP,RCVEN,WDQS (PO)
    578         isbM32m(DDRPHY, (B0LATCTL0 + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), (((tCAS+7)<<16)|((tCAS-4)<<8)|((tCWL-2)<<0)), ((BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)|(BIT12|BIT11|BIT10|BIT9|BIT8)|(BIT4|BIT3|BIT2|BIT1|BIT0))); // 1xCLK: tEDP, RCVEN, WDQS
    579         isbM32m(DDRPHY, (B1LATCTL0 + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), (((tCAS+7)<<16)|((tCAS-4)<<8)|((tCWL-2)<<0)), ((BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)|(BIT12|BIT11|BIT10|BIT9|BIT8)|(BIT4|BIT3|BIT2|BIT1|BIT0))); // 1xCLK: tEDP, RCVEN, WDQS
    580 
    581         // RCVEN Bypass (PO)
    582         isbM32m(DDRPHY, (B0RXIOBUFCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), ((0x0<<7)|(0x0<<0)), (BIT7|BIT0)); // AFE Bypass, RCVEN DIFFAMP
    583         isbM32m(DDRPHY, (B1RXIOBUFCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), ((0x0<<7)|(0x0<<0)), (BIT7|BIT0)); // AFE Bypass, RCVEN DIFFAMP
    584         // TX
    585         isbM32m(DDRPHY, (DQCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), (BIT16), (BIT16)); // 0 means driving DQ during DQS-preamble
    586         isbM32m(DDRPHY, (B01PTRCTL1 + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), (BIT8), (BIT8)); // WR_LVL mode disable
    587         // RX (PO)
    588         isbM32m(DDRPHY, (B0VREFCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), ((0x03<<2)|(0x0<<1)|(0x0<<0)), ((BIT7|BIT6|BIT5|BIT4|BIT3|BIT2)|BIT1|BIT0)); // Internal Vref Code, Enable#, Ext_or_Int (1=Ext)
    589         isbM32m(DDRPHY, (B1VREFCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), ((0x03<<2)|(0x0<<1)|(0x0<<0)), ((BIT7|BIT6|BIT5|BIT4|BIT3|BIT2)|BIT1|BIT0)); // Internal Vref Code, Enable#, Ext_or_Int (1=Ext)
    590         isbM32m(DDRPHY, (B0RXIOBUFCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), (0), (BIT4)); // Per-Bit De-Skew Enable
    591         isbM32m(DDRPHY, (B1RXIOBUFCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), (0), (BIT4)); // Per-Bit De-Skew Enable
    592       }
    593       // CLKEBB
    594       isbM32m(DDRPHY, (CMDOBSCKEBBCTL + (channel_i * DDRIOCCC_CH_OFFSET)), 0, (BIT23));
    595 
    596       // Enable tristate control of cmd/address bus
    597       isbM32m(DDRPHY, (CMDCFGREG0 + (channel_i * DDRIOCCC_CH_OFFSET)), 0, (BIT1|BIT0));
    598 
    599       // ODT RCOMP
    600       isbM32m(DDRPHY, (CMDRCOMPODT + (channel_i * DDRIOCCC_CH_OFFSET)), ((0x03<<5)|(0x03<<0)), ((BIT9|BIT8|BIT7|BIT6|BIT5)|(BIT4|BIT3|BIT2|BIT1|BIT0)));
    601 
    602       // CMDPM* registers must be programmed in this order...
    603       isbM32m(DDRPHY, (CMDPMDLYREG4 + (channel_i * DDRIOCCC_CH_OFFSET)), ((0xFFFFU<<16)|(0xFFFF<<0)), ((BIT31|BIT30|BIT29|BIT28|BIT27|BIT26|BIT25|BIT24|BIT23|BIT22|BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)|(BIT15|BIT14|BIT13|BIT12|BIT11|BIT10|BIT9|BIT8|BIT7|BIT6|BIT5|BIT4|BIT3|BIT2|BIT1|BIT0))); // Turn On Delays: SFR (regulator), MPLL
    604       isbM32m(DDRPHY, (CMDPMDLYREG3 + (channel_i * DDRIOCCC_CH_OFFSET)), ((0xFU<<28)|(0xFFF<<16)|(0xF<<12)|(0x616<<0)), ((BIT31|BIT30|BIT29|BIT28)|(BIT27|BIT26|BIT25|BIT24|BIT23|BIT22|BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)|(BIT15|BIT14|BIT13|BIT12)|(BIT11|BIT10|BIT9|BIT8|BIT7|BIT6|BIT5|BIT4|BIT3|BIT2|BIT1|BIT0))); // Delays: ASSERT_IOBUFACT_to_ALLON0_for_PM_MSG_3, VREG (MDLL) Turn On, ALLON0_to_DEASSERT_IOBUFACT_for_PM_MSG_gt0, MDLL Turn On
    605       isbM32m(DDRPHY, (CMDPMDLYREG2 + (channel_i * DDRIOCCC_CH_OFFSET)), ((0xFFU<<24)|(0xFF<<16)|(0xFF<<8)|(0xFF<<0)), ((BIT31|BIT30|BIT29|BIT28|BIT27|BIT26|BIT25|BIT24)|(BIT23|BIT22|BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)|(BIT15|BIT14|BIT13|BIT12|BIT11|BIT10|BIT9|BIT8)|(BIT7|BIT6|BIT5|BIT4|BIT3|BIT2|BIT1|BIT0))); // MPLL Divider Reset Delays
    606       isbM32m(DDRPHY, (CMDPMDLYREG1 + (channel_i * DDRIOCCC_CH_OFFSET)), ((0xFFU<<24)|(0xFF<<16)|(0xFF<<8)|(0xFF<<0)), ((BIT31|BIT30|BIT29|BIT28|BIT27|BIT26|BIT25|BIT24)|(BIT23|BIT22|BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)|(BIT15|BIT14|BIT13|BIT12|BIT11|BIT10|BIT9|BIT8)|(BIT7|BIT6|BIT5|BIT4|BIT3|BIT2|BIT1|BIT0))); // Turn Off Delays: VREG, Staggered MDLL, MDLL, PI
    607       isbM32m(DDRPHY, (CMDPMDLYREG0 + (channel_i * DDRIOCCC_CH_OFFSET)), ((0xFFU<<24)|(0xFF<<16)|(0xFF<<8)|(0xFF<<0)), ((BIT31|BIT30|BIT29|BIT28|BIT27|BIT26|BIT25|BIT24)|(BIT23|BIT22|BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)|(BIT15|BIT14|BIT13|BIT12|BIT11|BIT10|BIT9|BIT8)|(BIT7|BIT6|BIT5|BIT4|BIT3|BIT2|BIT1|BIT0))); // Turn On Delays: MPLL, Staggered MDLL, PI, IOBUFACT
    608       isbM32m(DDRPHY, (CMDPMCONFIG0 + (channel_i * DDRIOCCC_CH_OFFSET)), ((0x6<<8)|BIT6|(0x4<<0)), (BIT31|BIT30|BIT29|BIT28|BIT27|BIT26|BIT25|BIT24|BIT23|BIT22|BIT21|(BIT11|BIT10|BIT9|BIT8)|BIT6|(BIT3|BIT2|BIT1|BIT0))); // Allow PUnit signals
    609       isbM32m(DDRPHY, (CMDMDLLCTL +   (channel_i * DDRIOCCC_CH_OFFSET)), ((0x3<<4)|(0x7<<0)), ((BIT6|BIT5|BIT4)|(BIT3|BIT2|BIT1|BIT0))); // DLL_VREG Bias Trim, VREF Tuning for DLL_VREG
    610       // CLK-CTL
    611       isbM32m(DDRPHY, (CCOBSCKEBBCTL + (channel_i * DDRIOCCC_CH_OFFSET)), 0, (BIT24)); // CLKEBB
    612       isbM32m(DDRPHY, (CCCFGREG0 +     (channel_i * DDRIOCCC_CH_OFFSET)), ((0x0<<16)|(0x0<<12)|(0x0<<8)|(0xF<<4)|BIT0), ((BIT19|BIT18|BIT17|BIT16)|(BIT15|BIT14|BIT13|BIT12)|(BIT11|BIT10|BIT9|BIT8)|(BIT7|BIT6|BIT5|BIT4)|BIT0)); // Buffer Enable: CS,CKE,ODT,CLK
    613       isbM32m(DDRPHY, (CCRCOMPODT +    (channel_i * DDRIOCCC_CH_OFFSET)), ((0x03<<8)|(0x03<<0)), ((BIT12|BIT11|BIT10|BIT9|BIT8)|(BIT4|BIT3|BIT2|BIT1|BIT0))); // ODT RCOMP
    614       isbM32m(DDRPHY, (CCMDLLCTL +     (channel_i * DDRIOCCC_CH_OFFSET)), ((0x3<<4)|(0x7<<0)), ((BIT6|BIT5|BIT4)|(BIT3|BIT2|BIT1|BIT0))); // DLL_VREG Bias Trim, VREF Tuning for DLL_VREG
    615 
    616       // COMP (RON channel specific)
    617       // - DQ/DQS/DM RON: 32 Ohm
    618       // - CTRL/CMD RON: 27 Ohm
    619       // - CLK RON: 26 Ohm
    620       isbM32m(DDRPHY, (DQVREFCH0 +  (channel_i * DDRCOMP_CH_OFFSET)), ((0x08<<24)|(0x03<<16)), ((BIT29|BIT28|BIT27|BIT26|BIT25|BIT24)|(BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)));  // RCOMP Vref PU/PD
    621       isbM32m(DDRPHY, (CMDVREFCH0 + (channel_i * DDRCOMP_CH_OFFSET)), ((0x0C<<24)|(0x03<<16)), ((BIT29|BIT28|BIT27|BIT26|BIT25|BIT24)|(BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)));  // RCOMP Vref PU/PD
    622       isbM32m(DDRPHY, (CLKVREFCH0 + (channel_i * DDRCOMP_CH_OFFSET)), ((0x0F<<24)|(0x03<<16)), ((BIT29|BIT28|BIT27|BIT26|BIT25|BIT24)|(BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)));  // RCOMP Vref PU/PD
    623       isbM32m(DDRPHY, (DQSVREFCH0 + (channel_i * DDRCOMP_CH_OFFSET)), ((0x08<<24)|(0x03<<16)), ((BIT29|BIT28|BIT27|BIT26|BIT25|BIT24)|(BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)));  // RCOMP Vref PU/PD
    624       isbM32m(DDRPHY, (CTLVREFCH0 + (channel_i * DDRCOMP_CH_OFFSET)), ((0x0C<<24)|(0x03<<16)), ((BIT29|BIT28|BIT27|BIT26|BIT25|BIT24)|(BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)));  // RCOMP Vref PU/PD
    625 
    626       // DQS Swapped Input Enable
    627       isbM32m(DDRPHY, (COMPEN1CH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT19|BIT17),           ((BIT31|BIT30)|BIT19|BIT17|(BIT15|BIT14)));
    628 
    629       // ODT VREF = 1.5 x 274/(360+274) = 0.65V (code of ~50)
    630       isbM32m(DDRPHY, (DQVREFCH0 +  (channel_i * DDRCOMP_CH_OFFSET)), ((0x32<<8)|(0x03<<0)),   ((BIT13|BIT12|BIT11|BIT10|BIT9|BIT8)|(BIT5|BIT4|BIT3|BIT2|BIT1|BIT0))); // ODT Vref PU/PD
    631       isbM32m(DDRPHY, (DQSVREFCH0 + (channel_i * DDRCOMP_CH_OFFSET)), ((0x32<<8)|(0x03<<0)),   ((BIT13|BIT12|BIT11|BIT10|BIT9|BIT8)|(BIT5|BIT4|BIT3|BIT2|BIT1|BIT0))); // ODT Vref PU/PD
    632       isbM32m(DDRPHY, (CLKVREFCH0 + (channel_i * DDRCOMP_CH_OFFSET)), ((0x0E<<8)|(0x05<<0)),   ((BIT13|BIT12|BIT11|BIT10|BIT9|BIT8)|(BIT5|BIT4|BIT3|BIT2|BIT1|BIT0))); // ODT Vref PU/PD
    633 
    634       // Slew rate settings are frequency specific; the numbers below are for 800MHz (speed == 0)
    635       // - DQ/DQS/DM/CLK SR: 4V/ns,
    636       // - CTRL/CMD SR: 1.5V/ns
    637       tempD = (0x0E<<16)|(0x0E<<12)|(0x08<<8)|(0x0B<<4)|(0x0B<<0);
    638       isbM32m(DDRPHY, (DLYSELCH0 +   (channel_i * DDRCOMP_CH_OFFSET)), (tempD), ((BIT19|BIT18|BIT17|BIT16)|(BIT15|BIT14|BIT13|BIT12)|(BIT11|BIT10|BIT9|BIT8)|(BIT7|BIT6|BIT5|BIT4)|(BIT3|BIT2|BIT1|BIT0))); // DCOMP Delay Select: CTL,CMD,CLK,DQS,DQ
    639       isbM32m(DDRPHY, (TCOVREFCH0 +  (channel_i * DDRCOMP_CH_OFFSET)), ((0x05<<16)|(0x05<<8)|(0x05<<0)), ((BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)|(BIT13|BIT12|BIT11|BIT10|BIT9|BIT8)|(BIT5|BIT4|BIT3|BIT2|BIT1|BIT0))); // TCO Vref CLK,DQS,DQ
    640       isbM32m(DDRPHY, (CCBUFODTCH0 + (channel_i * DDRCOMP_CH_OFFSET)), ((0x03<<8)|(0x03<<0)), ((BIT12|BIT11|BIT10|BIT9|BIT8)|(BIT4|BIT3|BIT2|BIT1|BIT0))); // ODTCOMP CMD/CTL PU/PD
    641       isbM32m(DDRPHY, (COMPEN0CH0 +  (channel_i * DDRCOMP_CH_OFFSET)), (0), ((BIT31|BIT30)|BIT8)); // COMP
    642 
    643       #ifdef BACKUP_COMPS
    644       // DQ COMP Overrides
    645       isbM32m(DDRPHY, (DQDRVPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0A<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // RCOMP PU
    646       isbM32m(DDRPHY, (DQDRVPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0A<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // RCOMP PD
    647       isbM32m(DDRPHY, (DQDLYPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x10<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // DCOMP PU
    648       isbM32m(DDRPHY, (DQDLYPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x10<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // DCOMP PD
    649       isbM32m(DDRPHY, (DQODTPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0B<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // ODTCOMP PU
    650       isbM32m(DDRPHY, (DQODTPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0B<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // ODTCOMP PD
    651       isbM32m(DDRPHY, (DQTCOPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31), (BIT31)); // TCOCOMP PU
    652       isbM32m(DDRPHY, (DQTCOPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31), (BIT31)); // TCOCOMP PD
    653       // DQS COMP Overrides
    654       isbM32m(DDRPHY, (DQSDRVPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0A<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // RCOMP PU
    655       isbM32m(DDRPHY, (DQSDRVPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0A<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // RCOMP PD
    656       isbM32m(DDRPHY, (DQSDLYPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x10<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // DCOMP PU
    657       isbM32m(DDRPHY, (DQSDLYPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x10<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // DCOMP PD
    658       isbM32m(DDRPHY, (DQSODTPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0B<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // ODTCOMP PU
    659       isbM32m(DDRPHY, (DQSODTPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0B<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // ODTCOMP PD
    660       isbM32m(DDRPHY, (DQSTCOPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31), (BIT31)); // TCOCOMP PU
    661       isbM32m(DDRPHY, (DQSTCOPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31), (BIT31)); // TCOCOMP PD
    662       // CLK COMP Overrides
    663       isbM32m(DDRPHY, (CLKDRVPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0C<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // RCOMP PU
    664       isbM32m(DDRPHY, (CLKDRVPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0C<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // RCOMP PD
    665       isbM32m(DDRPHY, (CLKDLYPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x07<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // DCOMP PU
    666       isbM32m(DDRPHY, (CLKDLYPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x07<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // DCOMP PD
    667       isbM32m(DDRPHY, (CLKODTPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0B<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // ODTCOMP PU
    668       isbM32m(DDRPHY, (CLKODTPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0B<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // ODTCOMP PD
    669       isbM32m(DDRPHY, (CLKTCOPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31), (BIT31)); // TCOCOMP PU
    670       isbM32m(DDRPHY, (CLKTCOPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31), (BIT31)); // TCOCOMP PD
    671       // CMD COMP Overrides
    672       isbM32m(DDRPHY, (CMDDRVPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0D<<16)), (BIT31|(BIT21|BIT20|BIT19|BIT18|BIT17|BIT16))); // RCOMP PU
    673       isbM32m(DDRPHY, (CMDDRVPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0D<<16)), (BIT31|(BIT21|BIT20|BIT19|BIT18|BIT17|BIT16))); // RCOMP PD
    674       isbM32m(DDRPHY, (CMDDLYPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0A<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // DCOMP PU
    675       isbM32m(DDRPHY, (CMDDLYPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0A<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // DCOMP PD
    676       // CTL COMP Overrides
    677       isbM32m(DDRPHY, (CTLDRVPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0D<<16)), (BIT31|(BIT21|BIT20|BIT19|BIT18|BIT17|BIT16))); // RCOMP PU
    678       isbM32m(DDRPHY, (CTLDRVPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0D<<16)), (BIT31|(BIT21|BIT20|BIT19|BIT18|BIT17|BIT16))); // RCOMP PD
    679       isbM32m(DDRPHY, (CTLDLYPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0A<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // DCOMP PU
    680       isbM32m(DDRPHY, (CTLDLYPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x0A<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // DCOMP PD
    681       #else
    682       // DQ TCOCOMP Overrides
    683       isbM32m(DDRPHY, (DQTCOPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x1F<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // TCOCOMP PU
    684       isbM32m(DDRPHY, (DQTCOPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x1F<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // TCOCOMP PD
    685       // DQS TCOCOMP Overrides
    686       isbM32m(DDRPHY, (DQSTCOPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x1F<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // TCOCOMP PU
    687       isbM32m(DDRPHY, (DQSTCOPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x1F<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // TCOCOMP PD
    688       // CLK TCOCOMP Overrides
    689       isbM32m(DDRPHY, (CLKTCOPUCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x1F<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // TCOCOMP PU
    690       isbM32m(DDRPHY, (CLKTCOPDCTLCH0 + (channel_i * DDRCOMP_CH_OFFSET)), (BIT31|(0x1F<<16)), (BIT31|(BIT20|BIT19|BIT18|BIT17|BIT16))); // TCOCOMP PD
    691       #endif // BACKUP_COMPS
    692       // program STATIC delays
    693       #ifdef BACKUP_WCMD
    694       set_wcmd(channel_i, ddr_wcmd[PLATFORM_ID]);
    695       #else
    696       set_wcmd(channel_i, ddr_wclk[PLATFORM_ID] + HALF_CLK);
    697       #endif // BACKUP_WCMD
    698       for (rank_i=0; rank_i<NUM_RANKS; rank_i++) {
    699         if (mrc_params->rank_enables & (1<<rank_i)) {
    700           set_wclk(channel_i, rank_i, ddr_wclk[PLATFORM_ID]);
    701           #ifdef BACKUP_WCTL
    702           set_wctl(channel_i, rank_i, ddr_wctl[PLATFORM_ID]);
    703           #else
    704           set_wctl(channel_i, rank_i, ddr_wclk[PLATFORM_ID] + HALF_CLK);
    705           #endif // BACKUP_WCTL
    706         }
    707       }
    708     }
    709   }
    710   // COMP (non channel specific)
    711   //isbM32m(DDRPHY, (), (), ());
    712   isbM32m(DDRPHY, (DQANADRVPUCTL), (BIT30), (BIT30)); // RCOMP: Dither PU Enable
    713   isbM32m(DDRPHY, (DQANADRVPDCTL), (BIT30), (BIT30)); // RCOMP: Dither PD Enable
    714   isbM32m(DDRPHY, (CMDANADRVPUCTL), (BIT30), (BIT30)); // RCOMP: Dither PU Enable
    715   isbM32m(DDRPHY, (CMDANADRVPDCTL), (BIT30), (BIT30)); // RCOMP: Dither PD Enable
    716   isbM32m(DDRPHY, (CLKANADRVPUCTL), (BIT30), (BIT30)); // RCOMP: Dither PU Enable
    717   isbM32m(DDRPHY, (CLKANADRVPDCTL), (BIT30), (BIT30)); // RCOMP: Dither PD Enable
    718   isbM32m(DDRPHY, (DQSANADRVPUCTL), (BIT30), (BIT30)); // RCOMP: Dither PU Enable
    719   isbM32m(DDRPHY, (DQSANADRVPDCTL), (BIT30), (BIT30)); // RCOMP: Dither PD Enable
    720   isbM32m(DDRPHY, (CTLANADRVPUCTL), (BIT30), (BIT30)); // RCOMP: Dither PU Enable
    721   isbM32m(DDRPHY, (CTLANADRVPDCTL), (BIT30), (BIT30)); // RCOMP: Dither PD Enable
    722   isbM32m(DDRPHY, (DQANAODTPUCTL), (BIT30), (BIT30)); // ODT: Dither PU Enable
    723   isbM32m(DDRPHY, (DQANAODTPDCTL), (BIT30), (BIT30)); // ODT: Dither PD Enable
    724   isbM32m(DDRPHY, (CLKANAODTPUCTL), (BIT30), (BIT30)); // ODT: Dither PU Enable
    725   isbM32m(DDRPHY, (CLKANAODTPDCTL), (BIT30), (BIT30)); // ODT: Dither PD Enable
    726   isbM32m(DDRPHY, (DQSANAODTPUCTL), (BIT30), (BIT30)); // ODT: Dither PU Enable
    727   isbM32m(DDRPHY, (DQSANAODTPDCTL), (BIT30), (BIT30)); // ODT: Dither PD Enable
    728   isbM32m(DDRPHY, (DQANADLYPUCTL), (BIT30), (BIT30)); // DCOMP: Dither PU Enable
    729   isbM32m(DDRPHY, (DQANADLYPDCTL), (BIT30), (BIT30)); // DCOMP: Dither PD Enable
    730   isbM32m(DDRPHY, (CMDANADLYPUCTL), (BIT30), (BIT30)); // DCOMP: Dither PU Enable
    731   isbM32m(DDRPHY, (CMDANADLYPDCTL), (BIT30), (BIT30)); // DCOMP: Dither PD Enable
    732   isbM32m(DDRPHY, (CLKANADLYPUCTL), (BIT30), (BIT30)); // DCOMP: Dither PU Enable
    733   isbM32m(DDRPHY, (CLKANADLYPDCTL), (BIT30), (BIT30)); // DCOMP: Dither PD Enable
    734   isbM32m(DDRPHY, (DQSANADLYPUCTL), (BIT30), (BIT30)); // DCOMP: Dither PU Enable
    735   isbM32m(DDRPHY, (DQSANADLYPDCTL), (BIT30), (BIT30)); // DCOMP: Dither PD Enable
    736   isbM32m(DDRPHY, (CTLANADLYPUCTL), (BIT30), (BIT30)); // DCOMP: Dither PU Enable
    737   isbM32m(DDRPHY, (CTLANADLYPDCTL), (BIT30), (BIT30)); // DCOMP: Dither PD Enable
    738   isbM32m(DDRPHY, (DQANATCOPUCTL), (BIT30), (BIT30)); // TCO: Dither PU Enable
    739   isbM32m(DDRPHY, (DQANATCOPDCTL), (BIT30), (BIT30)); // TCO: Dither PD Enable
    740   isbM32m(DDRPHY, (CLKANATCOPUCTL), (BIT30), (BIT30)); // TCO: Dither PU Enable
    741   isbM32m(DDRPHY, (CLKANATCOPDCTL), (BIT30), (BIT30)); // TCO: Dither PD Enable
    742   isbM32m(DDRPHY, (DQSANATCOPUCTL), (BIT30), (BIT30)); // TCO: Dither PU Enable
    743   isbM32m(DDRPHY, (DQSANATCOPDCTL), (BIT30), (BIT30)); // TCO: Dither PD Enable
    744   isbM32m(DDRPHY, (TCOCNTCTRL), (0x1<<0), (BIT1|BIT0)); // TCOCOMP: Pulse Count
    745   isbM32m(DDRPHY, (CHNLBUFSTATIC), ((0x03<<24)|(0x03<<16)), ((BIT28|BIT27|BIT26|BIT25|BIT24)|(BIT20|BIT19|BIT18|BIT17|BIT16))); // ODT: CMD/CTL PD/PU
    746   isbM32m(DDRPHY, (MSCNTR), (0x64<<0), (BIT7|BIT6|BIT5|BIT4|BIT3|BIT2|BIT1|BIT0)); // Set 1us counter
    747   isbM32m(DDRPHY, (LATCH1CTL), (0x1<<28), (BIT30|BIT29|BIT28)); // ???
    748 
    749   // Release PHY from reset
    750   isbM32m(DDRPHY, MASTERRSTN, BIT0, BIT0); // PHYRSTN=1
    751 
    752   // STEP1:
    753   post_code(0x03, 0x11);
    754   for (channel_i=0; channel_i<NUM_CHANNELS; channel_i++) {
    755     if (mrc_params->channel_enables & (1<<channel_i)) {
    756       // DQ01-DQ23
    757       for (bl_grp_i=0; bl_grp_i<((NUM_BYTE_LANES/bl_divisor)/2); bl_grp_i++) {
    758         isbM32m(DDRPHY, (DQMDLLCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), (BIT13), (BIT13)); // Enable VREG
    759         delay_n(3);
    760       }
    761       // ECC
    762       isbM32m(DDRPHY, (ECCMDLLCTL), (BIT13), (BIT13)); // Enable VREG
    763       delay_n(3);
    764       // CMD
    765       isbM32m(DDRPHY, (CMDMDLLCTL + (channel_i * DDRIOCCC_CH_OFFSET)), (BIT13), (BIT13)); // Enable VREG
    766       delay_n(3);
    767       // CLK-CTL
    768       isbM32m(DDRPHY, (CCMDLLCTL + (channel_i * DDRIOCCC_CH_OFFSET)), (BIT13), (BIT13)); // Enable VREG
    769       delay_n(3);
    770     }
    771   }
    772 
    773   // STEP2:
    774   post_code(0x03, 0x12);
    775   delay_n(200);
    776   for (channel_i=0; channel_i<NUM_CHANNELS; channel_i++) {
    777     if (mrc_params->channel_enables & (1<<channel_i)) {
    778       // DQ01-DQ23
    779       for (bl_grp_i=0; bl_grp_i<((NUM_BYTE_LANES/bl_divisor)/2); bl_grp_i++) {
    780         isbM32m(DDRPHY, (DQMDLLCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), (BIT17), (BIT17)); // Enable MCDLL
    781         delay_n(50);
    782       }
    783       // ECC
    784       isbM32m(DDRPHY, (ECCMDLLCTL), (BIT17), (BIT17)); // Enable MCDLL
    785       delay_n(50);
    786       // CMD
    787       isbM32m(DDRPHY, (CMDMDLLCTL + (channel_i * DDRIOCCC_CH_OFFSET)), (BIT18), (BIT18)); // Enable MCDLL
    788       delay_n(50);
    789       // CLK-CTL
    790       isbM32m(DDRPHY, (CCMDLLCTL + (channel_i * DDRIOCCC_CH_OFFSET)), (BIT18), (BIT18)); // Enable MCDLL
    791       delay_n(50);
    792     }
    793   }
    794 
    795   // STEP3:
    796   post_code(0x03, 0x13);
    797   delay_n(100);
    798   for (channel_i=0; channel_i<NUM_CHANNELS; channel_i++) {
    799     if (mrc_params->channel_enables & (1<<channel_i)) {
    800       // DQ01-DQ23
    801       for (bl_grp_i=0; bl_grp_i<((NUM_BYTE_LANES/bl_divisor)/2); bl_grp_i++) {
    802 #ifdef FORCE_16BIT_DDRIO
    803         tempD = ((bl_grp_i) && (mrc_params->channel_width == x16)) ? ((0x1<<12)|(0x1<<8)|(0xF<<4)|(0xF<<0)) : ((0xF<<12)|(0xF<<8)|(0xF<<4)|(0xF<<0));
    804 #else
    805         tempD = ((0xF<<12)|(0xF<<8)|(0xF<<4)|(0xF<<0));
    806 #endif
    807         isbM32m(DDRPHY, (DQDLLTXCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), (tempD), ((BIT15|BIT14|BIT13|BIT12)|(BIT11|BIT10|BIT9|BIT8)|(BIT7|BIT6|BIT5|BIT4)|(BIT3|BIT2|BIT1|BIT0))); // Enable TXDLL
    808         delay_n(3);
    809         isbM32m(DDRPHY, (DQDLLRXCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), (BIT3|BIT2|BIT1|BIT0), (BIT3|BIT2|BIT1|BIT0)); // Enable RXDLL
    810         delay_n(3);
    811         isbM32m(DDRPHY, (B0OVRCTL + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), (BIT3|BIT2|BIT1|BIT0), (BIT3|BIT2|BIT1|BIT0)); // Enable RXDLL Overrides BL0
    812       }
    813 
    814       // ECC
    815       tempD = ((0xF<<12)|(0xF<<8)|(0xF<<4)|(0xF<<0));
    816       isbM32m(DDRPHY, (ECCDLLTXCTL), (tempD), ((BIT15|BIT14|BIT13|BIT12)|(BIT11|BIT10|BIT9|BIT8)|(BIT7|BIT6|BIT5|BIT4)|(BIT3|BIT2|BIT1|BIT0))); // Enable TXDLL
    817       delay_n(3);
    818 
    819       // CMD (PO)
    820       isbM32m(DDRPHY, (CMDDLLTXCTL + (channel_i * DDRIOCCC_CH_OFFSET)), ((0xF<<12)|(0xF<<8)|(0xF<<4)|(0xF<<0)), ((BIT15|BIT14|BIT13|BIT12)|(BIT11|BIT10|BIT9|BIT8)|(BIT7|BIT6|BIT5|BIT4)|(BIT3|BIT2|BIT1|BIT0))); // Enable TXDLL
    821       delay_n(3);
    822     }
    823   }
    824 
    825 
    826   // STEP4:
    827   post_code(0x03, 0x14);
    828   for (channel_i=0; channel_i<NUM_CHANNELS; channel_i++) {
    829     if (mrc_params->channel_enables & (1<<channel_i)) {
    830       // Host To Memory Clock Alignment (HMC) for 800/1066
    831       for (bl_grp_i=0; bl_grp_i<((NUM_BYTE_LANES/bl_divisor)/2); bl_grp_i++) {
    832         isbM32m(DDRPHY, (DQCLKALIGNREG2 + (bl_grp_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), ((bl_grp_i)?(0x3):(0x1)), (BIT3|BIT2|BIT1|BIT0)); // CLK_ALIGN_MOD_ID
    833       }
    834       isbM32m(DDRPHY, (ECCCLKALIGNREG2 + (channel_i * DDRIODQ_CH_OFFSET)), 0x2, (BIT3|BIT2|BIT1|BIT0)); // CLK_ALIGN_MOD_ID
    835       isbM32m(DDRPHY, (CMDCLKALIGNREG2 + (channel_i * DDRIODQ_CH_OFFSET)), 0x0, (BIT3|BIT2|BIT1|BIT0)); // CLK_ALIGN_MOD_ID
    836       isbM32m(DDRPHY, (CCCLKALIGNREG2 + (channel_i * DDRIODQ_CH_OFFSET)), 0x2, (BIT3|BIT2|BIT1|BIT0)); // CLK_ALIGN_MOD_ID
    837       isbM32m(DDRPHY, (CMDCLKALIGNREG0 + (channel_i * DDRIOCCC_CH_OFFSET)), (0x2<<4), (BIT5|BIT4)); // CLK_ALIGN_MODE
    838       isbM32m(DDRPHY, (CMDCLKALIGNREG1 + (channel_i * DDRIOCCC_CH_OFFSET)), ((0x18<<16)|(0x10<<8)|(0x8<<2)|(0x1<<0)), ((BIT22|BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)|(BIT14|BIT13|BIT12|BIT11|BIT10|BIT9|BIT8)|(BIT7|BIT6|BIT5|BIT4|BIT3|BIT2)|(BIT1|BIT0))); // NUM_SAMPLES, MAX_SAMPLES, MACRO_PI_STEP, MICRO_PI_STEP
    839       isbM32m(DDRPHY, (CMDCLKALIGNREG2 + (channel_i * DDRIOCCC_CH_OFFSET)), ((0x10<<16)|(0x4<<8)|(0x2<<4)), ((BIT20|BIT19|BIT18|BIT17|BIT16)|(BIT11|BIT10|BIT9|BIT8)|(BIT7|BIT6|BIT5|BIT4))); // ???, TOTAL_NUM_MODULES, FIRST_U_PARTITION
    840       #ifdef HMC_TEST
    841       isbM32m(DDRPHY, (CMDCLKALIGNREG0 + (channel_i * DDRIOCCC_CH_OFFSET)), BIT24, BIT24); // START_CLK_ALIGN=1
    842       while (isbR32m(DDRPHY, (CMDCLKALIGNREG0 + (channel_i * DDRIOCCC_CH_OFFSET))) & BIT24); // wait for START_CLK_ALIGN=0
    843       #endif // HMC_TEST
    844 
    845       // Set RD/WR Pointer Separation & COUNTEN & FIFOPTREN
    846       isbM32m(DDRPHY, (CMDPTRREG + (channel_i * DDRIOCCC_CH_OFFSET)), BIT0, BIT0); // WRPTRENABLE=1
    847 
    848 
    849 #ifdef SIM
    850       // comp is not working on simulator
    851 #else
    852       // COMP initial
    853       isbM32m(DDRPHY, (COMPEN0CH0 + (channel_i * DDRCOMP_CH_OFFSET)), BIT5, BIT5); // enable bypass for CLK buffer (PO)
    854       isbM32m(DDRPHY, (CMPCTRL), (BIT0), (BIT0)); // Initial COMP Enable
    855       while (isbR32m(DDRPHY, (CMPCTRL)) & BIT0); // wait for Initial COMP Enable = 0
    856       isbM32m(DDRPHY, (COMPEN0CH0 + (channel_i * DDRCOMP_CH_OFFSET)), ~BIT5, BIT5); // disable bypass for CLK buffer (PO)
    857 #endif
    858 
    859       // IOBUFACT
    860       // STEP4a
    861       isbM32m(DDRPHY, (CMDCFGREG0 + (channel_i * DDRIOCCC_CH_OFFSET)), BIT2, BIT2); // IOBUFACTRST_N=1
    862 
    863       // DDRPHY initialisation complete
    864       isbM32m(DDRPHY, (CMDPMCONFIG0 + (channel_i * DDRIOCCC_CH_OFFSET)), BIT20, BIT20); // SPID_INIT_COMPLETE=1
    865     }
    866   }
    867 
    868   LEAVEFN();
    869   return;
    870 }
    871 
    872 // jedec_init (aka PerformJedecInit):
    873 // This function performs JEDEC initialisation on all enabled channels.
    874 static void jedec_init(
    875     MRCParams_t *mrc_params,
    876     uint32_t silent)
    877 {
    878   uint8_t TWR, WL, Rank;
    879   uint32_t TCK;
    880 
    881   RegDTR0 DTR0reg;
    882 
    883   DramInitDDR3MRS0 mrs0Command;
    884   DramInitDDR3EMR1 emrs1Command;
    885   DramInitDDR3EMR2 emrs2Command;
    886   DramInitDDR3EMR3 emrs3Command;
    887 
    888   ENTERFN();
    889 
    890   // jedec_init starts
    891   if (!silent)
    892   {
    893     post_code(0x04, 0x00);
    894   }
    895 
    896   // Assert RESET# for 200us
    897   isbM32m(DDRPHY, CCDDR3RESETCTL, BIT1, (BIT8|BIT1)); // DDR3_RESET_SET=0, DDR3_RESET_RESET=1
    898 #ifdef QUICKSIM
    899       // Don't waste time during simulation
    900       delay_u(2);
    901 #else
    902   delay_u(200);
    903 #endif
    904   isbM32m(DDRPHY, CCDDR3RESETCTL, BIT8, (BIT8|BIT1)); // DDR3_RESET_SET=1, DDR3_RESET_RESET=0
    905 
    906   DTR0reg.raw = isbR32m(MCU, DTR0);
    907 
    908   // Set CKEVAL for populated ranks
    909   // then send NOP to each rank (#4550197)
    910   {
    911     uint32_t DRPbuffer;
    912     uint32_t DRMCbuffer;
    913 
    914     DRPbuffer = isbR32m(MCU, DRP);
    915     DRPbuffer &= 0x3;
    916     DRMCbuffer = isbR32m(MCU, DRMC);
    917     DRMCbuffer &= 0xFFFFFFFC;
    918     DRMCbuffer |= (BIT4 | DRPbuffer);
    919 
    920     isbW32m(MCU, DRMC, DRMCbuffer);
    921 
    922     for (Rank = 0; Rank < NUM_RANKS; Rank++)
    923     {
    924       // Skip to next populated rank
    925       if ((mrc_params->rank_enables & (1 << Rank)) == 0)
    926       {
    927         continue;
    928       }
    929 
    930       dram_init_command(DCMD_NOP(Rank));
    931     }
    932 
    933     isbW32m(MCU, DRMC, DRMC_DEFAULT);
    934   }
    935 
    936   // setup for emrs 2
    937   // BIT[15:11] --> Always "0"
    938   // BIT[10:09] --> Rtt_WR: want "Dynamic ODT Off" (0)
    939   // BIT[08]    --> Always "0"
    940   // BIT[07]    --> SRT: use sr_temp_range
    941   // BIT[06]    --> ASR: want "Manual SR Reference" (0)
    942   // BIT[05:03] --> CWL: use oem_tCWL
    943   // BIT[02:00] --> PASR: want "Full Array" (0)
    944   emrs2Command.raw = 0;
    945   emrs2Command.field.bankAddress = 2;
    946 
    947   WL = 5 + mrc_params->ddr_speed;
    948   emrs2Command.field.CWL = WL - 5;
    949   emrs2Command.field.SRT = mrc_params->sr_temp_range;
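  // For example, at DDR3-1066 (ddr_speed == 1) WL = 6, so the CWL field is
  // programmed to 1, which matches the JEDEC MR2 encoding for CWL = 6 clocks.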
    950 
    951   // setup for emrs 3
    952   // BIT[15:03] --> Always "0"
    953   // BIT[02]    --> MPR: want "Normal Operation" (0)
    954   // BIT[01:00] --> MPR_Loc: want "Predefined Pattern" (0)
    955   emrs3Command.raw = 0;
    956   emrs3Command.field.bankAddress = 3;
    957 
    958   // setup for emrs 1
    959   // BIT[15:13]     --> Always "0"
    960   // BIT[12:12]     --> Qoff: want "Output Buffer Enabled" (0)
    961   // BIT[11:11]     --> TDQS: want "Disabled" (0)
    962   // BIT[10:10]     --> Always "0"
    963   // BIT[09,06,02]  --> Rtt_nom: use rtt_nom_value
    964   // BIT[08]        --> Always "0"
    965   // BIT[07]        --> WR_LVL: want "Disabled" (0)
    966   // BIT[05,01]     --> DIC: use ron_value
    967   // BIT[04:03]     --> AL: additive latency want "0" (0)
    968   // BIT[00]        --> DLL: want "Enable" (0)
    969   //
    970   // (BIT5|BIT1) set Ron value
    971   // 00 --> RZQ/6 (40ohm)
    972   // 01 --> RZQ/7 (34ohm)
    973   // 1* --> RESERVED
    974   //
    975   // (BIT9|BIT6|BIT2) set Rtt_nom value
    976   // 000 --> Disabled
    977   // 001 --> RZQ/4 ( 60ohm)
    978   // 010 --> RZQ/2 (120ohm)
    979   // 011 --> RZQ/6 ( 40ohm)
    980   // 1** --> RESERVED
    981   emrs1Command.raw = 0;
    982   emrs1Command.field.bankAddress = 1;
    983   emrs1Command.field.dllEnabled = 0; // 0 = Enable , 1 = Disable
    984 
    985   if (mrc_params->ron_value == 0)
    986   {
    987     emrs1Command.field.DIC0 = DDR3_EMRS1_DIC_34;
    988   }
    989   else
    990   {
    991     emrs1Command.field.DIC0 = DDR3_EMRS1_DIC_40;
    992   }
    993 
    994 
    995   if (mrc_params->rtt_nom_value == 0)
    996   {
    997     emrs1Command.raw |= (DDR3_EMRS1_RTTNOM_40 << 6);
    998   }
    999   else if (mrc_params->rtt_nom_value == 1)
   1000   {
   1001     emrs1Command.raw |= (DDR3_EMRS1_RTTNOM_60 << 6);
   1002   }
   1003   else if (mrc_params->rtt_nom_value == 2)
   1004   {
   1005     emrs1Command.raw |= (DDR3_EMRS1_RTTNOM_120 << 6);
   1006   }
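  // For example, rtt_nom_value == 1 selects RZQ/4 (60 ohm), matching the
  // Rtt_nom encoding table above.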
   1007 
   1008   // save MRS1 value (excluding control fields)
   1009   mrc_params->mrs1 = emrs1Command.raw >> 6;
   1010 
   1011   // setup for mrs 0
   1012   // BIT[15:13]     --> Always "0"
   1013   // BIT[12]        --> PPD: for Quark (1)
   1014   // BIT[11:09]     --> WR: use oem_tWR
   1015   // BIT[08]        --> DLL: want "Reset" (1, self clearing)
   1016   // BIT[07]        --> MODE: want "Normal" (0)
   1017   // BIT[06:04,02]  --> CL: use oem_tCAS
   1018   // BIT[03]        --> RD_BURST_TYPE: want "Interleave" (1)
   1019   // BIT[01:00]     --> BL: want "8 Fixed" (0)
   1020   // WR:
   1021   // 0 --> 16
   1022   // 1 --> 5
   1023   // 2 --> 6
   1024   // 3 --> 7
   1025   // 4 --> 8
   1026   // 5 --> 10
   1027   // 6 --> 12
   1028   // 7 --> 14
   1029   // CL:
   1030   // BIT[02:02] "0" if oem_tCAS <= 11 (1866?)
   1031   // BIT[06:04] use oem_tCAS-4
   1032   mrs0Command.raw = 0;
   1033   mrs0Command.field.bankAddress = 0;
   1034   mrs0Command.field.dllReset = 1;
   1035   mrs0Command.field.BL = 0;
   1036   mrs0Command.field.PPD = 1;
   1037   mrs0Command.field.casLatency = DTR0reg.field.tCL + 1;
   1038 
   1039   TCK = tCK[mrc_params->ddr_speed];
   1040   TWR = MCEIL(15000, TCK);   // Per JEDEC: tWR=15000ps DDR2/3 from 800-1600
   1041   mrs0Command.field.writeRecovery = TWR - 4;
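  // For example, at DDR3-1066: TWR = MCEIL(15000, 1875) = 8, so writeRecovery
  // is programmed to 4, which the WR table above decodes as tWR = 8 clocks.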
   1042 
   1043   for (Rank = 0; Rank < NUM_RANKS; Rank++)
   1044   {
   1045     // Skip to next populated rank
   1046     if ((mrc_params->rank_enables & (1 << Rank)) == 0)
   1047     {
   1048       continue;
   1049     }
   1050 
   1051     emrs2Command.field.rankSelect = Rank;
   1052     dram_init_command(emrs2Command.raw);
   1053 
   1054     emrs3Command.field.rankSelect = Rank;
   1055     dram_init_command(emrs3Command.raw);
   1056 
   1057     emrs1Command.field.rankSelect = Rank;
   1058     dram_init_command(emrs1Command.raw);
   1059 
   1060     mrs0Command.field.rankSelect = Rank;
   1061     dram_init_command(mrs0Command.raw);
   1062 
   1063     dram_init_command(DCMD_ZQCL(Rank));
   1064   }
   1065 
   1066   LEAVEFN();
   1067   return;
   1068 }
   1069 
   1070 // rcvn_cal:
   1071 // POST_CODE[major] == 0x05
   1072 //
   1073 // This function will perform our RCVEN Calibration Algorithm.
   1074 // We will only use the 2xCLK domain timings to perform RCVEN Calibration.
   1075 // All byte lanes will be calibrated "simultaneously" per channel per rank.
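//
// In outline (see the code below for details):
//   1) program a coarse starting RCVEN delay on every byte lane,
//   2) find_rising_edge() locates the rising edge of the read DQS,
//   3) add a quarter clock (QRTR_CLK) to sit in the middle of the high pulse,
//   4) step back one full clock (FULL_CLK) at a time on any lane that still
//      samples "1", until every lane samples "0" (the DQS preamble),
//   5) add a final quarter clock to center the sample point in the preamble.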
   1076 static void rcvn_cal(
   1077     MRCParams_t *mrc_params)
   1078 {
   1079   uint8_t channel_i; // channel counter
   1080   uint8_t rank_i; // rank counter
   1081   uint8_t bl_i; // byte lane counter
   1082   uint8_t bl_divisor = (mrc_params->channel_width == x16) ? 2 : 1; // byte lane divisor
   1083 
   1084 #ifdef R2R_SHARING
   1085   uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES]; // used to find placement for rank2rank sharing configs
   1086 #ifndef BACKUP_RCVN
   1087   uint32_t num_ranks_enabled = 0; // used to find placement for rank2rank sharing configs
   1088 #endif // BACKUP_RCVN
   1089 #endif // R2R_SHARING
   1090 
   1091 #ifdef BACKUP_RCVN
   1092 #else
   1093   uint32_t tempD; // temporary DWORD
   1094   uint32_t delay[NUM_BYTE_LANES]; // absolute PI value to be programmed on the byte lane
   1095   RegDTR1 dtr1;
   1096   RegDTR1 dtr1save;
   1097 #endif // BACKUP_RCVN
   1098   ENTERFN();
   1099 
   1100   // rcvn_cal starts
   1101   post_code(0x05, 0x00);
   1102 
   1103 #ifndef BACKUP_RCVN
   1104   // need separate burst to sample DQS preamble
   1105   dtr1.raw = dtr1save.raw = isbR32m(MCU, DTR1);
   1106   dtr1.field.tCCD = 1;
   1107   isbW32m(MCU, DTR1, dtr1.raw);
   1108 #endif
   1109 
   1110 #ifdef R2R_SHARING
   1111   // need to set "final_delay[][]" elements to "0"
   1112   memset((void *) (final_delay), 0x00, (size_t) sizeof(final_delay));
   1113 #endif // R2R_SHARING
   1114 
   1115   // loop through each enabled channel
   1116   for (channel_i = 0; channel_i < NUM_CHANNELS; channel_i++)
   1117   {
   1118     if (mrc_params->channel_enables & (1 << channel_i))
   1119     {
   1120       // perform RCVEN Calibration on a per rank basis
   1121       for (rank_i = 0; rank_i < NUM_RANKS; rank_i++)
   1122       {
   1123         if (mrc_params->rank_enables & (1 << rank_i))
   1124         {
   1125           // POST_CODE here indicates the current channel and rank being calibrated
   1126           post_code(0x05, (0x10 + ((channel_i << 4) | rank_i)));
   1127 
   1128 #ifdef BACKUP_RCVN
   1129           // set hard-coded timing values
   1130           for (bl_i=0; bl_i<(NUM_BYTE_LANES/bl_divisor); bl_i++)
   1131           {
   1132             set_rcvn(channel_i, rank_i, bl_i, ddr_rcvn[PLATFORM_ID]);
   1133           }
   1134 #else
   1135           // enable FIFORST
   1136           for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i += 2)
   1137           {
   1138             isbM32m(DDRPHY, (B01PTRCTL1 + ((bl_i >> 1) * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), 0,
   1139                 BIT8); // 0 is enabled
   1140           } // bl_i loop
   1141           // initialise the starting delay to 128 PI (tCAS +1 CLK)
   1142           for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i++)
   1143           {
   1144 #ifdef SIM
   1145             // Original value was late at the end of DQS sequence
   1146             delay[bl_i] = 3 * FULL_CLK;
   1147 #else
   1148             delay[bl_i] = (4 + 1) * FULL_CLK; // 1x CLK domain timing is tCAS-4
   1149 #endif
   1150 
   1151             set_rcvn(channel_i, rank_i, bl_i, delay[bl_i]);
   1152           } // bl_i loop
   1153 
   1154           // now find the rising edge
   1155           find_rising_edge(mrc_params, delay, channel_i, rank_i, true);
   1156           // Now increase delay by 32 PI (1/4 CLK) to place in center of high pulse.
   1157           for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i++)
   1158           {
   1159             delay[bl_i] += QRTR_CLK;
   1160             set_rcvn(channel_i, rank_i, bl_i, delay[bl_i]);
   1161           } // bl_i loop
   1162           // Now decrement delay by 128 PI (1 CLK) until we sample a "0"
   1163           do
   1164           {
   1165 
   1166             tempD = sample_dqs(mrc_params, channel_i, rank_i, true);
   1167             for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i++)
   1168             {
   1169               if (tempD & (1 << bl_i))
   1170               {
   1171                 if (delay[bl_i] >= FULL_CLK)
   1172                 {
   1173                   delay[bl_i] -= FULL_CLK;
   1174                   set_rcvn(channel_i, rank_i, bl_i, delay[bl_i]);
   1175                 }
   1176                 else
   1177                 {
   1178                   // not enough delay
   1179                   training_message(channel_i, rank_i, bl_i);
   1180                   post_code(0xEE, 0x50);
   1181                 }
   1182               }
   1183             } // bl_i loop
   1184           } while (tempD & 0xFF);
   1185 
   1186 #ifdef R2R_SHARING
   1187           // increment "num_ranks_enabled"
   1188           num_ranks_enabled++;
   1189           // Finally increment delay by 32 PI (1/4 CLK) to place in center of preamble.
   1190           for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i++)
   1191           {
   1192             delay[bl_i] += QRTR_CLK;
   1193             // add "delay[]" values to "final_delay[][]" for rolling average
   1194             final_delay[channel_i][bl_i] += delay[bl_i];
   1195             // set timing based on rolling average values
   1196             set_rcvn(channel_i, rank_i, bl_i, ((final_delay[channel_i][bl_i]) / num_ranks_enabled));
   1197           } // bl_i loop
   1198 #else
   1199           // Finally increment delay by 32 PI (1/4 CLK) to place in center of preamble.
   1200           for (bl_i=0; bl_i<(NUM_BYTE_LANES/bl_divisor); bl_i++)
   1201           {
   1202             delay[bl_i] += QRTR_CLK;
   1203             set_rcvn(channel_i, rank_i, bl_i, delay[bl_i]);
   1204           } // bl_i loop
   1205 
   1206 #endif // R2R_SHARING
   1207 
   1208           // disable FIFORST
   1209           for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i += 2)
   1210           {
   1211             isbM32m(DDRPHY, (B01PTRCTL1 + ((bl_i >> 1) * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)), BIT8,
   1212                 BIT8); // 1 is disabled
   1213           } // bl_i loop
   1214 
   1215 #endif // BACKUP_RCVN
   1216 
   1217         } // if rank is enabled
   1218       } // rank_i loop
   1219     } // if channel is enabled
   1220   } // channel_i loop
   1221 
   1222 #ifndef BACKUP_RCVN
   1223   // restore original
   1224   isbW32m(MCU, DTR1, dtr1save.raw);
   1225 #endif
   1226 
   1227 #ifdef MRC_SV
   1228   if (mrc_params->tune_rcvn)
   1229   {
   1230     uint32_t rcven, val;
   1231     uint32_t rdcmd2rcven;
   1232 
   1233     /*
   1234      Formulas for RDCMD2DATAVALID & DIFFAMP dynamic timings
   1235 
   1236      1. Set after RCVEN training
   1237 
   1238      //Tune RDCMD2DATAVALID
   1239 
   1240      x80/x84[21:16]
   1241      MAX OF 2 RANKS : round up (rdcmd2rcven (rcven 1x) + 2x x 2 + PI/128) + 5
   1242 
   1243      //rdcmd2rcven x80/84[12:8]
   1244      //rcven 2x x70[23:20] & [11:8]
   1245 
   1246      //Tune DIFFAMP Timings
   1247 
   1248      //diffampen launch x88[20:16] & [4:0]  -- B01LATCTL1
   1249      MIN OF 2 RANKS : round down (rcven 1x + 2x x 2 + PI/128) - 1
   1250 
   1251      //diffampen length x8C/x90 [13:8]   -- B0ONDURCTL B1ONDURCTL
   1252      MAX OF 2 RANKS : roundup (rcven 1x + 2x x 2 + PI/128) + 5
   1253 
   1254 
    1255      2. Need to do a FIFORST after setting these values
   1256     */
   1257 
   1258     DPF(D_INFO, "BEFORE\n");
   1259     DPF(D_INFO, "### %x\n", isbR32m(DDRPHY, B0LATCTL0));
   1260     DPF(D_INFO, "### %x\n", isbR32m(DDRPHY, B01LATCTL1));
   1261     DPF(D_INFO, "### %x\n", isbR32m(DDRPHY, B0ONDURCTL));
   1262 
   1263     DPF(D_INFO, "### %x\n", isbR32m(DDRPHY, B1LATCTL0));
   1264     DPF(D_INFO, "### %x\n", isbR32m(DDRPHY, B1ONDURCTL));
   1265 
   1266     rcven = get_rcvn(0, 0, 0) / 128;
   1267     rdcmd2rcven = (isbR32m(DDRPHY, B0LATCTL0) >> 8) & 0x1F;
   1268     val = rdcmd2rcven + rcven + 6;
   1269     isbM32m(DDRPHY, B0LATCTL0, val << 16, (BIT21|BIT20|BIT19|BIT18|BIT17|BIT16));
   1270 
   1271     val = rdcmd2rcven + rcven - 1;
   1272     isbM32m(DDRPHY, B01LATCTL1, val << 0, (BIT4|BIT3|BIT2|BIT1|BIT0));
   1273 
   1274     val = rdcmd2rcven + rcven + 5;
   1275     isbM32m(DDRPHY, B0ONDURCTL, val << 8, (BIT13|BIT12|BIT11|BIT10|BIT9|BIT8));
   1276 
   1277     rcven = get_rcvn(0, 0, 1) / 128;
   1278     rdcmd2rcven = (isbR32m(DDRPHY, B1LATCTL0) >> 8) & 0x1F;
   1279     val = rdcmd2rcven + rcven + 6;
   1280     isbM32m(DDRPHY, B1LATCTL0, val << 16, (BIT21|BIT20|BIT19|BIT18|BIT17|BIT16));
   1281 
   1282     val = rdcmd2rcven + rcven - 1;
   1283     isbM32m(DDRPHY, B01LATCTL1, val << 16, (BIT20|BIT19|BIT18|BIT17|BIT16));
   1284 
   1285     val = rdcmd2rcven + rcven + 5;
   1286     isbM32m(DDRPHY, B1ONDURCTL, val << 8, (BIT13|BIT12|BIT11|BIT10|BIT9|BIT8));
   1287 
   1288     DPF(D_INFO, "AFTER\n");
   1289     DPF(D_INFO, "### %x\n", isbR32m(DDRPHY, B0LATCTL0));
   1290     DPF(D_INFO, "### %x\n", isbR32m(DDRPHY, B01LATCTL1));
   1291     DPF(D_INFO, "### %x\n", isbR32m(DDRPHY, B0ONDURCTL));
   1292 
   1293     DPF(D_INFO, "### %x\n", isbR32m(DDRPHY, B1LATCTL0));
   1294     DPF(D_INFO, "### %x\n", isbR32m(DDRPHY, B1ONDURCTL));
   1295 
   1296     DPF(D_INFO, "\nPress a key\n");
   1297     mgetc();
   1298 
   1299     // fifo reset
   1300     isbM32m(DDRPHY, B01PTRCTL1, 0, BIT8); // 0 is enabled
   1301     delay_n(3);
   1302     isbM32m(DDRPHY, B01PTRCTL1, BIT8, BIT8); // 1 is disabled
   1303   }
   1304 #endif
   1305 
   1306   LEAVEFN();
   1307   return;
   1308 }
   1309 
    1310 // Check memory by executing write/read/verify of many data patterns
    1311 // at the specified address. Bits in the result indicate a failure
    1312 // on a specific byte lane.
   1313 static uint32_t check_bls_ex(
   1314     MRCParams_t *mrc_params,
   1315     uint32_t address)
   1316 {
   1317   uint32_t result;
   1318   uint8_t first_run = 0;
   1319 
   1320   if (mrc_params->hte_setup)
   1321   {
   1322     mrc_params->hte_setup = 0;
   1323 
   1324     first_run = 1;
   1325     select_hte(mrc_params);
   1326   }
   1327 
   1328   result = WriteStressBitLanesHTE(mrc_params, address, first_run);
   1329 
   1330   DPF(D_TRN, "check_bls_ex result is %x\n", result);
   1331   return result;
   1332 }
   1333 
    1334 // Check memory by executing a simple write/read/verify at
    1335 // the specified address. Bits in the result indicate a failure
    1336 // on a specific byte lane.
   1337 static uint32_t check_rw_coarse(
   1338     MRCParams_t *mrc_params,
   1339     uint32_t address)
   1340 {
   1341   uint32_t result = 0;
   1342   uint8_t first_run = 0;
   1343 
   1344   if (mrc_params->hte_setup)
   1345   {
   1346     mrc_params->hte_setup = 0;
   1347 
   1348     first_run = 1;
   1349     select_hte(mrc_params);
   1350   }
   1351 
   1352   result = BasicWriteReadHTE(mrc_params, address, first_run, WRITE_TRAIN);
   1353 
   1354   DPF(D_TRN, "check_rw_coarse result is %x\n", result);
   1355   return result;
   1356 }
   1357 
   1358 // wr_level:
   1359 // POST_CODE[major] == 0x06
   1360 //
   1361 // This function will perform the Write Levelling algorithm (align WCLK and WDQS).
   1362 // This algorithm will act on each rank in each channel separately.
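// Summary of the flow implemented below (non-BACKUP_WDQS path):
//   1) PRECHARGE_ALL, enter MR1 write-levelling mode, tri-state WDQ (Sandy Bridge mode)
//   2) seed WDQS from the rank's WCLK and align it with find_rising_edge()
//   3) exit write-levelling mode and restore MR1 and DTR4
//   4) coarse check: push WDQS out one full CLK, then pull failing byte lanes
//      back one CLK at a time until check_rw_coarse() passes (WDQ tracks WDQS - 1/4 CLK)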
   1363 static void wr_level(
   1364     MRCParams_t *mrc_params)
   1365 {
   1366   uint8_t channel_i; // channel counter
   1367   uint8_t rank_i; // rank counter
   1368   uint8_t bl_i; // byte lane counter
   1369   uint8_t bl_divisor = (mrc_params->channel_width == x16) ? 2 : 1; // byte lane divisor
   1370 
   1371 #ifdef R2R_SHARING
   1372   uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES]; // used to find placement for rank2rank sharing configs
   1373 #ifndef BACKUP_WDQS
   1374   uint32_t num_ranks_enabled = 0; // used to find placement for rank2rank sharing configs
   1375 #endif // BACKUP_WDQS
   1376 #endif // R2R_SHARING
   1377 
   1378 #ifdef BACKUP_WDQS
   1379 #else
   1380   bool all_edges_found; // determines stop condition for CRS_WR_LVL
   1381   uint32_t delay[NUM_BYTE_LANES]; // absolute PI value to be programmed on the byte lane
   1382   // static makes it so the data is loaded in the heap once by shadow(), where
   1383   // non-static copies the data onto the stack every time this function is called.
   1384 
   1385   uint32_t address; // address to be checked during COARSE_WR_LVL
   1386   RegDTR4 dtr4;
   1387   RegDTR4 dtr4save;
   1388 #endif // BACKUP_WDQS
   1389 
   1390   ENTERFN();
   1391 
   1392   // wr_level starts
   1393   post_code(0x06, 0x00);
   1394 
   1395 #ifdef R2R_SHARING
   1396   // need to set "final_delay[][]" elements to "0"
   1397   memset((void *) (final_delay), 0x00, (size_t) sizeof(final_delay));
   1398 #endif // R2R_SHARING
   1399   // loop through each enabled channel
   1400   for (channel_i = 0; channel_i < NUM_CHANNELS; channel_i++)
   1401   {
   1402     if (mrc_params->channel_enables & (1 << channel_i))
   1403     {
   1404       // perform WRITE LEVELING algorithm on a per rank basis
   1405       for (rank_i = 0; rank_i < NUM_RANKS; rank_i++)
   1406       {
   1407         if (mrc_params->rank_enables & (1 << rank_i))
   1408         {
   1409           // POST_CODE here indicates the current rank and channel being calibrated
   1410           post_code(0x06, (0x10 + ((channel_i << 4) | rank_i)));
   1411 
   1412 #ifdef BACKUP_WDQS
   1413           for (bl_i=0; bl_i<(NUM_BYTE_LANES/bl_divisor); bl_i++)
   1414           {
   1415             set_wdqs(channel_i, rank_i, bl_i, ddr_wdqs[PLATFORM_ID]);
   1416             set_wdq(channel_i, rank_i, bl_i, (ddr_wdqs[PLATFORM_ID] - QRTR_CLK));
   1417           }
   1418 #else
   1419 
   1420           { // Begin product specific code
   1421 
   1422             // perform a single PRECHARGE_ALL command to make DRAM state machine go to IDLE state
   1423             dram_init_command(DCMD_PREA(rank_i));
   1424 
   1425             // enable Write Levelling Mode (EMRS1 w/ Write Levelling Mode Enable)
   1426             dram_init_command(DCMD_MRS1(rank_i,0x0082));
   1427 
   1428             // set ODT DRAM Full Time Termination disable in MCU
   1429             dtr4.raw = dtr4save.raw = isbR32m(MCU, DTR4);
   1430             dtr4.field.ODTDIS = 1;
   1431             isbW32m(MCU, DTR4, dtr4.raw);
   1432 
   1433             for (bl_i = 0; bl_i < ((NUM_BYTE_LANES / bl_divisor) / 2); bl_i++)
   1434             {
   1435               isbM32m(DDRPHY, DQCTL + (DDRIODQ_BL_OFFSET * bl_i) + (DDRIODQ_CH_OFFSET * channel_i),
   1436                   (BIT28 | (0x1 << 8) | (0x1 << 6) | (0x1 << 4) | (0x1 << 2)),
   1437                   (BIT28 | (BIT9|BIT8) | (BIT7|BIT6) | (BIT5|BIT4) | (BIT3|BIT2))); // Enable Sandy Bridge Mode (WDQ Tri-State) & Ensure 5 WDQS pulses during Write Leveling
   1438             }
   1439 
   1440             isbM32m(DDRPHY, CCDDR3RESETCTL + (DDRIOCCC_CH_OFFSET * channel_i), (BIT16), (BIT16)); // Write Leveling Mode enabled in IO
   1441           } // End product specific code
   1442           // Initialise the starting delay to WCLK
   1443           for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i++)
   1444           {
   1445             { // Begin product specific code
   1446               // CLK0 --> RK0
   1447               // CLK1 --> RK1
   1448               delay[bl_i] = get_wclk(channel_i, rank_i);
   1449             } // End product specific code
   1450             set_wdqs(channel_i, rank_i, bl_i, delay[bl_i]);
   1451           } // bl_i loop
   1452           // now find the rising edge
   1453           find_rising_edge(mrc_params, delay, channel_i, rank_i, false);
   1454           { // Begin product specific code
   1455             // disable Write Levelling Mode
   1456             isbM32m(DDRPHY, CCDDR3RESETCTL + (DDRIOCCC_CH_OFFSET * channel_i), (0), (BIT16)); // Write Leveling Mode disabled in IO
   1457 
   1458             for (bl_i = 0; bl_i < ((NUM_BYTE_LANES / bl_divisor) / 2); bl_i++)
   1459             {
   1460               isbM32m(DDRPHY, DQCTL + (DDRIODQ_BL_OFFSET * bl_i) + (DDRIODQ_CH_OFFSET * channel_i),
   1461                   ((0x1 << 8) | (0x1 << 6) | (0x1 << 4) | (0x1 << 2)),
   1462                   (BIT28 | (BIT9|BIT8) | (BIT7|BIT6) | (BIT5|BIT4) | (BIT3|BIT2))); // Disable Sandy Bridge Mode & Ensure 4 WDQS pulses during normal operation
   1463             } // bl_i loop
   1464 
   1465             // restore original DTR4
   1466             isbW32m(MCU, DTR4, dtr4save.raw);
   1467 
   1468             // restore original value (Write Levelling Mode Disable)
   1469             dram_init_command(DCMD_MRS1(rank_i, mrc_params->mrs1));
   1470 
   1471             // perform a single PRECHARGE_ALL command to make DRAM state machine go to IDLE state
   1472             dram_init_command(DCMD_PREA(rank_i));
   1473           } // End product specific code
   1474 
   1475           post_code(0x06, (0x30 + ((channel_i << 4) | rank_i)));
   1476 
   1477           // COARSE WRITE LEVEL:
   1478           // check that we're on the correct clock edge
   1479 
   1480           // hte reconfiguration request
   1481           mrc_params->hte_setup = 1;
   1482 
   1483           // start CRS_WR_LVL with WDQS = WDQS + 128 PI
   1484           for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i++)
   1485           {
   1486             delay[bl_i] = get_wdqs(channel_i, rank_i, bl_i) + FULL_CLK;
   1487             set_wdqs(channel_i, rank_i, bl_i, delay[bl_i]);
   1488             // program WDQ timings based on WDQS (WDQ = WDQS - 32 PI)
   1489             set_wdq(channel_i, rank_i, bl_i, (delay[bl_i] - QRTR_CLK));
   1490           } // bl_i loop
   1491 
   1492           // get an address in the targeted channel/rank
   1493           address = get_addr(mrc_params, channel_i, rank_i);
   1494           do
   1495           {
   1496             uint32_t coarse_result = 0x00;
   1497             uint32_t coarse_result_mask = byte_lane_mask(mrc_params);
   1498             all_edges_found = true; // assume pass
   1499 
   1500 #ifdef SIM
    1501             // need to restore memory to the idle state as the write can be out of sync
   1502             dram_init_command (DCMD_PREA(rank_i));
   1503 #endif
   1504 
   1505             mrc_params->hte_setup = 1;
   1506             coarse_result = check_rw_coarse(mrc_params, address);
   1507 
   1508             // check for failures and margin the byte lane back 128 PI (1 CLK)
   1509             for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i++)
   1510             {
   1511               if (coarse_result & (coarse_result_mask << bl_i))
   1512               {
   1513                 all_edges_found = false;
   1514                 delay[bl_i] -= FULL_CLK;
   1515                 set_wdqs(channel_i, rank_i, bl_i, delay[bl_i]);
   1516                 // program WDQ timings based on WDQS (WDQ = WDQS - 32 PI)
   1517                 set_wdq(channel_i, rank_i, bl_i, (delay[bl_i] - QRTR_CLK));
   1518               }
   1519             } // bl_i loop
   1520 
   1521           } while (!all_edges_found);
   1522 
   1523 #ifdef R2R_SHARING
   1524           // increment "num_ranks_enabled"
   1525           num_ranks_enabled++;
   1526           // accumulate "final_delay[][]" values from "delay[]" values for rolling average
   1527           for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i++)
   1528           {
   1529             final_delay[channel_i][bl_i] += delay[bl_i];
   1530             set_wdqs(channel_i, rank_i, bl_i, ((final_delay[channel_i][bl_i]) / num_ranks_enabled));
   1531             // program WDQ timings based on WDQS (WDQ = WDQS - 32 PI)
   1532             set_wdq(channel_i, rank_i, bl_i, ((final_delay[channel_i][bl_i]) / num_ranks_enabled) - QRTR_CLK);
   1533           } // bl_i loop
   1534 #endif // R2R_SHARING
   1535 #endif // BACKUP_WDQS
   1536 
   1537         } // if rank is enabled
   1538       } // rank_i loop
   1539     } // if channel is enabled
   1540   } // channel_i loop
   1541 
   1542   LEAVEFN();
   1543   return;
   1544 }
   1545 
   1546 // rd_train:
   1547 // POST_CODE[major] == 0x07
   1548 //
   1549 // This function will perform the READ TRAINING Algorithm on all channels/ranks/byte_lanes simultaneously to minimize execution time.
   1550 // The idea here is to train the VREF and RDQS (and eventually RDQ) values to achieve maximum READ margins.
   1551 // The algorithm will first determine the X coordinate (RDQS setting).
   1552 // This is done by collapsing the VREF eye until we find a minimum required RDQS eye for VREF_MIN and VREF_MAX.
   1553 // Then we take the averages of the RDQS eye at VREF_MIN and VREF_MAX, then average those; this will be the final X coordinate.
   1554 // The algorithm will then determine the Y coordinate (VREF setting).
   1555 // This is done by collapsing the RDQS eye until we find a minimum required VREF eye for RDQS_MIN and RDQS_MAX.
   1556 // Then we take the averages of the VREF eye at RDQS_MIN and RDQS_MAX, then average those; this will be the final Y coordinate.
    1557 // NOTE: this algorithm assumes the eye curves have a one-to-one relationship, meaning for each X the curve has only one Y and vice versa.
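// In other words, the search walks four approach vectors over the 2D (RDQS, VREF) eye
// (LEFT/RIGHT in RDQS crossed with BOTTOM/TOP in VREF), records the passing boundary for
// each, and places the final operating point at the average of the per-side averages
// (see x_center[] / y_center[] below).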
   1558 static void rd_train(
   1559     MRCParams_t *mrc_params)
   1560 {
   1561 
   1562 #define MIN_RDQS_EYE 10 // in PI Codes
   1563 #define MIN_VREF_EYE 10 // in VREF Codes
   1564 #define RDQS_STEP 1     // how many RDQS codes to jump while margining
   1565 #define VREF_STEP 1     // how many VREF codes to jump while margining
   1566 #define VREF_MIN (0x00) // offset into "vref_codes[]" for minimum allowed VREF setting
   1567 #define VREF_MAX (0x3F) // offset into "vref_codes[]" for maximum allowed VREF setting
   1568 #define RDQS_MIN (0x00) // minimum RDQS delay value
   1569 #define RDQS_MAX (0x3F) // maximum RDQS delay value
   1570 #define B 0 // BOTTOM VREF
   1571 #define T 1 // TOP VREF
   1572 #define L 0 // LEFT RDQS
   1573 #define R 1 // RIGHT RDQS
   1574 
   1575   uint8_t channel_i; // channel counter
   1576   uint8_t rank_i; // rank counter
   1577   uint8_t bl_i; // byte lane counter
   1578   uint8_t bl_divisor = (mrc_params->channel_width == x16) ? 2 : 1; // byte lane divisor
   1579 #ifdef BACKUP_RDQS
   1580 #else
   1581   uint8_t side_x; // tracks LEFT/RIGHT approach vectors
   1582   uint8_t side_y; // tracks BOTTOM/TOP approach vectors
   1583   uint8_t x_coordinate[2/*side_x*/][2/*side_y*/][NUM_CHANNELS][NUM_RANKS][NUM_BYTE_LANES]; // X coordinate data (passing RDQS values) for approach vectors
   1584   uint8_t y_coordinate[2/*side_x*/][2/*side_y*/][NUM_CHANNELS][NUM_BYTE_LANES]; // Y coordinate data (passing VREF values) for approach vectors
   1585   uint8_t x_center[NUM_CHANNELS][NUM_RANKS][NUM_BYTE_LANES]; // centered X (RDQS)
   1586   uint8_t y_center[NUM_CHANNELS][NUM_BYTE_LANES]; // centered Y (VREF)
   1587   uint32_t address; // target address for "check_bls_ex()"
   1588   uint32_t result; // result of "check_bls_ex()"
   1589   uint32_t bl_mask; // byte lane mask for "result" checking
   1590 #ifdef R2R_SHARING
   1591   uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES]; // used to find placement for rank2rank sharing configs
   1592   uint32_t num_ranks_enabled = 0; // used to find placement for rank2rank sharing configs
   1593 #endif // R2R_SHARING
   1594 #endif // BACKUP_RDQS
   1595   // rd_train starts
   1596   post_code(0x07, 0x00);
   1597 
   1598   ENTERFN();
   1599 
   1600 #ifdef BACKUP_RDQS
   1601   for (channel_i=0; channel_i<NUM_CHANNELS; channel_i++)
   1602   {
   1603     if (mrc_params->channel_enables & (1<<channel_i))
   1604     {
   1605       for (rank_i=0; rank_i<NUM_RANKS; rank_i++)
   1606       {
   1607         if (mrc_params->rank_enables & (1<<rank_i))
   1608         {
   1609           for (bl_i=0; bl_i<(NUM_BYTE_LANES/bl_divisor); bl_i++)
   1610           {
   1611             set_rdqs(channel_i, rank_i, bl_i, ddr_rdqs[PLATFORM_ID]);
   1612           } // bl_i loop
   1613         } // if rank is enabled
   1614       } // rank_i loop
   1615     } // if channel is enabled
   1616   } // channel_i loop
   1617 #else
   1618   // initialise x/y_coordinate arrays
   1619   for (channel_i = 0; channel_i < NUM_CHANNELS; channel_i++)
   1620   {
   1621     if (mrc_params->channel_enables & (1 << channel_i))
   1622     {
   1623       for (rank_i = 0; rank_i < NUM_RANKS; rank_i++)
   1624       {
   1625         if (mrc_params->rank_enables & (1 << rank_i))
   1626         {
   1627           for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i++)
   1628           {
   1629             // x_coordinate:
   1630             x_coordinate[L][B][channel_i][rank_i][bl_i] = RDQS_MIN;
   1631             x_coordinate[R][B][channel_i][rank_i][bl_i] = RDQS_MAX;
   1632             x_coordinate[L][T][channel_i][rank_i][bl_i] = RDQS_MIN;
   1633             x_coordinate[R][T][channel_i][rank_i][bl_i] = RDQS_MAX;
   1634             // y_coordinate:
   1635             y_coordinate[L][B][channel_i][bl_i] = VREF_MIN;
   1636             y_coordinate[R][B][channel_i][bl_i] = VREF_MIN;
   1637             y_coordinate[L][T][channel_i][bl_i] = VREF_MAX;
   1638             y_coordinate[R][T][channel_i][bl_i] = VREF_MAX;
   1639           } // bl_i loop
   1640         } // if rank is enabled
   1641       } // rank_i loop
   1642     } // if channel is enabled
   1643   } // channel_i loop
   1644 
   1645   // initialise other variables
   1646   bl_mask = byte_lane_mask(mrc_params);
   1647   address = get_addr(mrc_params, 0, 0);
   1648 
   1649 #ifdef R2R_SHARING
   1650   // need to set "final_delay[][]" elements to "0"
   1651   memset((void *) (final_delay), 0x00, (size_t) sizeof(final_delay));
   1652 #endif // R2R_SHARING
   1653 
   1654   // look for passing coordinates
   1655   for (side_y = B; side_y <= T; side_y++)
   1656   {
   1657     for (side_x = L; side_x <= R; side_x++)
   1658     {
   1659 
   1660       post_code(0x07, (0x10 + (side_y * 2) + (side_x)));
   1661 
   1662       // find passing values
   1663       for (channel_i = 0; channel_i < NUM_CHANNELS; channel_i++)
   1664       {
   1665         if (mrc_params->channel_enables & (0x1 << channel_i))
   1666         {
   1667           for (rank_i = 0; rank_i < NUM_RANKS; rank_i++)
   1668           {
   1669 
   1670             if (mrc_params->rank_enables & (0x1 << rank_i))
   1671             {
   1672               // set x/y_coordinate search starting settings
   1673               for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i++)
   1674               {
   1675                 set_rdqs(channel_i, rank_i, bl_i, x_coordinate[side_x][side_y][channel_i][rank_i][bl_i]);
   1676                 set_vref(channel_i, bl_i, y_coordinate[side_x][side_y][channel_i][bl_i]);
   1677               } // bl_i loop
   1678               // get an address in the target channel/rank
   1679               address = get_addr(mrc_params, channel_i, rank_i);
   1680 
   1681               // request HTE reconfiguration
   1682               mrc_params->hte_setup = 1;
   1683 
   1684               // test the settings
   1685               do
   1686               {
   1687 
   1688                 // result[07:00] == failing byte lane (MAX 8)
   1689                 result = check_bls_ex( mrc_params, address);
   1690 
   1691                 // check for failures
   1692                 if (result & 0xFF)
   1693                 {
   1694                   // at least 1 byte lane failed
   1695                   for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i++)
   1696                   {
   1697                     if (result & (bl_mask << bl_i))
   1698                     {
   1699                       // adjust the RDQS values accordingly
   1700                       if (side_x == L)
   1701                       {
   1702                         x_coordinate[L][side_y][channel_i][rank_i][bl_i] += RDQS_STEP;
   1703                       }
   1704                       else
   1705                       {
   1706                         x_coordinate[R][side_y][channel_i][rank_i][bl_i] -= RDQS_STEP;
   1707                       }
   1708                       // check that we haven't closed the RDQS_EYE too much
   1709                       if ((x_coordinate[L][side_y][channel_i][rank_i][bl_i] > (RDQS_MAX - MIN_RDQS_EYE)) ||
   1710                           (x_coordinate[R][side_y][channel_i][rank_i][bl_i] < (RDQS_MIN + MIN_RDQS_EYE))
   1711                           ||
   1712                           (x_coordinate[L][side_y][channel_i][rank_i][bl_i]
   1713                               == x_coordinate[R][side_y][channel_i][rank_i][bl_i]))
   1714                       {
   1715                         // not enough RDQS margin available at this VREF
   1716                         // update VREF values accordingly
   1717                         if (side_y == B)
   1718                         {
   1719                           y_coordinate[side_x][B][channel_i][bl_i] += VREF_STEP;
   1720                         }
   1721                         else
   1722                         {
   1723                           y_coordinate[side_x][T][channel_i][bl_i] -= VREF_STEP;
   1724                         }
   1725                         // check that we haven't closed the VREF_EYE too much
   1726                         if ((y_coordinate[side_x][B][channel_i][bl_i] > (VREF_MAX - MIN_VREF_EYE)) ||
   1727                             (y_coordinate[side_x][T][channel_i][bl_i] < (VREF_MIN + MIN_VREF_EYE)) ||
   1728                             (y_coordinate[side_x][B][channel_i][bl_i] == y_coordinate[side_x][T][channel_i][bl_i]))
   1729                         {
   1730                           // VREF_EYE collapsed below MIN_VREF_EYE
   1731                           training_message(channel_i, rank_i, bl_i);
   1732                           post_code(0xEE, (0x70 + (side_y * 2) + (side_x)));
   1733                         }
   1734                         else
   1735                         {
   1736                           // update the VREF setting
   1737                           set_vref(channel_i, bl_i, y_coordinate[side_x][side_y][channel_i][bl_i]);
   1738                           // reset the X coordinate to begin the search at the new VREF
   1739                           x_coordinate[side_x][side_y][channel_i][rank_i][bl_i] =
   1740                               (side_x == L) ? (RDQS_MIN) : (RDQS_MAX);
   1741                         }
   1742                       }
   1743                       // update the RDQS setting
   1744                       set_rdqs(channel_i, rank_i, bl_i, x_coordinate[side_x][side_y][channel_i][rank_i][bl_i]);
   1745                     } // if bl_i failed
   1746                   } // bl_i loop
   1747                 } // at least 1 byte lane failed
   1748               } while (result & 0xFF);
   1749             } // if rank is enabled
   1750           } // rank_i loop
   1751         } // if channel is enabled
   1752       } // channel_i loop
   1753     } // side_x loop
   1754   } // side_y loop
   1755 
   1756   post_code(0x07, 0x20);
   1757 
   1758   // find final RDQS (X coordinate) & final VREF (Y coordinate)
   1759   for (channel_i = 0; channel_i < NUM_CHANNELS; channel_i++)
   1760   {
   1761     if (mrc_params->channel_enables & (1 << channel_i))
   1762     {
   1763       for (rank_i = 0; rank_i < NUM_RANKS; rank_i++)
   1764       {
   1765         if (mrc_params->rank_enables & (1 << rank_i))
   1766         {
   1767           for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i++)
   1768           {
   1769             uint32_t tempD1;
   1770             uint32_t tempD2;
   1771 
   1772             // x_coordinate:
   1773             DPF(D_INFO, "RDQS T/B eye rank%d lane%d : %d-%d %d-%d\n", rank_i, bl_i,
   1774                 x_coordinate[L][T][channel_i][rank_i][bl_i],
   1775                 x_coordinate[R][T][channel_i][rank_i][bl_i],
   1776                 x_coordinate[L][B][channel_i][rank_i][bl_i],
   1777                 x_coordinate[R][B][channel_i][rank_i][bl_i]);
   1778 
   1779             tempD1 = (x_coordinate[R][T][channel_i][rank_i][bl_i] + x_coordinate[L][T][channel_i][rank_i][bl_i]) / 2; // average the TOP side LEFT & RIGHT values
   1780             tempD2 = (x_coordinate[R][B][channel_i][rank_i][bl_i] + x_coordinate[L][B][channel_i][rank_i][bl_i]) / 2; // average the BOTTOM side LEFT & RIGHT values
   1781             x_center[channel_i][rank_i][bl_i] = (uint8_t) ((tempD1 + tempD2) / 2); // average the above averages
   1782 
   1783             // y_coordinate:
   1784             DPF(D_INFO, "VREF R/L eye lane%d : %d-%d %d-%d\n", bl_i,
   1785                 y_coordinate[R][B][channel_i][bl_i],
   1786                 y_coordinate[R][T][channel_i][bl_i],
   1787                 y_coordinate[L][B][channel_i][bl_i],
   1788                 y_coordinate[L][T][channel_i][bl_i]);
   1789 
   1790             tempD1 = (y_coordinate[R][T][channel_i][bl_i] + y_coordinate[R][B][channel_i][bl_i]) / 2; // average the RIGHT side TOP & BOTTOM values
   1791             tempD2 = (y_coordinate[L][T][channel_i][bl_i] + y_coordinate[L][B][channel_i][bl_i]) / 2; // average the LEFT side TOP & BOTTOM values
   1792             y_center[channel_i][bl_i] = (uint8_t) ((tempD1 + tempD2) / 2); // average the above averages
   1793           } // bl_i loop
   1794         } // if rank is enabled
   1795       } // rank_i loop
   1796     } // if channel is enabled
   1797   } // channel_i loop
   1798 
   1799 #ifdef RX_EYE_CHECK
   1800   // perform an eye check
   1801   for (side_y=B; side_y<=T; side_y++)
   1802   {
   1803     for (side_x=L; side_x<=R; side_x++)
   1804     {
   1805 
   1806       post_code(0x07, (0x30 + (side_y * 2) + (side_x)));
   1807 
   1808       // update the settings for the eye check
   1809       for (channel_i=0; channel_i<NUM_CHANNELS; channel_i++)
   1810       {
   1811         if (mrc_params->channel_enables & (1<<channel_i))
   1812         {
   1813           for (rank_i=0; rank_i<NUM_RANKS; rank_i++)
   1814           {
   1815             if (mrc_params->rank_enables & (1<<rank_i))
   1816             {
   1817               for (bl_i=0; bl_i<(NUM_BYTE_LANES/bl_divisor); bl_i++)
   1818               {
   1819                 if (side_x == L)
   1820                 {
   1821                   set_rdqs(channel_i, rank_i, bl_i, (x_center[channel_i][rank_i][bl_i] - (MIN_RDQS_EYE / 2)));
   1822                 }
   1823                 else
   1824                 {
   1825                   set_rdqs(channel_i, rank_i, bl_i, (x_center[channel_i][rank_i][bl_i] + (MIN_RDQS_EYE / 2)));
   1826                 }
   1827                 if (side_y == B)
   1828                 {
   1829                   set_vref(channel_i, bl_i, (y_center[channel_i][bl_i] - (MIN_VREF_EYE / 2)));
   1830                 }
   1831                 else
   1832                 {
   1833                   set_vref(channel_i, bl_i, (y_center[channel_i][bl_i] + (MIN_VREF_EYE / 2)));
   1834                 }
   1835               } // bl_i loop
   1836             } // if rank is enabled
   1837           } // rank_i loop
   1838         } // if channel is enabled
   1839       } // channel_i loop
   1840 
   1841       // request HTE reconfiguration
   1842       mrc_params->hte_setup = 1;
   1843 
   1844       // check the eye
   1845       if (check_bls_ex( mrc_params, address) & 0xFF)
   1846       {
   1847         // one or more byte lanes failed
   1848         post_code(0xEE, (0x74 + (side_x * 2) + (side_y)));
   1849       }
   1850     } // side_x loop
   1851   } // side_y loop
   1852 #endif // RX_EYE_CHECK
   1853 
   1854   post_code(0x07, 0x40);
   1855 
   1856   // set final placements
   1857   for (channel_i = 0; channel_i < NUM_CHANNELS; channel_i++)
   1858   {
   1859     if (mrc_params->channel_enables & (1 << channel_i))
   1860     {
   1861       for (rank_i = 0; rank_i < NUM_RANKS; rank_i++)
   1862       {
   1863         if (mrc_params->rank_enables & (1 << rank_i))
   1864         {
   1865 #ifdef R2R_SHARING
   1866           // increment "num_ranks_enabled"
   1867           num_ranks_enabled++;
   1868 #endif // R2R_SHARING
   1869           for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i++)
   1870           {
   1871             // x_coordinate:
   1872 #ifdef R2R_SHARING
   1873             final_delay[channel_i][bl_i] += x_center[channel_i][rank_i][bl_i];
   1874             set_rdqs(channel_i, rank_i, bl_i, ((final_delay[channel_i][bl_i]) / num_ranks_enabled));
   1875 #else
   1876             set_rdqs(channel_i, rank_i, bl_i, x_center[channel_i][rank_i][bl_i]);
   1877 #endif // R2R_SHARING
   1878             // y_coordinate:
   1879             set_vref(channel_i, bl_i, y_center[channel_i][bl_i]);
   1880           } // bl_i loop
   1881         } // if rank is enabled
   1882       } // rank_i loop
   1883     } // if channel is enabled
   1884   } // channel_i loop
   1885 #endif // BACKUP_RDQS
   1886   LEAVEFN();
   1887   return;
   1888 }
   1889 
   1890 // wr_train:
   1891 // POST_CODE[major] == 0x08
   1892 //
   1893 // This function will perform the WRITE TRAINING Algorithm on all channels/ranks/byte_lanes simultaneously to minimize execution time.
   1894 // The idea here is to train the WDQ timings to achieve maximum WRITE margins.
   1895 // The algorithm will start with WDQ at the current WDQ setting (tracks WDQS in WR_LVL) +/- 32 PIs (+/- 1/4 CLK) and collapse the eye until all data patterns pass.
   1896 // This is because WDQS will be aligned to WCLK by the Write Leveling algorithm and WDQ will only ever have a 1/2 CLK window of validity.
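// Concretely (non-BACKUP_WDQ path): both edges start at the trained WDQS - 1/4 CLK,
// offset by -1/4 CLK (LEFT) and +1/4 CLK (RIGHT); each side is stepped inwards by
// WDQ_STEP on failing byte lanes until check_bls_ex() passes, and the final WDQ is
// programmed to the midpoint of the two passing edges.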
   1897 static void wr_train(
   1898     MRCParams_t *mrc_params)
   1899 {
   1900 
   1901 #define WDQ_STEP 1 // how many WDQ codes to jump while margining
   1902 #define L 0 // LEFT side loop value definition
   1903 #define R 1 // RIGHT side loop value definition
   1904 
   1905   uint8_t channel_i; // channel counter
   1906   uint8_t rank_i; // rank counter
   1907   uint8_t bl_i; // byte lane counter
   1908   uint8_t bl_divisor = (mrc_params->channel_width == x16) ? 2 : 1; // byte lane divisor
   1909 #ifdef BACKUP_WDQ
   1910 #else
   1911   uint8_t side_i; // LEFT/RIGHT side indicator (0=L, 1=R)
   1912   uint32_t tempD; // temporary DWORD
   1913   uint32_t delay[2/*side_i*/][NUM_CHANNELS][NUM_RANKS][NUM_BYTE_LANES]; // 2 arrays, for L & R side passing delays
   1914   uint32_t address; // target address for "check_bls_ex()"
   1915   uint32_t result; // result of "check_bls_ex()"
   1916   uint32_t bl_mask; // byte lane mask for "result" checking
   1917 #ifdef R2R_SHARING
   1918   uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES]; // used to find placement for rank2rank sharing configs
   1919   uint32_t num_ranks_enabled = 0; // used to find placement for rank2rank sharing configs
   1920 #endif // R2R_SHARING
   1921 #endif // BACKUP_WDQ
   1922 
   1923   // wr_train starts
   1924   post_code(0x08, 0x00);
   1925 
   1926   ENTERFN();
   1927 
   1928 #ifdef BACKUP_WDQ
   1929   for (channel_i=0; channel_i<NUM_CHANNELS; channel_i++)
   1930   {
   1931     if (mrc_params->channel_enables & (1<<channel_i))
   1932     {
   1933       for (rank_i=0; rank_i<NUM_RANKS; rank_i++)
   1934       {
   1935         if (mrc_params->rank_enables & (1<<rank_i))
   1936         {
   1937           for (bl_i=0; bl_i<(NUM_BYTE_LANES/bl_divisor); bl_i++)
   1938           {
   1939             set_wdq(channel_i, rank_i, bl_i, ddr_wdq[PLATFORM_ID]);
   1940           } // bl_i loop
   1941         } // if rank is enabled
   1942       } // rank_i loop
   1943     } // if channel is enabled
   1944   } // channel_i loop
   1945 #else
   1946   // initialise "delay"
   1947   for (channel_i = 0; channel_i < NUM_CHANNELS; channel_i++)
   1948   {
   1949     if (mrc_params->channel_enables & (1 << channel_i))
   1950     {
   1951       for (rank_i = 0; rank_i < NUM_RANKS; rank_i++)
   1952       {
   1953         if (mrc_params->rank_enables & (1 << rank_i))
   1954         {
   1955           for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i++)
   1956           {
   1957             // want to start with WDQ = (WDQS - QRTR_CLK) +/- QRTR_CLK
   1958             tempD = get_wdqs(channel_i, rank_i, bl_i) - QRTR_CLK;
   1959             delay[L][channel_i][rank_i][bl_i] = tempD - QRTR_CLK;
   1960             delay[R][channel_i][rank_i][bl_i] = tempD + QRTR_CLK;
   1961           } // bl_i loop
   1962         } // if rank is enabled
   1963       } // rank_i loop
   1964     } // if channel is enabled
   1965   } // channel_i loop
   1966 
   1967   // initialise other variables
   1968   bl_mask = byte_lane_mask(mrc_params);
   1969   address = get_addr(mrc_params, 0, 0);
   1970 
   1971 #ifdef R2R_SHARING
   1972   // need to set "final_delay[][]" elements to "0"
   1973   memset((void *) (final_delay), 0x00, (size_t) sizeof(final_delay));
   1974 #endif // R2R_SHARING
   1975 
   1976   // start algorithm on the LEFT side and train each channel/bl until no failures are observed, then repeat for the RIGHT side.
   1977   for (side_i = L; side_i <= R; side_i++)
   1978   {
   1979     post_code(0x08, (0x10 + (side_i)));
   1980 
   1981     // set starting values
   1982     for (channel_i = 0; channel_i < NUM_CHANNELS; channel_i++)
   1983     {
   1984       if (mrc_params->channel_enables & (1 << channel_i))
   1985       {
   1986         for (rank_i = 0; rank_i < NUM_RANKS; rank_i++)
   1987         {
   1988           if (mrc_params->rank_enables & (1 << rank_i))
   1989           {
   1990             for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i++)
   1991             {
   1992               set_wdq(channel_i, rank_i, bl_i, delay[side_i][channel_i][rank_i][bl_i]);
   1993             } // bl_i loop
   1994           } // if rank is enabled
   1995         } // rank_i loop
   1996       } // if channel is enabled
   1997     } // channel_i loop
   1998 
   1999     // find passing values
   2000     for (channel_i = 0; channel_i < NUM_CHANNELS; channel_i++)
   2001     {
   2002       if (mrc_params->channel_enables & (0x1 << channel_i))
   2003       {
   2004         for (rank_i = 0; rank_i < NUM_RANKS; rank_i++)
   2005         {
   2006           if (mrc_params->rank_enables & (0x1 << rank_i))
   2007           {
   2008             // get an address in the target channel/rank
   2009             address = get_addr(mrc_params, channel_i, rank_i);
   2010 
   2011             // request HTE reconfiguration
   2012             mrc_params->hte_setup = 1;
   2013 
   2014             // check the settings
   2015             do
   2016             {
   2017 
   2018 #ifdef SIM
    2019               // need to restore memory to the idle state as the write can be out of sync
   2020               dram_init_command (DCMD_PREA(rank_i));
   2021 #endif
   2022 
   2023               // result[07:00] == failing byte lane (MAX 8)
   2024               result = check_bls_ex( mrc_params, address);
   2025               // check for failures
   2026               if (result & 0xFF)
   2027               {
   2028                 // at least 1 byte lane failed
   2029                 for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i++)
   2030                 {
   2031                   if (result & (bl_mask << bl_i))
   2032                   {
   2033                     if (side_i == L)
   2034                     {
   2035                       delay[L][channel_i][rank_i][bl_i] += WDQ_STEP;
   2036                     }
   2037                     else
   2038                     {
   2039                       delay[R][channel_i][rank_i][bl_i] -= WDQ_STEP;
   2040                     }
   2041                     // check for algorithm failure
   2042                     if (delay[L][channel_i][rank_i][bl_i] != delay[R][channel_i][rank_i][bl_i])
   2043                     {
   2044                       // margin available, update delay setting
   2045                       set_wdq(channel_i, rank_i, bl_i, delay[side_i][channel_i][rank_i][bl_i]);
   2046                     }
   2047                     else
   2048                     {
   2049                       // no margin available, notify the user and halt
   2050                       training_message(channel_i, rank_i, bl_i);
   2051                       post_code(0xEE, (0x80 + side_i));
   2052                     }
   2053                   } // if bl_i failed
   2054                 } // bl_i loop
   2055               } // at least 1 byte lane failed
   2056             } while (result & 0xFF); // stop when all byte lanes pass
   2057           } // if rank is enabled
   2058         } // rank_i loop
   2059       } // if channel is enabled
   2060     } // channel_i loop
   2061   } // side_i loop
   2062 
   2063   // program WDQ to the middle of passing window
   2064   for (channel_i = 0; channel_i < NUM_CHANNELS; channel_i++)
   2065   {
   2066     if (mrc_params->channel_enables & (1 << channel_i))
   2067     {
   2068       for (rank_i = 0; rank_i < NUM_RANKS; rank_i++)
   2069       {
   2070         if (mrc_params->rank_enables & (1 << rank_i))
   2071         {
   2072 #ifdef R2R_SHARING
   2073           // increment "num_ranks_enabled"
   2074           num_ranks_enabled++;
   2075 #endif // R2R_SHARING
   2076           for (bl_i = 0; bl_i < (NUM_BYTE_LANES / bl_divisor); bl_i++)
   2077           {
   2078 
   2079             DPF(D_INFO, "WDQ eye rank%d lane%d : %d-%d\n", rank_i, bl_i,
   2080                 delay[L][channel_i][rank_i][bl_i],
   2081                 delay[R][channel_i][rank_i][bl_i]);
   2082 
   2083             tempD = (delay[R][channel_i][rank_i][bl_i] + delay[L][channel_i][rank_i][bl_i]) / 2;
   2084 
   2085 #ifdef R2R_SHARING
   2086             final_delay[channel_i][bl_i] += tempD;
   2087             set_wdq(channel_i, rank_i, bl_i, ((final_delay[channel_i][bl_i]) / num_ranks_enabled));
   2088 #else
   2089             set_wdq(channel_i, rank_i, bl_i, tempD);
   2090 #endif // R2R_SHARING
   2091 
   2092           } // bl_i loop
   2093         } // if rank is enabled
   2094       } // rank_i loop
   2095     } // if channel is enabled
   2096   } // channel_i loop
   2097 #endif // BACKUP_WDQ
   2098   LEAVEFN();
   2099   return;
   2100 }
   2101 
    2102 // Wrapper for the JEDEC initialisation routine
   2103 static void perform_jedec_init(
   2104     MRCParams_t *mrc_params)
   2105 {
   2106   jedec_init(mrc_params, 0);
   2107 }
   2108 
   2109 // Configure DDRPHY for Auto-Refresh, Periodic Compensations,
   2110 // Dynamic Diff-Amp, ZQSPERIOD, Auto-Precharge, CKE Power-Down
   2111 static void set_auto_refresh(
   2112     MRCParams_t *mrc_params)
   2113 {
   2114   uint32_t channel_i;
   2115   uint32_t rank_i;
   2116   uint32_t bl_i;
   2117   uint32_t bl_divisor = /*(mrc_params->channel_width==x16)?2:*/1;
   2118   uint32_t tempD;
   2119 
   2120   ENTERFN();
   2121 
   2122   // enable Auto-Refresh, Periodic Compensations, Dynamic Diff-Amp, ZQSPERIOD, Auto-Precharge, CKE Power-Down
   2123   for (channel_i = 0; channel_i < NUM_CHANNELS; channel_i++)
   2124   {
   2125     if (mrc_params->channel_enables & (1 << channel_i))
   2126     {
   2127       // Enable Periodic RCOMPS
   2128       isbM32m(DDRPHY, CMPCTRL, (BIT1), (BIT1));
   2129 
   2130 
   2131       // Enable Dynamic DiffAmp & Set Read ODT Value
   2132       switch (mrc_params->rd_odt_value)
   2133       {
   2134         case 0: tempD = 0x3F; break;  // OFF
   2135         default: tempD = 0x00; break; // Auto
   2136       } // rd_odt_value switch
   2137 
   2138       for (bl_i=0; bl_i<((NUM_BYTE_LANES/bl_divisor)/2); bl_i++)
   2139       {
   2140         isbM32m(DDRPHY, (B0OVRCTL + (bl_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)),
   2141             ((0x00<<16)|(tempD<<10)),
   2142             ((BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)|(BIT15|BIT14|BIT13|BIT12|BIT11|BIT10))); // Override: DIFFAMP, ODT
   2143 
   2144         isbM32m(DDRPHY, (B1OVRCTL + (bl_i * DDRIODQ_BL_OFFSET) + (channel_i * DDRIODQ_CH_OFFSET)),
   2145             ((0x00<<16)|(tempD<<10)),
   2146             ((BIT21|BIT20|BIT19|BIT18|BIT17|BIT16)|(BIT15|BIT14|BIT13|BIT12|BIT11|BIT10)));// Override: DIFFAMP, ODT
   2147       } // bl_i loop
   2148 
   2149       // Issue ZQCS command
   2150       for (rank_i = 0; rank_i < NUM_RANKS; rank_i++)
   2151       {
   2152         if (mrc_params->rank_enables & (1 << rank_i))
   2153         {
   2154           dram_init_command(DCMD_ZQCS(rank_i));
   2155         } // if rank_i enabled
   2156       } // rank_i loop
   2157 
   2158     } // if channel_i enabled
   2159   } // channel_i loop
   2160 
   2161   clear_pointers();
   2162 
   2163   LEAVEFN();
   2164   return;
   2165 }
   2166 
    2167 // Enables ECC support depending on configuration.
    2168 // Available memory size is decreased, and memory is written with 0s
    2169 // in order to clear the error status. Address mode 2 is forced.
   2170 static void ecc_enable(
   2171     MRCParams_t *mrc_params)
   2172 {
   2173   RegDRP Drp;
   2174   RegDSCH Dsch;
   2175   RegDECCCTRL Ctr;
   2176 
   2177   if (mrc_params->ecc_enables == 0) return;
   2178 
   2179   ENTERFN();
   2180 
   2181   // Configuration required in ECC mode
   2182   Drp.raw = isbR32m(MCU, DRP);
   2183   Drp.field.addressMap = 2;
   2184   Drp.field.split64 = 1;
   2185   isbW32m(MCU, DRP, Drp.raw);
   2186 
   2187   // Disable new request bypass
   2188   Dsch.raw = isbR32m(MCU, DSCH);
   2189   Dsch.field.NEWBYPDIS = 1;
   2190   isbW32m(MCU, DSCH, Dsch.raw);
   2191 
   2192   // Enable ECC
   2193   Ctr.raw = 0;
   2194   Ctr.field.SBEEN = 1;
   2195   Ctr.field.DBEEN = 1;
   2196   Ctr.field.ENCBGEN = 1;
   2197   isbW32m(MCU, DECCCTRL, Ctr.raw);
   2198 
   2199 #ifdef SIM
   2200   // Read back to be sure writing took place
   2201   Ctr.raw = isbR32m(MCU, DECCCTRL);
   2202 #endif
   2203 
   2204   // Assume 8 bank memory, one bank is gone for ECC
   2205   mrc_params->mem_size -= mrc_params->mem_size / 8;
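  // e.g. a 1024 MB configuration drops to 1024 - 128 = 896 MB of usable memory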
   2206 
   2207   // For S3 resume memory content has to be preserved
   2208   if (mrc_params->boot_mode != bmS3)
   2209   {
   2210     select_hte(mrc_params);
   2211     HteMemInit(mrc_params, MrcMemInit, MrcHaltHteEngineOnError);
   2212     select_memory_manager(mrc_params);
   2213   }
   2214 
   2215   LEAVEFN();
   2216   return;
   2217 }
   2218 
    2219 // Lock MCU registers at the end of the initialisation sequence.
   2220 static void lock_registers(
   2221     MRCParams_t *mrc_params)
   2222 {
   2223   RegDCO Dco;
   2224 
   2225   ENTERFN();
   2226 
   2227   Dco.raw = isbR32m(MCU, DCO);
   2228   Dco.field.PMIDIS = 0;          //0 - PRI enabled
   2229   Dco.field.PMICTL = 0;          //0 - PRI owned by MEMORY_MANAGER
   2230   Dco.field.DRPLOCK = 1;
   2231   Dco.field.REUTLOCK = 1;
   2232   isbW32m(MCU, DCO, Dco.raw);
   2233 
   2234   LEAVEFN();
   2235 
   2236 }
   2237 
   2238 #ifdef MRC_SV
   2239 
   2240 // cache write back invalidate
   2241 static void asm_wbinvd(void)
   2242 {
   2243 #if defined (SIM) || defined (GCC)
   2244   asm(
   2245     "wbinvd;"
   2246   );
   2247 #else
   2248   __asm wbinvd;
   2249 #endif
   2250 }
   2251 
   2252 // cache invalidate
   2253 static void asm_invd(void)
   2254 {
   2255 #if defined (SIM) || defined (GCC)
   2256   asm(
   2257       "invd;"
   2258   );
   2259 #else
   2260   __asm invd;
   2261 #endif
   2262 }
   2263 
   2264 
   2265 static void cpu_read(void)
   2266 {
   2267   uint32_t adr, dat, limit;
   2268 
   2269   asm_invd();
   2270 
   2271   limit = 8 * 1024;
   2272   for (adr = 0; adr < limit; adr += 4)
   2273   {
   2274     dat = *(uint32_t*) adr;
   2275     if ((adr & 0x0F) == 0)
   2276     {
   2277       DPF(D_INFO, "\n%x : ", adr);
   2278     }
   2279     DPF(D_INFO, "%x ", dat);
   2280   }
   2281   DPF(D_INFO, "\n");
   2282 
   2283   DPF(D_INFO, "CPU read done\n");
   2284 }
   2285 
   2286 
   2287 static void cpu_write(void)
   2288 {
   2289   uint32_t adr, limit;
   2290 
   2291   limit = 8 * 1024;
   2292   for (adr = 0; adr < limit; adr += 4)
   2293   {
   2294     *(uint32_t*) adr = 0xDEAD0000 + adr;
   2295   }
   2296 
   2297   asm_wbinvd();
   2298 
   2299   DPF(D_INFO, "CPU write done\n");
   2300 }
   2301 
   2302 
   2303 static void cpu_memory_test(
   2304     MRCParams_t *mrc_params)
   2305 {
   2306   uint32_t result = 0;
   2307   uint32_t val, dat, adr, adr0, step, limit;
   2308   uint64_t my_tsc;
   2309 
   2310   ENTERFN();
   2311 
   2312   asm_invd();
   2313 
   2314   adr0 = 1 * 1024 * 1024;
   2315   limit = 256 * 1024 * 1024;
   2316 
   2317   for (step = 0; step <= 4; step++)
   2318   {
   2319     DPF(D_INFO, "Mem test step %d starting from %xh\n", step, adr0);
   2320 
   2321     my_tsc = read_tsc();
   2322     for (adr = adr0; adr < limit; adr += sizeof(uint32_t))
   2323     {
   2324       if (step == 0)      dat = adr;
   2325       else if (step == 1) dat = (1 << ((adr >> 2) & 0x1f));
   2326       else if (step == 2) dat = ~(1 << ((adr >> 2) & 0x1f));
   2327       else if (step == 3) dat = 0x5555AAAA;
   2328       else if (step == 4) dat = 0xAAAA5555;
   2329 
   2330       *(uint32_t*) adr = dat;
   2331     }
   2332     DPF(D_INFO, "Write time %llXh\n", read_tsc() - my_tsc);
   2333 
   2334     my_tsc = read_tsc();
   2335     for (adr = adr0; adr < limit; adr += sizeof(uint32_t))
   2336     {
   2337       if (step == 0)      dat = adr;
   2338       else if (step == 1) dat = (1 << ((adr >> 2) & 0x1f));
   2339       else if (step == 2) dat = ~(1 << ((adr >> 2) & 0x1f));
   2340       else if (step == 3) dat = 0x5555AAAA;
   2341       else if (step == 4) dat = 0xAAAA5555;
   2342 
   2343       val = *(uint32_t*) adr;
   2344 
   2345       if (val != dat)
   2346       {
   2347         DPF(D_INFO, "%x vs. %x@%x\n", dat, val, adr);
   2348         result = adr|BIT31;
   2349       }
   2350     }
   2351     DPF(D_INFO, "Read time %llXh\n", read_tsc() - my_tsc);
   2352   }
   2353 
   2354   DPF( D_INFO, "Memory test result %x\n", result);
   2355   LEAVEFN();
   2356 }
   2357 #endif // MRC_SV
   2358 
   2359 
    2360 // Execute memory test; if an error is detected it is
   2361 // indicated in mrc_params->status.
   2362 static void memory_test(
   2363   MRCParams_t *mrc_params)
   2364 {
   2365   uint32_t result = 0;
   2366 
   2367   ENTERFN();
   2368 
   2369   select_hte(mrc_params);
   2370   result = HteMemInit(mrc_params, MrcMemTest, MrcHaltHteEngineOnError);
   2371   select_memory_manager(mrc_params);
   2372 
   2373   DPF(D_INFO, "Memory test result %x\n", result);
   2374   mrc_params->status = ((result == 0) ? MRC_SUCCESS : MRC_E_MEMTEST);
   2375   LEAVEFN();
   2376 }
   2377 
   2378 
   2379 // Force same timings as with backup settings
   2380 static void static_timings(
   2381   MRCParams_t *mrc_params)
   2382 
   2383 {
   2384   uint8_t ch, rk, bl;
   2385 
   2386   for (ch = 0; ch < NUM_CHANNELS; ch++)
   2387   {
   2388     for (rk = 0; rk < NUM_RANKS; rk++)
   2389     {
   2390       for (bl = 0; bl < NUM_BYTE_LANES; bl++)
   2391       {
   2392         set_rcvn(ch, rk, bl, 498);  // RCVN
   2393         set_rdqs(ch, rk, bl,  24);  // RDQS
   2394         set_wdqs(ch, rk, bl, 292);  // WDQS
   2395         set_wdq( ch, rk, bl, 260);  // WDQ
   2396         if (rk == 0)
   2397         {
   2398           set_vref(ch, bl, 32); // VREF (RANK0 only)
   2399         }
   2400       }
   2401       set_wctl(ch, rk, 217); // WCTL
   2402     }
   2403     set_wcmd(ch, 220); // WCMD
   2404   }
   2405 
   2406   return;
   2407 }
   2408 
   2409 //
   2410 // Initialise system memory.
   2411 //
   2412 void MemInit(
   2413   MRCParams_t *mrc_params)
   2414 {
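  // Each init[] entry below appears to pair a POST code with the boot modes in which
  // the step runs and the handler to invoke (e.g. rcvn_cal is listed as 0x0500 and
  // posts major code 0x05).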
   2415   static const MemInit_t init[] =
   2416   {
   2417     { 0x0101, bmCold|bmFast|bmWarm|bmS3, clear_self_refresh       }, //0
   2418     { 0x0200, bmCold|bmFast|bmWarm|bmS3, prog_ddr_timing_control  }, //1  initialise the MCU
   2419     { 0x0103, bmCold|bmFast            , prog_decode_before_jedec }, //2
   2420     { 0x0104, bmCold|bmFast            , perform_ddr_reset        }, //3
   2421     { 0x0300, bmCold|bmFast       |bmS3, ddrphy_init              }, //4  initialise the DDRPHY
   2422     { 0x0400, bmCold|bmFast            , perform_jedec_init       }, //5  perform JEDEC initialisation of DRAMs
   2423     { 0x0105, bmCold|bmFast            , set_ddr_init_complete    }, //6
   2424     { 0x0106,        bmFast|bmWarm|bmS3, restore_timings          }, //7
   2425     { 0x0106, bmCold                   , default_timings          }, //8
   2426     { 0x0500, bmCold                   , rcvn_cal                 }, //9  perform RCVN_CAL algorithm
   2427     { 0x0600, bmCold                   , wr_level                 }, //10  perform WR_LEVEL algorithm
   2428     { 0x0120, bmCold                   , prog_page_ctrl           }, //11
   2429     { 0x0700, bmCold                   , rd_train                 }, //12  perform RD_TRAIN algorithm
   2430     { 0x0800, bmCold                   , wr_train                 }, //13  perform WR_TRAIN algorithm
   2431     { 0x010B, bmCold                   , store_timings            }, //14
   2432     { 0x010C, bmCold|bmFast|bmWarm|bmS3, enable_scrambling        }, //15
   2433     { 0x010D, bmCold|bmFast|bmWarm|bmS3, prog_ddr_control         }, //16
   2434     { 0x010E, bmCold|bmFast|bmWarm|bmS3, prog_dra_drb             }, //17
   2435     { 0x010F,               bmWarm|bmS3, perform_wake             }, //18
   2436     { 0x0110, bmCold|bmFast|bmWarm|bmS3, change_refresh_period    }, //19
   2437     { 0x0111, bmCold|bmFast|bmWarm|bmS3, set_auto_refresh         }, //20
   2438     { 0x0112, bmCold|bmFast|bmWarm|bmS3, ecc_enable               }, //21
   2439     { 0x0113, bmCold|bmFast            , memory_test              }, //22
   2440     { 0x0114, bmCold|bmFast|bmWarm|bmS3, lock_registers           }  //23 set init done
   2441   };
   2442 
   2443   uint32_t i;
   2444 
   2445   ENTERFN();
   2446 
   2447   DPF(D_INFO, "Meminit build %s %s\n", __DATE__, __TIME__);
   2448 
   2449   // MRC started
   2450   post_code(0x01, 0x00);
   2451 
   2452   if (mrc_params->boot_mode != bmCold)
   2453   {
   2454     if (mrc_params->ddr_speed != mrc_params->timings.ddr_speed)
   2455     {
   2456       // full training required as frequency changed
   2457       mrc_params->boot_mode = bmCold;
   2458     }
   2459   }
   2460 
   2461   for (i = 0; i < MCOUNT(init); i++)
   2462   {
   2463     uint64_t my_tsc;
   2464 
   2465 #ifdef MRC_SV
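    // Interactive debug menu (MRC_SV builds only): when menu_after_mrc is set, the
    // flow stops before each step past step 14 so individual training/test steps
    // can be rerun or memory exercised before MRC completes.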
   2466     if (mrc_params->menu_after_mrc && i > 14)
   2467     {
   2468       uint8_t ch;
   2469 
   2470       mylop:
   2471 
   2472       DPF(D_INFO, "-- c - continue --\n");
   2473       DPF(D_INFO, "-- j - move to jedec init --\n");
   2474       DPF(D_INFO, "-- m - memory test --\n");
   2475       DPF(D_INFO, "-- r - cpu read --\n");
   2476       DPF(D_INFO, "-- w - cpu write --\n");
   2477       DPF(D_INFO, "-- b - hte base test --\n");
   2478       DPF(D_INFO, "-- g - hte extended test --\n");
   2479 
   2480       ch = mgetc();
   2481       switch (ch)
   2482       {
   2483       case 'c':
   2484         break;
   2485       case 'j':  //move to jedec init
   2486         i = 5;
   2487         break;
   2488 
   2489       case 'M':
   2490       case 'N':
   2491         {
   2492         uint32_t n, res, cnt = 0;
   2493 
   2494         for (n = 0; mgetch() == 0; n++)
   2495         {
   2496           if (ch == 'M' || n % 256 == 0)
   2497           {
   2498             DPF(D_INFO, "n=%d e=%d\n", n, cnt);
   2499           }
   2500 
   2501           res = 0;
   2502 
   2503           if (ch == 'M')
   2504           {
   2505             memory_test(mrc_params);
   2506             res |= mrc_params->status;
   2507           }
   2508 
   2509           mrc_params->hte_setup = 1;
   2510           res |= check_bls_ex(mrc_params, 0x00000000);
   2511           res |= check_bls_ex(mrc_params, 0x00000000);
   2512           res |= check_bls_ex(mrc_params, 0x00000000);
   2513           res |= check_bls_ex(mrc_params, 0x00000000);
   2514 
   2515           if (mrc_params->rank_enables & 2)
   2516           {
   2517             mrc_params->hte_setup = 1;
   2518             res |= check_bls_ex(mrc_params, 0x40000000);
   2519             res |= check_bls_ex(mrc_params, 0x40000000);
   2520             res |= check_bls_ex(mrc_params, 0x40000000);
   2521             res |= check_bls_ex(mrc_params, 0x40000000);
   2522           }
   2523 
   2524           if (res != 0)
   2525           {
   2526             DPF(D_INFO, "###########\n");
   2527             DPF(D_INFO, "#\n");
   2528             DPF(D_INFO, "# Error count %d\n", ++cnt);
   2529             DPF(D_INFO, "#\n");
   2530             DPF(D_INFO, "###########\n");
   2531           }
   2532 
   2533         } // for
   2534 
   2535         select_memory_manager(mrc_params);
   2536         }
   2537         goto mylop;
   2538       case 'm':
   2539         memory_test(mrc_params);
   2540         goto mylop;
   2541       case 'n':
   2542         cpu_memory_test(mrc_params);
   2543         goto mylop;
   2544 
   2545       case 'l':
   2546         ch = mgetc();
   2547         if (ch >= '0' && ch <= '9') DpfPrintMask ^= (ch - '0') << 3;
   2548         DPF(D_INFO, "Log mask %x\n", DpfPrintMask);
   2549         goto mylop;
   2550       case 'p':
   2551         print_timings(mrc_params);
   2552         goto mylop;
   2553       case 'R':
   2554         rd_train(mrc_params);
   2555         goto mylop;
   2556       case 'W':
   2557         wr_train(mrc_params);
   2558         goto mylop;
   2559 
   2560       case 'r':
   2561         cpu_read();
   2562         goto mylop;
   2563       case 'w':
   2564         cpu_write();
   2565         goto mylop;
   2566 
   2567       case 'g':
   2568         {
   2569         uint32_t result;
   2570         select_hte(mrc_params);
   2571         mrc_params->hte_setup = 1;
   2572         result = check_bls_ex(mrc_params, 0);
   2573         DPF(D_INFO, "Extended test result %x\n", result);
   2574         select_memory_manager(mrc_params);
   2575         }
   2576         goto mylop;
   2577       case 'b':
   2578         {
   2579         uint32_t result;
   2580         select_hte(mrc_params);
   2581         mrc_params->hte_setup = 1;
   2582         result = check_rw_coarse(mrc_params, 0);
   2583         DPF(D_INFO, "Base test result %x\n", result);
   2584         select_memory_manager(mrc_params);
   2585         }
   2586         goto mylop;
   2587       case 'B':
   2588         select_hte(mrc_params);
   2589         HteMemOp(0x2340, 1, 1);
   2590         select_memory_manager(mrc_params);
   2591         goto mylop;
   2592 
   2593       case '3':
   2594         {
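        // Simulated suspend/resume: disable the master DLL, issue the sideband
        // suspend command, wait for a keypress, then restart the sequence as an S3 resume.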
   2595         RegDPMC0 DPMC0reg;
   2596 
   2597         DPF( D_INFO, "===>> Start suspend\n");
   2598         isbR32m(MCU, DSTAT);
   2599 
   2600         DPMC0reg.raw = isbR32m(MCU, DPMC0);
   2601         DPMC0reg.field.DYNSREN = 0;
   2602         DPMC0reg.field.powerModeOpCode = 0x05;    // Disable Master DLL
   2603         isbW32m(MCU, DPMC0, DPMC0reg.raw);
   2604 
   2605         // Send the sideband suspend command; change to '#if 0' for negative test case verification
   2606         #if 1
   2607         Wr32(MMIO, PCIADDR(0,0,0,SB_PACKET_REG),
   2608             (uint32_t)SB_COMMAND(SB_SUSPEND_CMND_OPCODE, MCU, 0));
   2609         #endif
   2610 
   2611         DPF( D_INFO, "press key\n");
   2612         mgetc();
   2613         DPF( D_INFO, "===>> Start resume\n");
   2614         isbR32m(MCU, DSTAT);
   2615 
   2616         mrc_params->boot_mode = bmS3;
   2617         i = 0;
   2618         }
   2619 
   2620       } // switch
   2621 
   2622     } // if( menu
   2623 #endif //MRC_SV
   2624 
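    // Run this step only when its boot_path mask includes the current boot mode;
    // the 16-bit post code splits into major (high byte) and minor (low byte).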
   2625     if (mrc_params->boot_mode & init[i].boot_path)
   2626     {
   2627       uint8_t major = (init[i].post_code >> 8) & 0xFF;
   2628       uint8_t minor = (init[i].post_code >> 0) & 0xFF;
   2629       post_code(major, minor);
   2630 
   2631       my_tsc = read_tsc();
   2632       init[i].init_fn(mrc_params);
   2633       DPF(D_TIME, "Execution time %llX\n", read_tsc() - my_tsc);
   2634     }
   2635   }
   2636 
   2637   // display the timings
   2638   print_timings(mrc_params);
   2639 
   2640   // MRC is complete.
   2641   post_code(0x01, 0xFF);
   2642 
   2643   LEAVEFN();
   2644   return;
   2645 }
   2646