      1 // SPDX-License-Identifier: GPL-2.0
      2 /*
      3  * Copyright (C) Marvell International Ltd. and its affiliates
      4  */
      5 
      6 #include "ddr3_init.h"
      7 #include "mv_ddr_common.h"
      8 
      9 #define GET_CS_FROM_MASK(mask)	(cs_mask2_num[mask])
     10 #define CS_CBE_VALUE(cs_num)	(cs_cbe_reg[cs_num])
     11 
     12 u32 window_mem_addr = 0;
     13 u32 phy_reg0_val = 0;
     14 u32 phy_reg1_val = 8;
     15 u32 phy_reg2_val = 0;
     16 u32 phy_reg3_val = PARAM_UNDEFINED;
     17 enum hws_ddr_freq low_freq = DDR_FREQ_LOW_FREQ;
     18 enum hws_ddr_freq medium_freq;
     19 u32 debug_dunit = 0;
     20 u32 odt_additional = 1;
     21 u32 *dq_map_table = NULL;
     22 
      23 /* In case of DDR4, do not run the ddr3_tip_write_additional_odt_setting function - MC ODT is always 'on'.
      24  * In the DDR4 case the terminations are RTT_WR and RTT_PARK, so the ODT must always be 'on' (0x1498 = 0xf).
      25  */
     26 u32 odt_config = 1;
     27 
     28 u32 nominal_avs;
     29 u32 extension_avs;
     30 
     31 u32 is_pll_before_init = 0, is_adll_calib_before_init = 1, is_dfs_in_init = 0;
     32 u32 dfs_low_freq;
     33 
     34 u32 g_rtt_nom_cs0, g_rtt_nom_cs1;
      35 u8 calibration_update_control;	/* 1 - internal only, 2 - external only */
     36 
     37 enum hws_result training_result[MAX_STAGE_LIMIT][MAX_INTERFACE_NUM];
     38 enum auto_tune_stage training_stage = INIT_CONTROLLER;
     39 u32 finger_test = 0, p_finger_start = 11, p_finger_end = 64,
     40 	n_finger_start = 11, n_finger_end = 64,
     41 	p_finger_step = 3, n_finger_step = 3;
     42 u32 clamp_tbl[] = { 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3 };
     43 
      44 /* Initialized to 0xff; this variable is defined by the user in debug mode */
     45 u32 mode_2t = 0xff;
     46 u32 xsb_validate_type = 0;
     47 u32 xsb_validation_base_address = 0xf000;
     48 u32 first_active_if = 0;
     49 u32 dfs_low_phy1 = 0x1f;
     50 u32 multicast_id = 0;
     51 int use_broadcast = 0;
     52 struct hws_tip_freq_config_info *freq_info_table = NULL;
     53 u8 is_cbe_required = 0;
     54 u32 debug_mode = 0;
     55 u32 delay_enable = 0;
     56 int rl_mid_freq_wa = 0;
     57 
     58 u32 effective_cs = 0;
     59 
     60 u32 vref_init_val = 0x4;
     61 u32 ck_delay = PARAM_UNDEFINED;
     62 
     63 /* Design guidelines parameters */
     64 u32 g_zpri_data = PARAM_UNDEFINED; /* controller data - P drive strength */
     65 u32 g_znri_data = PARAM_UNDEFINED; /* controller data - N drive strength */
     66 u32 g_zpri_ctrl = PARAM_UNDEFINED; /* controller C/A - P drive strength */
     67 u32 g_znri_ctrl = PARAM_UNDEFINED; /* controller C/A - N drive strength */
     68 
     69 u32 g_zpodt_data = PARAM_UNDEFINED; /* controller data - P ODT */
     70 u32 g_znodt_data = PARAM_UNDEFINED; /* controller data - N ODT */
      71 u32 g_zpodt_ctrl = PARAM_UNDEFINED; /* controller C/A - P ODT */
      72 u32 g_znodt_ctrl = PARAM_UNDEFINED; /* controller C/A - N ODT */
     73 
     74 u32 g_odt_config = PARAM_UNDEFINED;
     75 u32 g_rtt_nom = PARAM_UNDEFINED;
     76 u32 g_rtt_wr = PARAM_UNDEFINED;
     77 u32 g_dic = PARAM_UNDEFINED;
     78 u32 g_rtt_park = PARAM_UNDEFINED;
     79 
     80 u32 mask_tune_func = (SET_MEDIUM_FREQ_MASK_BIT |
     81 		      WRITE_LEVELING_MASK_BIT |
     82 		      LOAD_PATTERN_2_MASK_BIT |
     83 		      READ_LEVELING_MASK_BIT |
     84 		      SET_TARGET_FREQ_MASK_BIT |
     85 		      WRITE_LEVELING_TF_MASK_BIT |
     86 		      READ_LEVELING_TF_MASK_BIT |
     87 		      CENTRALIZATION_RX_MASK_BIT |
     88 		      CENTRALIZATION_TX_MASK_BIT);
     89 
     90 static int ddr3_tip_ddr3_training_main_flow(u32 dev_num);
     91 static int ddr3_tip_write_odt(u32 dev_num, enum hws_access_type access_type,
     92 			      u32 if_id, u32 cl_value, u32 cwl_value);
     93 static int ddr3_tip_ddr3_auto_tune(u32 dev_num);
     94 
     95 #ifdef ODT_TEST_SUPPORT
     96 static int odt_test(u32 dev_num, enum hws_algo_type algo_type);
     97 #endif
     98 
     99 int adll_calibration(u32 dev_num, enum hws_access_type access_type,
    100 		     u32 if_id, enum hws_ddr_freq frequency);
    101 static int ddr3_tip_set_timing(u32 dev_num, enum hws_access_type access_type,
    102 			       u32 if_id, enum hws_ddr_freq frequency);
    103 
    104 static struct page_element page_tbl[] = {
    105 	/*
    106 	 * 8bits	16 bits
    107 	 * page-size(K)	page-size(K)	mask
    108 	 */
    109 	{ 1,		2,		2},
    110 	/* 512M */
    111 	{ 1,		2,		3},
    112 	/* 1G */
    113 	{ 1,		2,		0},
    114 	/* 2G */
    115 	{ 1,		2,		4},
    116 	/* 4G */
    117 	{ 2,		2,		5},
    118 	/* 8G */
     119 	{0, 0, 0}, /* TODO: placeholder for 16-Gbit die capacity */
     120 	{0, 0, 0}, /* TODO: placeholder for 32-Gbit die capacity */
     121 	{0, 0, 0}, /* TODO: placeholder for 12-Gbit die capacity */
     122 	{0, 0, 0}  /* TODO: placeholder for 24-Gbit die capacity */
    123 
    124 };
    125 
    126 struct page_element *mv_ddr_page_tbl_get(void)
    127 {
    128 	return &page_tbl[0];
    129 }
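/*
 * Each page_tbl[] row corresponds to a die capacity in the same order as the
 * comments above (512Mbit, 1Gbit, 2Gbit, 4Gbit, 8Gbit, ...): the first two
 * columns give the page size in KB for x8 and x16 devices, and the mask
 * column matches the mem_size_config[] encoding below.
 */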
    130 
    131 static u8 mem_size_config[MV_DDR_DIE_CAP_LAST] = {
    132 	0x2,			/* 512Mbit  */
    133 	0x3,			/* 1Gbit    */
    134 	0x0,			/* 2Gbit    */
    135 	0x4,			/* 4Gbit    */
    136 	0x5,			/* 8Gbit    */
     137 	0x0, /* TODO: placeholder for 16-Gbit die capacity */
     138 	0x0, /* TODO: placeholder for 32-Gbit die capacity */
     139 	0x0, /* TODO: placeholder for 12-Gbit die capacity */
     140 	0x0  /* TODO: placeholder for 24-Gbit die capacity */
    141 };
    142 
    143 static u8 cs_mask2_num[] = { 0, 0, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3 };
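/*
 * cs_mask2_num[] maps a CS bitmask to a CS number, and GET_CS_FROM_MASK()
 * above indexes it directly: for example GET_CS_FROM_MASK(0x1) == 0,
 * GET_CS_FROM_MASK(0x2) == 1 and GET_CS_FROM_MASK(0x4) == 2; for multi-bit
 * masks the table yields the highest CS index covered by the mask.
 */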
    144 
    145 static struct reg_data odpg_default_value[] = {
    146 	{0x1034, 0x38000, MASK_ALL_BITS},
    147 	{0x1038, 0x0, MASK_ALL_BITS},
    148 	{0x10b0, 0x0, MASK_ALL_BITS},
    149 	{0x10b8, 0x0, MASK_ALL_BITS},
    150 	{0x10c0, 0x0, MASK_ALL_BITS},
    151 	{0x10f0, 0x0, MASK_ALL_BITS},
    152 	{0x10f4, 0x0, MASK_ALL_BITS},
    153 	{0x10f8, 0xff, MASK_ALL_BITS},
    154 	{0x10fc, 0xffff, MASK_ALL_BITS},
    155 	{0x1130, 0x0, MASK_ALL_BITS},
    156 	{0x1830, 0x2000000, MASK_ALL_BITS},
    157 	{0x14d0, 0x0, MASK_ALL_BITS},
    158 	{0x14d4, 0x0, MASK_ALL_BITS},
    159 	{0x14d8, 0x0, MASK_ALL_BITS},
    160 	{0x14dc, 0x0, MASK_ALL_BITS},
    161 	{0x1454, 0x0, MASK_ALL_BITS},
    162 	{0x1594, 0x0, MASK_ALL_BITS},
    163 	{0x1598, 0x0, MASK_ALL_BITS},
    164 	{0x159c, 0x0, MASK_ALL_BITS},
    165 	{0x15a0, 0x0, MASK_ALL_BITS},
    166 	{0x15a4, 0x0, MASK_ALL_BITS},
    167 	{0x15a8, 0x0, MASK_ALL_BITS},
    168 	{0x15ac, 0x0, MASK_ALL_BITS},
    169 	{0x1604, 0x0, MASK_ALL_BITS},
    170 	{0x1608, 0x0, MASK_ALL_BITS},
    171 	{0x160c, 0x0, MASK_ALL_BITS},
    172 	{0x1610, 0x0, MASK_ALL_BITS},
    173 	{0x1614, 0x0, MASK_ALL_BITS},
    174 	{0x1618, 0x0, MASK_ALL_BITS},
    175 	{0x1624, 0x0, MASK_ALL_BITS},
    176 	{0x1690, 0x0, MASK_ALL_BITS},
    177 	{0x1694, 0x0, MASK_ALL_BITS},
    178 	{0x1698, 0x0, MASK_ALL_BITS},
    179 	{0x169c, 0x0, MASK_ALL_BITS},
    180 	{0x14b8, 0x6f67, MASK_ALL_BITS},
    181 	{0x1630, 0x0, MASK_ALL_BITS},
    182 	{0x1634, 0x0, MASK_ALL_BITS},
    183 	{0x1638, 0x0, MASK_ALL_BITS},
    184 	{0x163c, 0x0, MASK_ALL_BITS},
    185 	{0x16b0, 0x0, MASK_ALL_BITS},
    186 	{0x16b4, 0x0, MASK_ALL_BITS},
    187 	{0x16b8, 0x0, MASK_ALL_BITS},
    188 	{0x16bc, 0x0, MASK_ALL_BITS},
    189 	{0x16c0, 0x0, MASK_ALL_BITS},
    190 	{0x16c4, 0x0, MASK_ALL_BITS},
    191 	{0x16c8, 0x0, MASK_ALL_BITS},
    192 	{0x16cc, 0x1, MASK_ALL_BITS},
    193 	{0x16f0, 0x1, MASK_ALL_BITS},
    194 	{0x16f4, 0x0, MASK_ALL_BITS},
    195 	{0x16f8, 0x0, MASK_ALL_BITS},
    196 	{0x16fc, 0x0, MASK_ALL_BITS}
    197 };
    198 
    199 /* MR cmd and addr definitions */
    200 struct mv_ddr_mr_data mr_data[] = {
    201 	{MRS0_CMD, MR0_REG},
    202 	{MRS1_CMD, MR1_REG},
    203 	{MRS2_CMD, MR2_REG},
    204 	{MRS3_CMD, MR3_REG}
    205 };
    206 
    207 static int ddr3_tip_pad_inv(u32 dev_num, u32 if_id);
    208 static int ddr3_tip_rank_control(u32 dev_num, u32 if_id);
    209 
    210 /*
     211  * Update global training parameters with data provided by the user
    212  */
    213 int ddr3_tip_tune_training_params(u32 dev_num,
    214 				  struct tune_train_params *params)
    215 {
    216 	if (params->ck_delay != PARAM_UNDEFINED)
    217 		ck_delay = params->ck_delay;
    218 	if (params->phy_reg3_val != PARAM_UNDEFINED)
    219 		phy_reg3_val = params->phy_reg3_val;
    220 	if (params->g_rtt_nom != PARAM_UNDEFINED)
    221 		g_rtt_nom = params->g_rtt_nom;
    222 	if (params->g_rtt_wr != PARAM_UNDEFINED)
    223 		g_rtt_wr = params->g_rtt_wr;
    224 	if (params->g_dic != PARAM_UNDEFINED)
    225 		g_dic = params->g_dic;
    226 	if (params->g_odt_config != PARAM_UNDEFINED)
    227 		g_odt_config = params->g_odt_config;
    228 	if (params->g_zpri_data != PARAM_UNDEFINED)
    229 		g_zpri_data = params->g_zpri_data;
    230 	if (params->g_znri_data != PARAM_UNDEFINED)
    231 		g_znri_data = params->g_znri_data;
    232 	if (params->g_zpri_ctrl != PARAM_UNDEFINED)
    233 		g_zpri_ctrl = params->g_zpri_ctrl;
    234 	if (params->g_znri_ctrl != PARAM_UNDEFINED)
    235 		g_znri_ctrl = params->g_znri_ctrl;
    236 	if (params->g_zpodt_data != PARAM_UNDEFINED)
    237 		g_zpodt_data = params->g_zpodt_data;
    238 	if (params->g_znodt_data != PARAM_UNDEFINED)
    239 		g_znodt_data = params->g_znodt_data;
    240 	if (params->g_zpodt_ctrl != PARAM_UNDEFINED)
    241 		g_zpodt_ctrl = params->g_zpodt_ctrl;
    242 	if (params->g_znodt_ctrl != PARAM_UNDEFINED)
    243 		g_znodt_ctrl = params->g_znodt_ctrl;
    244 	if (params->g_rtt_park != PARAM_UNDEFINED)
    245 		g_rtt_park = params->g_rtt_park;
    246 
    247 	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
    248 			  ("DGL parameters: 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X\n",
    249 			   g_zpri_data, g_znri_data, g_zpri_ctrl, g_znri_ctrl, g_zpodt_data, g_znodt_data,
    250 			   g_zpodt_ctrl, g_znodt_ctrl, g_rtt_nom, g_dic, g_odt_config, g_rtt_wr));
    251 
    252 	return MV_OK;
    253 }
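/*
 * Minimal usage sketch (hypothetical caller; the values are illustrative
 * only): the caller sets every field of struct tune_train_params, using
 * PARAM_UNDEFINED for any parameter that should keep the default defined at
 * the top of this file, e.g.:
 *
 *	struct tune_train_params params;
 *
 *	params.ck_delay = 160;
 *	params.phy_reg3_val = PARAM_UNDEFINED;
 *	params.g_rtt_nom = 0x44;
 *	(... remaining fields set likewise ...)
 *	ddr3_tip_tune_training_params(0, &params);
 */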
    254 
    255 /*
    256  * Configure CS
    257  */
    258 int ddr3_tip_configure_cs(u32 dev_num, u32 if_id, u32 cs_num, u32 enable)
    259 {
    260 	u32 data, addr_hi, data_high;
    261 	u32 mem_index;
    262 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
    263 
    264 	if (enable == 1) {
    265 		data = (tm->interface_params[if_id].bus_width ==
    266 			MV_DDR_DEV_WIDTH_8BIT) ? 0 : 1;
    267 		CHECK_STATUS(ddr3_tip_if_write
    268 			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
    269 			      SDRAM_ADDR_CTRL_REG, (data << (cs_num * 4)),
    270 			      0x3 << (cs_num * 4)));
    271 		mem_index = tm->interface_params[if_id].memory_size;
    272 
    273 		addr_hi = mem_size_config[mem_index] & 0x3;
    274 		CHECK_STATUS(ddr3_tip_if_write
    275 			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
    276 			      SDRAM_ADDR_CTRL_REG,
    277 			      (addr_hi << (2 + cs_num * 4)),
    278 			      0x3 << (2 + cs_num * 4)));
    279 
    280 		data_high = (mem_size_config[mem_index] & 0x4) >> 2;
    281 		CHECK_STATUS(ddr3_tip_if_write
    282 			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
    283 			      SDRAM_ADDR_CTRL_REG,
    284 			      data_high << (20 + cs_num), 1 << (20 + cs_num)));
    285 
    286 		/* Enable Address Select Mode */
    287 		CHECK_STATUS(ddr3_tip_if_write
    288 			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
    289 			      SDRAM_ADDR_CTRL_REG, 1 << (16 + cs_num),
    290 			      1 << (16 + cs_num)));
    291 	}
    292 	switch (cs_num) {
    293 	case 0:
    294 	case 1:
    295 	case 2:
    296 		CHECK_STATUS(ddr3_tip_if_write
    297 			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
    298 			      DUNIT_CTRL_LOW_REG, (enable << (cs_num + 11)),
    299 			      1 << (cs_num + 11)));
    300 		break;
    301 	case 3:
    302 		CHECK_STATUS(ddr3_tip_if_write
    303 			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
    304 			      DUNIT_CTRL_LOW_REG, (enable << 15), 1 << 15));
    305 		break;
    306 	}
    307 
    308 	return MV_OK;
    309 }
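/*
 * As implied by the shifts above, SDRAM_ADDR_CTRL_REG holds a nibble per CS
 * starting at bit (cs_num * 4): its low two bits select the device width
 * (0 for x8, 1 for x16) and its high two bits hold the low part of the
 * die-capacity code taken from mem_size_config[]; bit (20 + cs_num) holds
 * the high part of that code and bit (16 + cs_num) enables address select
 * mode for that CS.
 */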
    310 
    311 /*
    312  * Calculate number of CS
    313  */
    314 int calc_cs_num(u32 dev_num, u32 if_id, u32 *cs_num)
    315 {
    316 	u32 cs;
    317 	u32 bus_cnt;
    318 	u32 cs_count;
    319 	u32 cs_bitmask;
    320 	u32 curr_cs_num = 0;
    321 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
    322 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
    323 
    324 	for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) {
    325 		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
    326 		cs_count = 0;
    327 		cs_bitmask = tm->interface_params[if_id].
    328 			as_bus_params[bus_cnt].cs_bitmask;
    329 		for (cs = 0; cs < MAX_CS_NUM; cs++) {
    330 			if ((cs_bitmask >> cs) & 1)
    331 				cs_count++;
    332 		}
    333 
    334 		if (curr_cs_num == 0) {
    335 			curr_cs_num = cs_count;
    336 		} else if (cs_count != curr_cs_num) {
    337 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
    338 					  ("CS number is different per bus (IF %d BUS %d cs_num %d curr_cs_num %d)\n",
    339 					   if_id, bus_cnt, cs_count,
    340 					   curr_cs_num));
    341 			return MV_NOT_SUPPORTED;
    342 		}
    343 	}
    344 	*cs_num = curr_cs_num;
    345 
    346 	return MV_OK;
    347 }
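/*
 * Example: with cs_bitmask = 0x3 on every active bus, calc_cs_num() reports
 * two chip-selects; it returns MV_NOT_SUPPORTED if the per-bus CS counts
 * differ.
 */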
    348 
    349 /*
    350  * Init Controller Flow
    351  */
    352 int hws_ddr3_tip_init_controller(u32 dev_num, struct init_cntr_param *init_cntr_prm)
    353 {
    354 	u32 if_id;
    355 	u32 cs_num;
    356 	u32 t_ckclk = 0, t_wr = 0, t2t = 0;
    357 	u32 data_value = 0, cs_cnt = 0,
    358 		mem_mask = 0, bus_index = 0;
    359 	enum hws_speed_bin speed_bin_index = SPEED_BIN_DDR_2133N;
    360 	u32 cs_mask = 0;
    361 	u32 cl_value = 0, cwl_val = 0;
    362 	u32 bus_cnt = 0, adll_tap = 0;
    363 	enum hws_access_type access_type = ACCESS_TYPE_UNICAST;
    364 	u32 data_read[MAX_INTERFACE_NUM];
    365 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
    366 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
    367 	enum hws_ddr_freq freq = tm->interface_params[0].memory_freq;
    368 	enum mv_ddr_timing timing;
    369 
    370 	DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
    371 			  ("Init_controller, do_mrs_phy=%d, is_ctrl64_bit=%d\n",
    372 			   init_cntr_prm->do_mrs_phy,
    373 			   init_cntr_prm->is_ctrl64_bit));
    374 
    375 	if (init_cntr_prm->init_phy == 1) {
    376 		CHECK_STATUS(ddr3_tip_configure_phy(dev_num));
    377 	}
    378 
    379 	if (generic_init_controller == 1) {
    380 		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
    381 			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
    382 			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
    383 					  ("active IF %d\n", if_id));
    384 			mem_mask = 0;
    385 			for (bus_index = 0;
    386 			     bus_index < octets_per_if_num;
    387 			     bus_index++) {
    388 				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_index);
    389 				mem_mask |=
    390 					tm->interface_params[if_id].
    391 					as_bus_params[bus_index].mirror_enable_bitmask;
    392 			}
    393 
    394 			if (mem_mask != 0) {
    395 				CHECK_STATUS(ddr3_tip_if_write
    396 					     (dev_num, ACCESS_TYPE_MULTICAST,
    397 					      if_id, DUAL_DUNIT_CFG_REG, 0,
    398 					      0x8));
    399 			}
    400 
    401 			speed_bin_index =
    402 				tm->interface_params[if_id].
    403 				speed_bin_index;
    404 
     405 			/* t_ckclk is the external clock period */
    406 			t_ckclk = (MEGA / freq_val[freq]);
    407 
    408 			if (MV_DDR_IS_HALF_BUS_DRAM_MODE(tm->bus_act_mask, octets_per_if_num))
    409 				data_value = (0x4000 | 0 | 0x1000000) & ~(1 << 26);
    410 			else
    411 				data_value = (0x4000 | 0x8000 | 0x1000000) & ~(1 << 26);
    412 
    413 			/* Interface Bus Width */
    414 			/* SRMode */
    415 			CHECK_STATUS(ddr3_tip_if_write
    416 				     (dev_num, access_type, if_id,
    417 				      SDRAM_CFG_REG, data_value,
    418 				      0x100c000));
    419 
    420 			/* Interleave first command pre-charge enable (TBD) */
    421 			CHECK_STATUS(ddr3_tip_if_write
    422 				     (dev_num, access_type, if_id,
    423 				      SDRAM_OPEN_PAGES_CTRL_REG, (1 << 10),
    424 				      (1 << 10)));
    425 
    426 			/* Reset divider_b assert -> de-assert */
    427 			CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
    428 						       SDRAM_CFG_REG,
    429 						       0x0 << PUP_RST_DIVIDER_OFFS,
    430 						       PUP_RST_DIVIDER_MASK << PUP_RST_DIVIDER_OFFS));
    431 
    432 			CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
    433 						       SDRAM_CFG_REG,
    434 						       0x1 << PUP_RST_DIVIDER_OFFS,
    435 						       PUP_RST_DIVIDER_MASK << PUP_RST_DIVIDER_OFFS));
    436 
    437 			/* PHY configuration */
    438 			/*
     439 			 * Postamble length = 1.5cc, address ctrl to clk skew
     440 			 * = 1/2 cc, preamble length normal, parallel ADLL enable
    441 			 */
    442 			CHECK_STATUS(ddr3_tip_if_write
    443 				     (dev_num, access_type, if_id,
    444 				      DRAM_PHY_CFG_REG, 0x28, 0x3e));
    445 			if (init_cntr_prm->is_ctrl64_bit) {
    446 				/* positive edge */
    447 				CHECK_STATUS(ddr3_tip_if_write
    448 					     (dev_num, access_type, if_id,
    449 					      DRAM_PHY_CFG_REG, 0x0,
    450 					      0xff80));
    451 			}
    452 
    453 			/* calibration block disable */
    454 			/* Xbar Read buffer select (for Internal access) */
    455 			CHECK_STATUS(ddr3_tip_if_write
    456 				     (dev_num, access_type, if_id,
    457 				      MAIN_PADS_CAL_MACH_CTRL_REG, 0x1200c,
    458 				      0x7dffe01c));
    459 			CHECK_STATUS(ddr3_tip_if_write
    460 				     (dev_num, access_type, if_id,
    461 				      MAIN_PADS_CAL_MACH_CTRL_REG,
    462 				      calibration_update_control << 3, 0x3 << 3));
    463 
    464 			/* Pad calibration control - enable */
    465 			CHECK_STATUS(ddr3_tip_if_write
    466 				     (dev_num, access_type, if_id,
    467 				      MAIN_PADS_CAL_MACH_CTRL_REG, 0x1, 0x1));
    468 			if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) < MV_TIP_REV_3) {
     469 				/* DDR3 rank ctrl - part of the generic code */
    470 				/* CS1 mirroring enable + w/a for JIRA DUNIT-14581 */
    471 				CHECK_STATUS(ddr3_tip_if_write
    472 					     (dev_num, access_type, if_id,
    473 					      DDR3_RANK_CTRL_REG, 0x27, MASK_ALL_BITS));
    474 			}
    475 
    476 			cs_mask = 0;
    477 			data_value = 0x7;
    478 			/*
     479 			 * Address ctrl - part of the generic code.
     480 			 * The following configuration is done:
     481 			 * 1) Memory size
     482 			 * 2) Bus width
     483 			 * 3) CS#
     484 			 * 4) Page number
     485 			 * Per D-unit, get the bus width parameter from
     486 			 * the topology map.
    487 			 */
    488 
    489 			data_value =
    490 				(tm->interface_params[if_id].
    491 				 bus_width == MV_DDR_DEV_WIDTH_8BIT) ? 0 : 1;
    492 
    493 			/* create merge cs mask for all cs available in dunit */
    494 			for (bus_cnt = 0;
    495 			     bus_cnt < octets_per_if_num;
    496 			     bus_cnt++) {
    497 				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
    498 				cs_mask |=
    499 					tm->interface_params[if_id].
    500 					as_bus_params[bus_cnt].cs_bitmask;
    501 			}
    502 			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
    503 					  ("Init_controller IF %d cs_mask %d\n",
    504 					   if_id, cs_mask));
    505 			/*
     506 			 * Configure the following based on the topology map:
     507 			 * if the D-unit has only CS0, configure CS0; if it
     508 			 * has multiple CSs, configure them all. The bus width
     509 			 * is the memory bus width - x8 or x16.
    510 			 */
    511 			for (cs_cnt = 0; cs_cnt < NUM_OF_CS; cs_cnt++) {
    512 				ddr3_tip_configure_cs(dev_num, if_id, cs_cnt,
    513 						      ((cs_mask & (1 << cs_cnt)) ? 1
    514 						       : 0));
    515 			}
    516 
    517 			if (init_cntr_prm->do_mrs_phy) {
    518 				/*
     519 				 * MR0 - part of the generic code.
     520 				 * The following configuration is done:
     521 				 * 1) Burst length
     522 				 * 2) CAS latency
     523 				 * For each D-unit, get its speed bin and target
     524 				 * frequency; from these two parameters look up
     525 				 * the appropriate CAS latency in the CL table.
    526 				 */
    527 				cl_value =
    528 					tm->interface_params[if_id].
    529 					cas_l;
    530 				cwl_val =
    531 					tm->interface_params[if_id].
    532 					cas_wl;
    533 				DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
    534 						  ("cl_value 0x%x cwl_val 0x%x\n",
    535 						   cl_value, cwl_val));
    536 
    537 				t_wr = time_to_nclk(speed_bin_table
    538 							   (speed_bin_index,
    539 							    SPEED_BIN_TWR), t_ckclk);
    540 
    541 				data_value =
    542 					((cl_mask_table[cl_value] & 0x1) << 2) |
    543 					((cl_mask_table[cl_value] & 0xe) << 3);
    544 				CHECK_STATUS(ddr3_tip_if_write
    545 					     (dev_num, access_type, if_id,
    546 					      MR0_REG, data_value,
    547 					      (0x7 << 4) | (1 << 2)));
    548 				CHECK_STATUS(ddr3_tip_if_write
    549 					     (dev_num, access_type, if_id,
    550 					      MR0_REG, twr_mask_table[t_wr] << 9,
    551 					      0x7 << 9));
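				/*
				 * As implied by the masks above, the CAS
				 * latency code lands in MR0 bits [6:4] and
				 * bit 2, and the tWR code (in clock cycles,
				 * via twr_mask_table[]) lands in MR0 bits
				 * [11:9].
				 */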
    552 
    553 				/*
    554 				 * MR1: Set RTT and DIC Design GL values
    555 				 * configured by user
    556 				 */
    557 				CHECK_STATUS(ddr3_tip_if_write
    558 					     (dev_num, ACCESS_TYPE_MULTICAST,
    559 					      PARAM_NOT_CARE, MR1_REG,
    560 					      g_dic | g_rtt_nom, 0x266));
    561 
    562 				/* MR2 - Part of the Generic code */
    563 				/*
    564 				 * The next configuration is done:
    565 				 * 1)  SRT
    566 				 * 2) CAS Write Latency
    567 				 */
    568 				data_value = (cwl_mask_table[cwl_val] << 3);
    569 				data_value |=
    570 					((tm->interface_params[if_id].
    571 					  interface_temp ==
    572 					  MV_DDR_TEMP_HIGH) ? (1 << 7) : 0);
    573 				data_value |= g_rtt_wr;
    574 				CHECK_STATUS(ddr3_tip_if_write
    575 					     (dev_num, access_type, if_id,
    576 					      MR2_REG, data_value,
    577 					      (0x7 << 3) | (0x1 << 7) | (0x3 <<
    578 									 9)));
    579 			}
    580 
    581 			ddr3_tip_write_odt(dev_num, access_type, if_id,
    582 					   cl_value, cwl_val);
    583 			ddr3_tip_set_timing(dev_num, access_type, if_id, freq);
    584 
    585 			if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) < MV_TIP_REV_3) {
    586 				CHECK_STATUS(ddr3_tip_if_write
    587 					     (dev_num, access_type, if_id,
    588 					      DUNIT_CTRL_HIGH_REG, 0x1000119,
    589 					      0x100017F));
    590 			} else {
    591 				CHECK_STATUS(ddr3_tip_if_write
    592 					     (dev_num, access_type, if_id,
    593 					      DUNIT_CTRL_HIGH_REG, 0x600177 |
    594 					      (init_cntr_prm->is_ctrl64_bit ?
    595 					      CPU_INTERJECTION_ENA_SPLIT_ENA << CPU_INTERJECTION_ENA_OFFS :
    596 					      CPU_INTERJECTION_ENA_SPLIT_DIS << CPU_INTERJECTION_ENA_OFFS),
    597 					      0x1600177 | CPU_INTERJECTION_ENA_MASK <<
    598 					      CPU_INTERJECTION_ENA_OFFS));
    599 			}
    600 
    601 			/* reset bit 7 */
    602 			CHECK_STATUS(ddr3_tip_if_write
    603 				     (dev_num, access_type, if_id,
    604 				      DUNIT_CTRL_HIGH_REG,
    605 				      (init_cntr_prm->msys_init << 7), (1 << 7)));
    606 
    607 			timing = tm->interface_params[if_id].timing;
    608 
    609 			if (mode_2t != 0xff) {
    610 				t2t = mode_2t;
    611 			} else if (timing != MV_DDR_TIM_DEFAULT) {
    612 				t2t = (timing == MV_DDR_TIM_2T) ? 1 : 0;
    613 			} else {
    614 				/* calculate number of CS (per interface) */
    615 				CHECK_STATUS(calc_cs_num
    616 					     (dev_num, if_id, &cs_num));
    617 				t2t = (cs_num == 1) ? 0 : 1;
    618 			}
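			/*
			 * Summary of the 2T decision: an explicit mode_2t
			 * (set by the user in debug mode) wins, then the
			 * topology map timing field; otherwise 1T is used
			 * for a single-CS interface and 2T for multi-CS.
			 */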
    619 
    620 			CHECK_STATUS(ddr3_tip_if_write
    621 				     (dev_num, access_type, if_id,
    622 				      DUNIT_CTRL_LOW_REG, t2t << 3,
    623 				      0x3 << 3));
    624 			CHECK_STATUS(ddr3_tip_if_write
    625 				     (dev_num, access_type, if_id,
    626 				      DDR_TIMING_REG, 0x28 << 9, 0x3f << 9));
    627 			CHECK_STATUS(ddr3_tip_if_write
    628 				     (dev_num, access_type, if_id,
    629 				      DDR_TIMING_REG, 0xa << 21, 0xff << 21));
    630 
    631 			/* move the block to ddr3_tip_set_timing - end */
    632 			/* AUTO_ZQC_TIMING */
    633 			CHECK_STATUS(ddr3_tip_if_write
    634 				     (dev_num, access_type, if_id,
    635 				      ZQC_CFG_REG, (AUTO_ZQC_TIMING | (2 << 20)),
    636 				      0x3fffff));
    637 			CHECK_STATUS(ddr3_tip_if_read
    638 				     (dev_num, access_type, if_id,
    639 				      DRAM_PHY_CFG_REG, data_read, 0x30));
    640 			data_value =
    641 				(data_read[if_id] == 0) ? (1 << 11) : 0;
    642 			CHECK_STATUS(ddr3_tip_if_write
    643 				     (dev_num, access_type, if_id,
    644 				      DUNIT_CTRL_HIGH_REG, data_value,
    645 				      (1 << 11)));
    646 
    647 			/* Set Active control for ODT write transactions */
    648 			CHECK_STATUS(ddr3_tip_if_write
    649 				     (dev_num, ACCESS_TYPE_MULTICAST,
    650 				      PARAM_NOT_CARE, 0x1494, g_odt_config,
    651 				      MASK_ALL_BITS));
    652 
    653 			if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) == MV_TIP_REV_3) {
    654 				CHECK_STATUS(ddr3_tip_if_write
    655 					     (dev_num, access_type, if_id,
    656 					      0x14a8, 0x900, 0x900));
     657 				/* w/a: controls whether the control sub-phy outputs float during self-refresh */
    658 				CHECK_STATUS(ddr3_tip_if_write
    659 					     (dev_num, access_type, if_id,
    660 					      0x16d0, 0, 0x8000));
    661 			}
    662 		}
    663 	}
    664 
    665 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
    666 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
    667 		CHECK_STATUS(ddr3_tip_rank_control(dev_num, if_id));
    668 
    669 		if (init_cntr_prm->do_mrs_phy) {
    670 			CHECK_STATUS(ddr3_tip_pad_inv(dev_num, if_id));
    671 		}
    672 
    673 		/* Pad calibration control - disable */
    674 		CHECK_STATUS(ddr3_tip_if_write
    675 			     (dev_num, access_type, if_id,
    676 			      MAIN_PADS_CAL_MACH_CTRL_REG, 0x0, 0x1));
    677 		CHECK_STATUS(ddr3_tip_if_write
    678 			     (dev_num, access_type, if_id,
    679 			      MAIN_PADS_CAL_MACH_CTRL_REG,
    680 			      calibration_update_control << 3, 0x3 << 3));
    681 	}
    682 
    683 
    684 	if (delay_enable != 0) {
    685 		adll_tap = MEGA / (freq_val[freq] * 64);
    686 		ddr3_tip_cmd_addr_init_delay(dev_num, adll_tap);
    687 	}
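	/*
	 * One ADLL tap is 1/64 of the clock period in the same units as
	 * t_ckclk (MEGA / freq_val); e.g. with freq_val = 800 the expression
	 * above gives 1000000 / (800 * 64), roughly 19.
	 */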
    688 
    689 	return MV_OK;
    690 }
    691 
    692 /*
    693  * Rank Control Flow
    694  */
    695 static int ddr3_tip_rev2_rank_control(u32 dev_num, u32 if_id)
    696 {
    697 	u32 data_value = 0,  bus_cnt = 0;
    698 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
    699 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
    700 
    701 	for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) {
    702 		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
    703 		data_value |= tm->interface_params[if_id].as_bus_params[bus_cnt].
    704 			      cs_bitmask;
    705 
    706 		if (tm->interface_params[if_id].as_bus_params[bus_cnt].
    707 		    mirror_enable_bitmask == 1) {
    708 			/*
    709 			 * Check mirror_enable_bitmask
    710 			 * If it is enabled, CS + 4 bit in a word to be '1'
    711 			 */
    712 			if ((tm->interface_params[if_id].as_bus_params[bus_cnt].
    713 			     cs_bitmask & 0x1) != 0) {
    714 				data_value |= tm->interface_params[if_id].
    715 					      as_bus_params[bus_cnt].
    716 					      mirror_enable_bitmask << 4;
    717 			}
    718 
    719 			if ((tm->interface_params[if_id].as_bus_params[bus_cnt].
    720 			     cs_bitmask & 0x2) != 0) {
    721 				data_value |= tm->interface_params[if_id].
    722 					      as_bus_params[bus_cnt].
    723 					      mirror_enable_bitmask << 5;
    724 			}
    725 
    726 			if ((tm->interface_params[if_id].as_bus_params[bus_cnt].
    727 			     cs_bitmask & 0x4) != 0) {
    728 				data_value |= tm->interface_params[if_id].
    729 					      as_bus_params[bus_cnt].
    730 					      mirror_enable_bitmask << 6;
    731 			}
    732 
    733 			if ((tm->interface_params[if_id].as_bus_params[bus_cnt].
    734 			     cs_bitmask & 0x8) != 0) {
    735 				data_value |= tm->interface_params[if_id].
    736 					      as_bus_params[bus_cnt].
    737 					      mirror_enable_bitmask << 7;
    738 			}
    739 		}
    740 	}
    741 
    742 	CHECK_STATUS(ddr3_tip_if_write
    743 		     (dev_num, ACCESS_TYPE_UNICAST, if_id, DDR3_RANK_CTRL_REG,
    744 		      data_value, 0xff));
    745 
    746 	return MV_OK;
    747 }
    748 
    749 static int ddr3_tip_rev3_rank_control(u32 dev_num, u32 if_id)
    750 {
    751 	u32 data_value = 0, bus_cnt;
    752 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
    753 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
    754 
    755 	for (bus_cnt = 1; bus_cnt < octets_per_if_num; bus_cnt++) {
    756 		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
    757 		if ((tm->interface_params[if_id].
    758 		     as_bus_params[0].cs_bitmask !=
    759 		     tm->interface_params[if_id].
    760 		     as_bus_params[bus_cnt].cs_bitmask) ||
    761 		    (tm->interface_params[if_id].
    762 		     as_bus_params[0].mirror_enable_bitmask !=
    763 		     tm->interface_params[if_id].
    764 		     as_bus_params[bus_cnt].mirror_enable_bitmask))
    765 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
     766 					  ("WARNING: wrong configuration for pup #%d: CS mask and CS mirroring must be the same for all pups\n",
    767 					   bus_cnt));
    768 	}
    769 
    770 	data_value |= tm->interface_params[if_id].
    771 		as_bus_params[0].cs_bitmask;
    772 	data_value |= tm->interface_params[if_id].
    773 		as_bus_params[0].mirror_enable_bitmask << 4;
    774 
    775 	CHECK_STATUS(ddr3_tip_if_write
    776 		     (dev_num, ACCESS_TYPE_UNICAST, if_id, DDR3_RANK_CTRL_REG,
    777 		      data_value, 0xff));
    778 
    779 	return MV_OK;
    780 }
    781 
    782 static int ddr3_tip_rank_control(u32 dev_num, u32 if_id)
    783 {
    784 	if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) == MV_TIP_REV_2)
    785 		return ddr3_tip_rev2_rank_control(dev_num, if_id);
    786 	else
    787 		return ddr3_tip_rev3_rank_control(dev_num, if_id);
    788 }
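/*
 * In both variants above, the value written to DDR3_RANK_CTRL_REG packs the
 * CS bitmask into bits [3:0] and the per-CS mirroring enable into bits [7:4]
 * (mask 0xff); the rev2 flow merges the settings of all active buses, while
 * the rev3 flow takes bus 0 as the reference and only warns when the other
 * buses disagree.
 */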
    789 
    790 /*
    791  * PAD Inverse Flow
    792  */
    793 static int ddr3_tip_pad_inv(u32 dev_num, u32 if_id)
    794 {
    795 	u32 bus_cnt, data_value, ck_swap_pup_ctrl;
    796 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
    797 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
    798 
    799 	for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) {
    800 		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
    801 		if (tm->interface_params[if_id].
    802 		    as_bus_params[bus_cnt].is_dqs_swap == 1) {
    803 			/* dqs swap */
    804 			ddr3_tip_bus_read_modify_write(dev_num, ACCESS_TYPE_UNICAST,
    805 						       if_id, bus_cnt,
    806 						       DDR_PHY_DATA,
    807 						       PHY_CTRL_PHY_REG, 0xc0,
    808 						       0xc0);
    809 		}
    810 
    811 		if (tm->interface_params[if_id].
    812 		    as_bus_params[bus_cnt].is_ck_swap == 1) {
    813 			if (bus_cnt <= 1)
    814 				data_value = 0x5 << 2;
    815 			else
    816 				data_value = 0xa << 2;
    817 
    818 			/* mask equals data */
     819 			/* CK swap is controlled via control pup #0 only! */
    820 			ck_swap_pup_ctrl = 0;
    821 			ddr3_tip_bus_read_modify_write(dev_num, ACCESS_TYPE_UNICAST,
    822 						       if_id, ck_swap_pup_ctrl,
    823 						       DDR_PHY_CONTROL,
    824 						       PHY_CTRL_PHY_REG,
    825 						       data_value, data_value);
    826 		}
    827 	}
    828 
    829 	return MV_OK;
    830 }
    831 
    832 /*
    833  * Algorithm Parameters Validation
    834  */
    835 int ddr3_tip_validate_algo_var(u32 value, u32 fail_value, char *var_name)
    836 {
    837 	if (value == fail_value) {
    838 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
    839 				  ("Error: %s is not initialized (Algo Components Validation)\n",
    840 				   var_name));
    841 		return 0;
    842 	}
    843 
    844 	return 1;
    845 }
    846 
    847 int ddr3_tip_validate_algo_ptr(void *ptr, void *fail_value, char *ptr_name)
    848 {
    849 	if (ptr == fail_value) {
    850 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
    851 				  ("Error: %s is not initialized (Algo Components Validation)\n",
    852 				   ptr_name));
    853 		return 0;
    854 	}
    855 
    856 	return 1;
    857 }
    858 
    859 int ddr3_tip_validate_algo_components(u8 dev_num)
    860 {
    861 	int status = 1;
    862 
    863 	/* Check DGL parameters*/
    864 	status &= ddr3_tip_validate_algo_var(ck_delay, PARAM_UNDEFINED, "ck_delay");
    865 	status &= ddr3_tip_validate_algo_var(phy_reg3_val, PARAM_UNDEFINED, "phy_reg3_val");
    866 	status &= ddr3_tip_validate_algo_var(g_rtt_nom, PARAM_UNDEFINED, "g_rtt_nom");
    867 	status &= ddr3_tip_validate_algo_var(g_dic, PARAM_UNDEFINED, "g_dic");
    868 	status &= ddr3_tip_validate_algo_var(odt_config, PARAM_UNDEFINED, "odt_config");
    869 	status &= ddr3_tip_validate_algo_var(g_zpri_data, PARAM_UNDEFINED, "g_zpri_data");
    870 	status &= ddr3_tip_validate_algo_var(g_znri_data, PARAM_UNDEFINED, "g_znri_data");
    871 	status &= ddr3_tip_validate_algo_var(g_zpri_ctrl, PARAM_UNDEFINED, "g_zpri_ctrl");
    872 	status &= ddr3_tip_validate_algo_var(g_znri_ctrl, PARAM_UNDEFINED, "g_znri_ctrl");
    873 	status &= ddr3_tip_validate_algo_var(g_zpodt_data, PARAM_UNDEFINED, "g_zpodt_data");
    874 	status &= ddr3_tip_validate_algo_var(g_znodt_data, PARAM_UNDEFINED, "g_znodt_data");
    875 	status &= ddr3_tip_validate_algo_var(g_zpodt_ctrl, PARAM_UNDEFINED, "g_zpodt_ctrl");
    876 	status &= ddr3_tip_validate_algo_var(g_znodt_ctrl, PARAM_UNDEFINED, "g_znodt_ctrl");
    877 
    878 	/* Check functions pointers */
    879 	status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].tip_dunit_mux_select_func,
    880 					     NULL, "tip_dunit_mux_select_func");
    881 	status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].mv_ddr_dunit_write,
    882 					     NULL, "mv_ddr_dunit_write");
    883 	status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].mv_ddr_dunit_read,
    884 					     NULL, "mv_ddr_dunit_read");
    885 	status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].mv_ddr_phy_write,
    886 					     NULL, "mv_ddr_phy_write");
    887 	status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].mv_ddr_phy_read,
    888 					     NULL, "mv_ddr_phy_read");
    889 	status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].tip_get_freq_config_info_func,
    890 					     NULL, "tip_get_freq_config_info_func");
    891 	status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].tip_set_freq_divider_func,
    892 					     NULL, "tip_set_freq_divider_func");
    893 	status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].tip_get_clock_ratio,
    894 					     NULL, "tip_get_clock_ratio");
    895 
    896 	status &= ddr3_tip_validate_algo_ptr(dq_map_table, NULL, "dq_map_table");
    897 	status &= ddr3_tip_validate_algo_var(dfs_low_freq, 0, "dfs_low_freq");
    898 
    899 	return (status == 1) ? MV_OK : MV_NOT_INITIALIZED;
    900 }
    901 
    902 
    903 int ddr3_pre_algo_config(void)
    904 {
    905 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
    906 
    907 	/* Set Bus3 ECC training mode */
    908 	if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask)) {
    909 		/* Set Bus3 ECC MUX */
    910 		CHECK_STATUS(ddr3_tip_if_write
    911 			     (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
    912 			      DRAM_PINS_MUX_REG, 0x100, 0x100));
    913 	}
    914 
    915 	/* Set regular ECC training mode (bus4 and bus 3) */
    916 	if ((DDR3_IS_ECC_PUP4_MODE(tm->bus_act_mask)) ||
    917 	    (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask)) ||
    918 	    (DDR3_IS_ECC_PUP8_MODE(tm->bus_act_mask))) {
    919 		/* Enable ECC Write MUX */
    920 		CHECK_STATUS(ddr3_tip_if_write
    921 			     (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
    922 			      TRAINING_SW_2_REG, 0x100, 0x100));
    923 		/* General ECC enable */
    924 		CHECK_STATUS(ddr3_tip_if_write
    925 			     (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
    926 			      SDRAM_CFG_REG, 0x40000, 0x40000));
    927 		/* Disable Read Data ECC MUX */
    928 		CHECK_STATUS(ddr3_tip_if_write
    929 			     (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
    930 			      TRAINING_SW_2_REG, 0x0, 0x2));
    931 	}
    932 
    933 	return MV_OK;
    934 }
    935 
    936 int ddr3_post_algo_config(void)
    937 {
    938 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
    939 	int status;
    940 
    941 	status = ddr3_post_run_alg();
    942 	if (MV_OK != status) {
    943 		printf("DDR3 Post Run Alg - FAILED 0x%x\n", status);
    944 		return status;
    945 	}
    946 
    947 	/* Un_set ECC training mode */
    948 	if ((DDR3_IS_ECC_PUP4_MODE(tm->bus_act_mask)) ||
    949 	    (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask)) ||
    950 	    (DDR3_IS_ECC_PUP8_MODE(tm->bus_act_mask))) {
    951 		/* Disable ECC Write MUX */
    952 		CHECK_STATUS(ddr3_tip_if_write
    953 			     (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
    954 			      TRAINING_SW_2_REG, 0x0, 0x100));
    955 		/* General ECC and Bus3 ECC MUX remains enabled */
    956 	}
    957 
    958 	return MV_OK;
    959 }
    960 
    961 /*
    962  * Run Training Flow
    963  */
    964 int hws_ddr3_tip_run_alg(u32 dev_num, enum hws_algo_type algo_type)
    965 {
    966 	int status = MV_OK;
    967 
    968 	status = ddr3_pre_algo_config();
    969 	if (MV_OK != status) {
    970 		printf("DDR3 Pre Algo Config - FAILED 0x%x\n", status);
    971 		return status;
    972 	}
    973 
    974 #ifdef ODT_TEST_SUPPORT
    975 	if (finger_test == 1)
    976 		return odt_test(dev_num, algo_type);
    977 #endif
    978 
    979 	if (algo_type == ALGO_TYPE_DYNAMIC) {
    980 		status = ddr3_tip_ddr3_auto_tune(dev_num);
    981 	}
    982 
    983 	if (status != MV_OK) {
    984 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
    985 				  ("********   DRAM initialization Failed (res 0x%x)   ********\n",
    986 				   status));
    987 		return status;
    988 	}
    989 
    990 	status = ddr3_post_algo_config();
    991 	if (MV_OK != status) {
    992 		printf("DDR3 Post Algo Config - FAILED 0x%x\n", status);
    993 		return status;
    994 	}
    995 
    996 	return status;
    997 }
    998 
    999 #ifdef ODT_TEST_SUPPORT
   1000 /*
   1001  * ODT Test
   1002  */
   1003 static int odt_test(u32 dev_num, enum hws_algo_type algo_type)
   1004 {
   1005 	int ret = MV_OK, ret_tune = MV_OK;
   1006 	int pfinger_val = 0, nfinger_val;
   1007 
   1008 	for (pfinger_val = p_finger_start; pfinger_val <= p_finger_end;
   1009 	     pfinger_val += p_finger_step) {
   1010 		for (nfinger_val = n_finger_start; nfinger_val <= n_finger_end;
   1011 		     nfinger_val += n_finger_step) {
   1012 			if (finger_test != 0) {
   1013 				DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   1014 						  ("pfinger_val %d nfinger_val %d\n",
   1015 						   pfinger_val, nfinger_val));
   1016 				/*
   1017 				 * TODO: need to check the correctness
   1018 				 * of the following two lines.
   1019 				 */
   1020 				g_zpodt_data = pfinger_val;
   1021 				g_znodt_data = nfinger_val;
   1022 			}
   1023 
   1024 			if (algo_type == ALGO_TYPE_DYNAMIC) {
   1025 				ret = ddr3_tip_ddr3_auto_tune(dev_num);
   1026 			}
   1027 		}
   1028 	}
   1029 
   1030 	if (ret_tune != MV_OK) {
   1031 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   1032 				  ("Run_alg: tuning failed %d\n", ret_tune));
   1033 		ret = (ret == MV_OK) ? ret_tune : ret;
   1034 	}
   1035 
   1036 	return ret;
   1037 }
   1038 #endif
   1039 
   1040 /*
   1041  * Select Controller
   1042  */
   1043 int hws_ddr3_tip_select_ddr_controller(u32 dev_num, int enable)
   1044 {
   1045 	return config_func_info[dev_num].
   1046 		tip_dunit_mux_select_func((u8)dev_num, enable);
   1047 }
   1048 
   1049 /*
   1050  * Dunit Register Write
   1051  */
   1052 int ddr3_tip_if_write(u32 dev_num, enum hws_access_type interface_access,
   1053 		      u32 if_id, u32 reg_addr, u32 data_value, u32 mask)
   1054 {
   1055 	config_func_info[dev_num].mv_ddr_dunit_write(reg_addr, mask, data_value);
   1056 
   1057 	return MV_OK;
   1058 }
   1059 
   1060 /*
   1061  * Dunit Register Read
   1062  */
   1063 int ddr3_tip_if_read(u32 dev_num, enum hws_access_type interface_access,
   1064 		     u32 if_id, u32 reg_addr, u32 *data, u32 mask)
   1065 {
   1066 	config_func_info[dev_num].mv_ddr_dunit_read(reg_addr, mask, data);
   1067 
   1068 	return MV_OK;
   1069 }
   1070 
   1071 /*
   1072  * Dunit Register Polling
   1073  */
   1074 int ddr3_tip_if_polling(u32 dev_num, enum hws_access_type access_type,
   1075 			u32 if_id, u32 exp_value, u32 mask, u32 offset,
   1076 			u32 poll_tries)
   1077 {
   1078 	u32 poll_cnt = 0, interface_num = 0, start_if, end_if;
   1079 	u32 read_data[MAX_INTERFACE_NUM];
   1080 	int ret;
   1081 	int is_fail = 0, is_if_fail;
   1082 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
   1083 
   1084 	if (access_type == ACCESS_TYPE_MULTICAST) {
   1085 		start_if = 0;
   1086 		end_if = MAX_INTERFACE_NUM - 1;
   1087 	} else {
   1088 		start_if = if_id;
   1089 		end_if = if_id;
   1090 	}
   1091 
   1092 	for (interface_num = start_if; interface_num <= end_if; interface_num++) {
   1093 		/* polling bit 3 for n times */
   1094 		VALIDATE_IF_ACTIVE(tm->if_act_mask, interface_num);
   1095 
   1096 		is_if_fail = 0;
   1097 		for (poll_cnt = 0; poll_cnt < poll_tries; poll_cnt++) {
   1098 			ret =
   1099 				ddr3_tip_if_read(dev_num, ACCESS_TYPE_UNICAST,
   1100 						 interface_num, offset, read_data,
   1101 						 mask);
   1102 			if (ret != MV_OK)
   1103 				return ret;
   1104 
   1105 			if (read_data[interface_num] == exp_value)
   1106 				break;
   1107 		}
   1108 
   1109 		if (poll_cnt >= poll_tries) {
   1110 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   1111 					  ("max poll IF #%d\n", interface_num));
   1112 			is_fail = 1;
   1113 			is_if_fail = 1;
   1114 		}
   1115 
   1116 		training_result[training_stage][interface_num] =
   1117 			(is_if_fail == 1) ? TEST_FAILED : TEST_SUCCESS;
   1118 	}
   1119 
   1120 	return (is_fail == 0) ? MV_OK : MV_FAIL;
   1121 }
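/*
 * Typical usage, as in the ADLL calibration flow below - poll until the PHY
 * lock status matches the expected value:
 *
 *	ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST, if_id,
 *			    0x3ff03ff, 0x3ff03ff, PHY_LOCK_STATUS_REG,
 *			    MAX_POLLING_ITERATIONS);
 *
 * The call returns MV_OK only if every polled interface reached the expected
 * value within poll_tries reads.
 */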
   1122 
   1123 /*
   1124  * Bus read access
   1125  */
   1126 int ddr3_tip_bus_read(u32 dev_num, u32 if_id,
   1127 		      enum hws_access_type phy_access, u32 phy_id,
   1128 		      enum hws_ddr_phy phy_type, u32 reg_addr, u32 *data)
   1129 {
   1130 	return config_func_info[dev_num].
   1131 		mv_ddr_phy_read(phy_access, phy_id, phy_type, reg_addr, data);
   1132 }
   1133 
   1134 /*
   1135  * Bus write access
   1136  */
   1137 int ddr3_tip_bus_write(u32 dev_num, enum hws_access_type interface_access,
   1138 		       u32 if_id, enum hws_access_type phy_access,
   1139 		       u32 phy_id, enum hws_ddr_phy phy_type, u32 reg_addr,
   1140 		       u32 data_value)
   1141 {
   1142 	return config_func_info[dev_num].
   1143 		mv_ddr_phy_write(phy_access, phy_id, phy_type, reg_addr, data_value, OPERATION_WRITE);
   1144 }
   1145 
   1146 
   1147 /*
   1148  * Phy read-modify-write
   1149  */
   1150 int ddr3_tip_bus_read_modify_write(u32 dev_num, enum hws_access_type access_type,
   1151 				   u32 interface_id, u32 phy_id,
   1152 				   enum hws_ddr_phy phy_type, u32 reg_addr,
   1153 				   u32 data_value, u32 reg_mask)
   1154 {
   1155 	u32 data_val = 0, if_id, start_if, end_if;
   1156 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
   1157 
   1158 	if (access_type == ACCESS_TYPE_MULTICAST) {
   1159 		start_if = 0;
   1160 		end_if = MAX_INTERFACE_NUM - 1;
   1161 	} else {
   1162 		start_if = interface_id;
   1163 		end_if = interface_id;
   1164 	}
   1165 
   1166 	for (if_id = start_if; if_id <= end_if; if_id++) {
   1167 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
   1168 		CHECK_STATUS(ddr3_tip_bus_read
   1169 			     (dev_num, if_id, ACCESS_TYPE_UNICAST, phy_id,
   1170 			      phy_type, reg_addr, &data_val));
   1171 		data_value = (data_val & (~reg_mask)) | (data_value & reg_mask);
   1172 		CHECK_STATUS(ddr3_tip_bus_write
   1173 			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
   1174 			      ACCESS_TYPE_UNICAST, phy_id, phy_type, reg_addr,
   1175 			      data_value));
   1176 	}
   1177 
   1178 	return MV_OK;
   1179 }
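/*
 * The merge above follows the usual read-modify-write rule
 * new = (old & ~mask) | (data & mask); e.g. old = 0x5a, data = 0x03 and
 * mask = 0x0f yield 0x53.
 */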
   1180 
   1181 /*
   1182  * ADLL Calibration
   1183  */
   1184 int adll_calibration(u32 dev_num, enum hws_access_type access_type,
   1185 		     u32 if_id, enum hws_ddr_freq frequency)
   1186 {
   1187 	struct hws_tip_freq_config_info freq_config_info;
   1188 	u32 bus_cnt = 0;
   1189 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
   1190 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
   1191 
    1192 	/* Reset divider_b assert -> de-assert */
   1193 	CHECK_STATUS(ddr3_tip_if_write
   1194 		     (dev_num, access_type, if_id, SDRAM_CFG_REG,
   1195 		      0, 0x10000000));
   1196 	mdelay(10);
   1197 	CHECK_STATUS(ddr3_tip_if_write
   1198 		     (dev_num, access_type, if_id, SDRAM_CFG_REG,
   1199 		      0x10000000, 0x10000000));
   1200 
   1201 	CHECK_STATUS(config_func_info[dev_num].
   1202 		     tip_get_freq_config_info_func((u8)dev_num, frequency,
   1203 						   &freq_config_info));
   1204 
   1205 	for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) {
   1206 		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
   1207 		CHECK_STATUS(ddr3_tip_bus_read_modify_write
   1208 			     (dev_num, access_type, if_id, bus_cnt,
   1209 			      DDR_PHY_DATA, ADLL_CFG0_PHY_REG,
   1210 			      freq_config_info.bw_per_freq << 8, 0x700));
   1211 		CHECK_STATUS(ddr3_tip_bus_read_modify_write
   1212 			     (dev_num, access_type, if_id, bus_cnt,
   1213 			      DDR_PHY_DATA, ADLL_CFG2_PHY_REG,
   1214 			      freq_config_info.rate_per_freq, 0x7));
   1215 	}
   1216 
   1217 	for (bus_cnt = 0; bus_cnt < DDR_IF_CTRL_SUBPHYS_NUM; bus_cnt++) {
   1218 		CHECK_STATUS(ddr3_tip_bus_read_modify_write
   1219 			     (dev_num, ACCESS_TYPE_UNICAST, if_id, bus_cnt,
   1220 			      DDR_PHY_CONTROL, ADLL_CFG0_PHY_REG,
   1221 			      freq_config_info.bw_per_freq << 8, 0x700));
   1222 		CHECK_STATUS(ddr3_tip_bus_read_modify_write
   1223 			     (dev_num, ACCESS_TYPE_UNICAST, if_id, bus_cnt,
   1224 			      DDR_PHY_CONTROL, ADLL_CFG2_PHY_REG,
   1225 			      freq_config_info.rate_per_freq, 0x7));
   1226 	}
   1227 
   1228 	/* DUnit to Phy drive post edge, ADLL reset assert de-assert */
   1229 	CHECK_STATUS(ddr3_tip_if_write
   1230 		     (dev_num, access_type, if_id, DRAM_PHY_CFG_REG,
   1231 		      0, (0x80000000 | 0x40000000)));
   1232 	mdelay(100 / (freq_val[frequency] / freq_val[DDR_FREQ_LOW_FREQ]));
   1233 	CHECK_STATUS(ddr3_tip_if_write
   1234 		     (dev_num, access_type, if_id, DRAM_PHY_CFG_REG,
   1235 		      (0x80000000 | 0x40000000), (0x80000000 | 0x40000000)));
   1236 
   1237 	/* polling for ADLL Done */
   1238 	if (ddr3_tip_if_polling(dev_num, access_type, if_id,
   1239 				0x3ff03ff, 0x3ff03ff, PHY_LOCK_STATUS_REG,
   1240 				MAX_POLLING_ITERATIONS) != MV_OK) {
   1241 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
    1242 				  ("Freq_set: DDR3 poll failed(1)\n"));
   1243 	}
   1244 
   1245 	/* pup data_pup reset assert-> deassert */
   1246 	CHECK_STATUS(ddr3_tip_if_write
   1247 		     (dev_num, access_type, if_id, SDRAM_CFG_REG,
   1248 		      0, 0x60000000));
   1249 	mdelay(10);
   1250 	CHECK_STATUS(ddr3_tip_if_write
   1251 		     (dev_num, access_type, if_id, SDRAM_CFG_REG,
   1252 		      0x60000000, 0x60000000));
   1253 
   1254 	return MV_OK;
   1255 }
   1256 
   1257 int ddr3_tip_freq_set(u32 dev_num, enum hws_access_type access_type,
   1258 		      u32 if_id, enum hws_ddr_freq frequency)
   1259 {
   1260 	u32 cl_value = 0, cwl_value = 0, mem_mask = 0, val = 0,
   1261 		bus_cnt = 0, t_wr = 0, t_ckclk = 0,
   1262 		cnt_id;
   1263 	u32 end_if, start_if;
   1264 	u32 bus_index = 0;
   1265 	int is_dll_off = 0;
   1266 	enum hws_speed_bin speed_bin_index = 0;
   1267 	struct hws_tip_freq_config_info freq_config_info;
   1268 	enum hws_result *flow_result = training_result[training_stage];
   1269 	u32 adll_tap = 0;
   1270 	u32 cs_num;
   1271 	u32 t2t;
   1272 	u32 cs_mask[MAX_INTERFACE_NUM];
   1273 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
   1274 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
   1275 	unsigned int tclk;
   1276 	enum mv_ddr_timing timing = tm->interface_params[if_id].timing;
   1277 
   1278 	DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
   1279 			  ("dev %d access %d IF %d freq %d\n", dev_num,
   1280 			   access_type, if_id, frequency));
   1281 
   1282 	if (frequency == DDR_FREQ_LOW_FREQ)
   1283 		is_dll_off = 1;
   1284 	if (access_type == ACCESS_TYPE_MULTICAST) {
   1285 		start_if = 0;
   1286 		end_if = MAX_INTERFACE_NUM - 1;
   1287 	} else {
   1288 		start_if = if_id;
   1289 		end_if = if_id;
   1290 	}
   1291 
   1292 	/* calculate interface cs mask - Oferb 4/11 */
   1293 	/* speed bin can be different for each interface */
   1294 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
   1295 		/* cs enable is active low */
   1296 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
   1297 		cs_mask[if_id] = CS_BIT_MASK;
   1298 		training_result[training_stage][if_id] = TEST_SUCCESS;
   1299 		ddr3_tip_calc_cs_mask(dev_num, if_id, effective_cs,
   1300 				      &cs_mask[if_id]);
   1301 	}
   1302 
   1303 	/* speed bin can be different for each interface */
   1304 	/*
    1305 	 * moti b - need to remove the loop for multicast access functions
   1306 	 * and loop the unicast access functions
   1307 	 */
   1308 	for (if_id = start_if; if_id <= end_if; if_id++) {
   1309 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
   1310 
   1311 		flow_result[if_id] = TEST_SUCCESS;
   1312 		speed_bin_index =
   1313 			tm->interface_params[if_id].speed_bin_index;
   1314 		if (tm->interface_params[if_id].memory_freq ==
   1315 		    frequency) {
   1316 			cl_value =
   1317 				tm->interface_params[if_id].cas_l;
   1318 			cwl_value =
   1319 				tm->interface_params[if_id].cas_wl;
   1320 		} else if (tm->cfg_src == MV_DDR_CFG_SPD) {
   1321 			tclk = 1000000 / freq_val[frequency];
   1322 			cl_value = mv_ddr_cl_calc(tm->timing_data[MV_DDR_TAA_MIN], tclk);
   1323 			if (cl_value == 0) {
   1324 				printf("mv_ddr: unsupported cas latency value found\n");
   1325 				return MV_FAIL;
   1326 			}
   1327 			cwl_value = mv_ddr_cwl_calc(tclk);
   1328 			if (cwl_value == 0) {
   1329 				printf("mv_ddr: unsupported cas write latency value found\n");
   1330 				return MV_FAIL;
   1331 			}
   1332 		} else {
   1333 			cl_value =
   1334 				cas_latency_table[speed_bin_index].cl_val[frequency];
   1335 			cwl_value =
   1336 				cas_write_latency_table[speed_bin_index].
   1337 				cl_val[frequency];
   1338 		}
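		/*
		 * CL/CWL selection order: take the values from the topology
		 * map when it already describes this frequency, derive them
		 * from the SPD timing data when the configuration came from
		 * SPD, and otherwise fall back to the per-speed-bin tables.
		 */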
   1339 
   1340 		DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
   1341 				  ("Freq_set dev 0x%x access 0x%x if 0x%x freq 0x%x speed %d:\n\t",
   1342 				   dev_num, access_type, if_id,
   1343 				   frequency, speed_bin_index));
   1344 
   1345 		for (cnt_id = 0; cnt_id < DDR_FREQ_LAST; cnt_id++) {
   1346 			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
   1347 					  ("%d ",
   1348 					   cas_latency_table[speed_bin_index].
   1349 					   cl_val[cnt_id]));
   1350 		}
   1351 
   1352 		DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, ("\n"));
   1353 		mem_mask = 0;
   1354 		for (bus_index = 0; bus_index < octets_per_if_num;
   1355 		     bus_index++) {
   1356 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_index);
   1357 			mem_mask |=
   1358 				tm->interface_params[if_id].
   1359 				as_bus_params[bus_index].mirror_enable_bitmask;
   1360 		}
   1361 
   1362 		if (mem_mask != 0) {
    1363 			/* motib: redundant in KW28 */
   1364 			CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
   1365 						       if_id,
   1366 						       DUAL_DUNIT_CFG_REG, 0, 0x8));
   1367 		}
   1368 
   1369 		/* dll state after exiting SR */
   1370 		if (is_dll_off == 1) {
   1371 			CHECK_STATUS(ddr3_tip_if_write
   1372 				     (dev_num, access_type, if_id,
   1373 				      DFS_REG, 0x1, 0x1));
   1374 		} else {
   1375 			CHECK_STATUS(ddr3_tip_if_write
   1376 				     (dev_num, access_type, if_id,
   1377 				      DFS_REG, 0, 0x1));
   1378 		}
   1379 
   1380 		CHECK_STATUS(ddr3_tip_if_write
   1381 			     (dev_num, access_type, if_id,
   1382 			      DUNIT_MMASK_REG, 0, 0x1));
   1383 		/* DFS  - block  transactions */
   1384 		CHECK_STATUS(ddr3_tip_if_write
   1385 			     (dev_num, access_type, if_id,
   1386 			      DFS_REG, 0x2, 0x2));
   1387 
   1388 		/* disable ODT in case of dll off */
   1389 		if (is_dll_off == 1) {
   1390 			CHECK_STATUS(ddr3_tip_if_write
   1391 				     (dev_num, access_type, if_id,
   1392 				      0x1874, 0, 0x244));
   1393 			CHECK_STATUS(ddr3_tip_if_write
   1394 				     (dev_num, access_type, if_id,
   1395 				      0x1884, 0, 0x244));
   1396 			CHECK_STATUS(ddr3_tip_if_write
   1397 				     (dev_num, access_type, if_id,
   1398 				      0x1894, 0, 0x244));
   1399 			CHECK_STATUS(ddr3_tip_if_write
   1400 				     (dev_num, access_type, if_id,
   1401 				      0x18a4, 0, 0x244));
   1402 		}
   1403 
   1404 		/* DFS  - Enter Self-Refresh */
   1405 		CHECK_STATUS(ddr3_tip_if_write
   1406 			     (dev_num, access_type, if_id, DFS_REG, 0x4,
   1407 			      0x4));
   1408 		/* polling on self refresh entry */
   1409 		if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST,
   1410 					if_id, 0x8, 0x8, DFS_REG,
   1411 					MAX_POLLING_ITERATIONS) != MV_OK) {
   1412 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   1413 					  ("Freq_set: DDR3 poll failed on SR entry\n"));
   1414 		}
   1415 
   1416 		/* Calculate 2T mode */
   1417 		if (mode_2t != 0xff) {
   1418 			t2t = mode_2t;
   1419 		} else if (timing != MV_DDR_TIM_DEFAULT) {
   1420 			t2t = (timing == MV_DDR_TIM_2T) ? 1 : 0;
   1421 		} else {
   1422 			/* Calculate number of CS per interface */
   1423 			CHECK_STATUS(calc_cs_num(dev_num, if_id, &cs_num));
   1424 			t2t = (cs_num == 1) ? 0 : 1;
   1425 		}
   1426 
   1427 
   1428 		if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_INTERLEAVE_WA) == 1) {
   1429 			/* Use 1T mode if 1:1 ratio configured */
   1430 			if (config_func_info[dev_num].tip_get_clock_ratio(frequency) == 1) {
   1431 				/* Low freq*/
   1432 				CHECK_STATUS(ddr3_tip_if_write
   1433 					     (dev_num, access_type, if_id,
   1434 					      SDRAM_OPEN_PAGES_CTRL_REG, 0x0, 0x3C0));
   1435 				t2t = 0;
   1436 			} else {
   1437 				/* Middle or target freq */
   1438 				CHECK_STATUS(ddr3_tip_if_write
   1439 					     (dev_num, access_type, if_id,
   1440 					      SDRAM_OPEN_PAGES_CTRL_REG, 0x3C0, 0x3C0));
   1441 			}
   1442 		}
   1443 		CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
   1444 					       DUNIT_CTRL_LOW_REG, t2t << 3, 0x3 << 3));
   1445 
   1446 		/* PLL configuration */
   1447 		config_func_info[dev_num].tip_set_freq_divider_func(dev_num, if_id,
   1448 								    frequency);
   1449 
   1450 		/* DFS  - CL/CWL/WR parameters after exiting SR */
   1451 		CHECK_STATUS(ddr3_tip_if_write
   1452 			     (dev_num, access_type, if_id, DFS_REG,
   1453 			      (cl_mask_table[cl_value] << 8), 0xf00));
   1454 		CHECK_STATUS(ddr3_tip_if_write
   1455 			     (dev_num, access_type, if_id, DFS_REG,
   1456 			      (cwl_mask_table[cwl_value] << 12), 0x7000));
   1457 
   1458 		t_ckclk = (MEGA / freq_val[frequency]);
   1459 		t_wr = time_to_nclk(speed_bin_table
   1460 					   (speed_bin_index,
   1461 					    SPEED_BIN_TWR), t_ckclk);
   1462 
   1463 		CHECK_STATUS(ddr3_tip_if_write
   1464 			     (dev_num, access_type, if_id, DFS_REG,
   1465 			      (twr_mask_table[t_wr] << 16), 0x70000));
   1466 
   1467 		/* Restore original RTT values if returning from DLL OFF mode */
   1468 		if (is_dll_off == 1) {
   1469 			CHECK_STATUS(ddr3_tip_if_write
   1470 				     (dev_num, access_type, if_id, 0x1874,
   1471 				      g_dic | g_rtt_nom, 0x266));
   1472 			CHECK_STATUS(ddr3_tip_if_write
   1473 				     (dev_num, access_type, if_id, 0x1884,
   1474 				      g_dic | g_rtt_nom, 0x266));
   1475 			CHECK_STATUS(ddr3_tip_if_write
   1476 				     (dev_num, access_type, if_id, 0x1894,
   1477 				      g_dic | g_rtt_nom, 0x266));
   1478 			CHECK_STATUS(ddr3_tip_if_write
   1479 				     (dev_num, access_type, if_id, 0x18a4,
   1480 				      g_dic | g_rtt_nom, 0x266));
   1481 		}
   1482 
   1483 		/* Reset divider_b assert -> de-assert */
   1484 		CHECK_STATUS(ddr3_tip_if_write
   1485 			     (dev_num, access_type, if_id,
   1486 			      SDRAM_CFG_REG, 0, 0x10000000));
   1487 		mdelay(10);
   1488 		CHECK_STATUS(ddr3_tip_if_write
   1489 			     (dev_num, access_type, if_id,
   1490 			      SDRAM_CFG_REG, 0x10000000, 0x10000000));
   1491 
   1492 		/* ADLL configuration function of process and frequency */
   1493 		CHECK_STATUS(config_func_info[dev_num].
   1494 			     tip_get_freq_config_info_func(dev_num, frequency,
   1495 							   &freq_config_info));
   1496 
   1497 		/* TBD check milo5 using device ID ? */
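		/* per active data PHY: select the bandwidth (reg 0x92) and rate (reg 0x94) settings matching the new frequency */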
   1498 		for (bus_cnt = 0; bus_cnt < octets_per_if_num;
   1499 		     bus_cnt++) {
   1500 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
   1501 			CHECK_STATUS(ddr3_tip_bus_read_modify_write
   1502 				     (dev_num, ACCESS_TYPE_UNICAST,
   1503 				      if_id, bus_cnt, DDR_PHY_DATA,
   1504 				      0x92,
   1505 				      freq_config_info.
   1506 				      bw_per_freq << 8
   1507 				      /*freq_mask[dev_num][frequency] << 8 */
   1508 				      , 0x700));
   1509 			CHECK_STATUS(ddr3_tip_bus_read_modify_write
   1510 				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
   1511 				      bus_cnt, DDR_PHY_DATA, 0x94,
   1512 				      freq_config_info.rate_per_freq, 0x7));
   1513 		}
   1514 
   1515 		/* Dunit to PHY drive post edge, ADLL reset assert -> de-assert */
   1516 		CHECK_STATUS(ddr3_tip_if_write
   1517 			     (dev_num, access_type, if_id,
   1518 			      DRAM_PHY_CFG_REG, 0,
   1519 			      (0x80000000 | 0x40000000)));
   1520 		mdelay(100 / (freq_val[frequency] / freq_val[DDR_FREQ_LOW_FREQ]));
   1521 		CHECK_STATUS(ddr3_tip_if_write
   1522 			     (dev_num, access_type, if_id,
   1523 			      DRAM_PHY_CFG_REG, (0x80000000 | 0x40000000),
   1524 			      (0x80000000 | 0x40000000)));
   1525 
   1526 		/* polling for ADLL Done */
   1527 		if (ddr3_tip_if_polling
   1528 		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x3ff03ff,
   1529 		     0x3ff03ff, PHY_LOCK_STATUS_REG,
   1530 		     MAX_POLLING_ITERATIONS) != MV_OK) {
   1531 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   1532 					  ("Freq_set: DDR3 poll failed(1)\n"));
   1533 		}
   1534 
    1535 		/* pup data_pup reset assert -> de-assert */
   1536 		CHECK_STATUS(ddr3_tip_if_write
   1537 			     (dev_num, access_type, if_id,
   1538 			      SDRAM_CFG_REG, 0, 0x60000000));
   1539 		mdelay(10);
   1540 		CHECK_STATUS(ddr3_tip_if_write
   1541 			     (dev_num, access_type, if_id,
   1542 			      SDRAM_CFG_REG, 0x60000000, 0x60000000));
   1543 
    1544 		/* Set proper timing params before exiting Self-Refresh */
   1545 		ddr3_tip_set_timing(dev_num, access_type, if_id, frequency);
   1546 		if (delay_enable != 0) {
   1547 			adll_tap = (is_dll_off == 1) ? 1000 : (MEGA / (freq_val[frequency] * 64));
   1548 			ddr3_tip_cmd_addr_init_delay(dev_num, adll_tap);
   1549 		}
   1550 
   1551 		/* Exit SR */
   1552 		CHECK_STATUS(ddr3_tip_if_write
   1553 			     (dev_num, access_type, if_id, DFS_REG, 0,
   1554 			      0x4));
   1555 		if (ddr3_tip_if_polling
   1556 		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x8, DFS_REG,
   1557 		     MAX_POLLING_ITERATIONS) != MV_OK) {
   1558 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   1559 					  ("Freq_set: DDR3 poll failed(2)"));
   1560 		}
   1561 
   1562 		/* Refresh Command */
   1563 		CHECK_STATUS(ddr3_tip_if_write
   1564 			     (dev_num, access_type, if_id,
   1565 			      SDRAM_OP_REG, 0x2, 0xf1f));
   1566 		if (ddr3_tip_if_polling
   1567 		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1f,
   1568 		     SDRAM_OP_REG, MAX_POLLING_ITERATIONS) != MV_OK) {
   1569 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   1570 					  ("Freq_set: DDR3 poll failed(3)"));
   1571 		}
   1572 
   1573 		/* Release DFS Block */
   1574 		CHECK_STATUS(ddr3_tip_if_write
   1575 			     (dev_num, access_type, if_id, DFS_REG, 0,
   1576 			      0x2));
   1577 		/* Controller to MBUS Retry - normal */
   1578 		CHECK_STATUS(ddr3_tip_if_write
   1579 			     (dev_num, access_type, if_id, DUNIT_MMASK_REG,
   1580 			      0x1, 0x1));
   1581 
    1582 		/* MR0: Burst Length 8, CL, Auto_precharge (reg 0x16cc) */
   1583 		val =
   1584 			((cl_mask_table[cl_value] & 0x1) << 2) |
   1585 			((cl_mask_table[cl_value] & 0xe) << 3);
   1586 		CHECK_STATUS(ddr3_tip_if_write
   1587 			     (dev_num, access_type, if_id, MR0_REG,
   1588 			      val, (0x7 << 4) | (1 << 2)));
    1589 		/* MR2: CWL, Auto Self-Refresh - disable */
   1590 		val = (cwl_mask_table[cwl_value] << 3) | g_rtt_wr;
   1591 		/*
   1592 		 * nklein 24.10.13 - should not be here - leave value as set in
   1593 		 * the init configuration val |= (1 << 9);
   1594 		 * val |= ((tm->interface_params[if_id].
   1595 		 * interface_temp == MV_DDR_TEMP_HIGH) ? (1 << 7) : 0);
   1596 		 */
   1597 		/* nklein 24.10.13 - see above comment */
   1598 		CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
   1599 					       if_id, MR2_REG,
   1600 					       val, (0x7 << 3) | (0x3 << 9)));
   1601 
   1602 		/* ODT TIMING */
   1603 		val = ((cl_value - cwl_value + 1) << 4) |
   1604 			((cl_value - cwl_value + 6) << 8) |
   1605 			((cl_value - 1) << 12) | ((cl_value + 6) << 16);
   1606 		CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
   1607 					       if_id, DDR_ODT_TIMING_LOW_REG,
   1608 					       val, 0xffff0));
   1609 		val = 0x91 | ((cwl_value - 1) << 8) | ((cwl_value + 5) << 12);
   1610 		CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
   1611 					       if_id, DDR_ODT_TIMING_HIGH_REG,
   1612 					       val, 0xffff));
   1613 
    1614 		/* in the ddr4 case the receiver odt must be kept always 'on' (odt_config == 0);
    1615 		 * in the ddr3 case the odt is controlled through the odt timing registers
    1616 		 */
    1617 		if (odt_config != 0) {
    1618 			CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
    1619 						       DUNIT_ODT_CTRL_REG, 0xf, 0xf));
    1620 		} else {
    1621 			CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
    1622 						       DUNIT_ODT_CTRL_REG, 0x30f, 0x30f));
    1623 		}
   1624 
   1625 		/* re-write CL */
   1626 		val = ((cl_mask_table[cl_value] & 0x1) << 2) |
   1627 			((cl_mask_table[cl_value] & 0xe) << 3);
   1628 
   1629 		CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask, MR_CMD0,
   1630 			val, (0x7 << 4) | (0x1 << 2)));
   1631 
   1632 		/* re-write CWL */
   1633 		val = (cwl_mask_table[cwl_value] << 3) | g_rtt_wr;
   1634 		CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask, MR_CMD2,
   1635 			val, (0x7 << 3) | (0x3 << 9)));
   1636 
   1637 		if (mem_mask != 0) {
   1638 			CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
   1639 						       if_id,
   1640 						       DUAL_DUNIT_CFG_REG,
   1641 						       1 << 3, 0x8));
   1642 		}
   1643 	}
   1644 
   1645 	return MV_OK;
   1646 }
   1647 
   1648 /*
   1649  * Set ODT values
   1650  */
   1651 static int ddr3_tip_write_odt(u32 dev_num, enum hws_access_type access_type,
   1652 			      u32 if_id, u32 cl_value, u32 cwl_value)
   1653 {
   1654 	/* ODT TIMING */
   1655 	u32 val = (cl_value - cwl_value + 6);
   1656 
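	/* ODT timing windows are derived from CL/CWL; bits 21-23 carry the MSBs of values wider than 4 bits */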
   1657 	val = ((cl_value - cwl_value + 1) << 4) | ((val & 0xf) << 8) |
   1658 		(((cl_value - 1) & 0xf) << 12) |
   1659 		(((cl_value + 6) & 0xf) << 16) | (((val & 0x10) >> 4) << 21);
   1660 	val |= (((cl_value - 1) >> 4) << 22) | (((cl_value + 6) >> 4) << 23);
   1661 
   1662 	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
   1663 				       DDR_ODT_TIMING_LOW_REG, val, 0xffff0));
   1664 	val = 0x91 | ((cwl_value - 1) << 8) | ((cwl_value + 5) << 12);
   1665 	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
   1666 				       DDR_ODT_TIMING_HIGH_REG, val, 0xffff));
   1667 	if (odt_additional == 1) {
   1668 		CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
   1669 					       if_id,
   1670 					       SDRAM_ODT_CTRL_HIGH_REG,
   1671 					       0xf, 0xf));
   1672 	}
   1673 
   1674 	/* ODT Active */
   1675 	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
   1676 				       DUNIT_ODT_CTRL_REG, 0xf, 0xf));
   1677 
   1678 	return MV_OK;
   1679 }
   1680 
   1681 /*
   1682  * Set Timing values for training
   1683  */
   1684 static int ddr3_tip_set_timing(u32 dev_num, enum hws_access_type access_type,
   1685 			       u32 if_id, enum hws_ddr_freq frequency)
   1686 {
   1687 	u32 t_ckclk = 0, t_ras = 0;
   1688 	u32 t_rcd = 0, t_rp = 0, t_wr = 0, t_wtr = 0, t_rrd = 0, t_rtp = 0,
   1689 		t_rfc = 0, t_mod = 0, t_r2r = 0x3, t_r2r_high = 0,
   1690 		t_r2w_w2r = 0x3, t_r2w_w2r_high = 0x1, t_w2w = 0x3;
   1691 	u32 refresh_interval_cnt, t_hclk, t_refi, t_faw, t_pd, t_xpdll;
   1692 	u32 val = 0, page_size = 0, mask = 0;
   1693 	enum hws_speed_bin speed_bin_index;
   1694 	enum mv_ddr_die_capacity memory_size = MV_DDR_DIE_CAP_2GBIT;
   1695 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
   1696 	struct page_element *page_param = mv_ddr_page_tbl_get();
   1697 
   1698 	speed_bin_index = tm->interface_params[if_id].speed_bin_index;
   1699 	memory_size = tm->interface_params[if_id].memory_size;
   1700 	page_size =
   1701 		(tm->interface_params[if_id].bus_width ==
   1702 		 MV_DDR_DEV_WIDTH_8BIT) ? page_param[memory_size].
   1703 		page_size_8bit : page_param[memory_size].page_size_16bit;
   1704 	t_ckclk = (MEGA / freq_val[frequency]);
    1705 	/* HCLK in [ps] */
   1706 	t_hclk = MEGA / (freq_val[frequency] / config_func_info[dev_num].tip_get_clock_ratio(frequency));
   1707 
   1708 	t_refi = (tm->interface_params[if_id].interface_temp == MV_DDR_TEMP_HIGH) ? TREFI_HIGH : TREFI_LOW;
   1709 	t_refi *= 1000;	/* psec */
   1710 	refresh_interval_cnt = t_refi / t_hclk;	/* no units */
   1711 
   1712 	if (page_size == 1) {
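	/* tFAW depends on page size; floor it at 20 clocks (1K page) or 28 clocks (2K page) */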
   1713 		t_faw = speed_bin_table(speed_bin_index, SPEED_BIN_TFAW1K);
   1714 		t_faw = time_to_nclk(t_faw, t_ckclk);
   1715 		t_faw = GET_MAX_VALUE(20, t_faw);
    1716 	} else {	/* page size = 2; page size 0.5k is not supported */
   1717 		t_faw = speed_bin_table(speed_bin_index, SPEED_BIN_TFAW2K);
   1718 		t_faw = time_to_nclk(t_faw, t_ckclk);
   1719 		t_faw = GET_MAX_VALUE(28, t_faw);
   1720 	}
   1721 
   1722 	t_pd = GET_MAX_VALUE(t_ckclk * 3, speed_bin_table(speed_bin_index, SPEED_BIN_TPD));
   1723 	t_pd = time_to_nclk(t_pd, t_ckclk);
   1724 
   1725 	t_xpdll = GET_MAX_VALUE(t_ckclk * 10, speed_bin_table(speed_bin_index, SPEED_BIN_TXPDLL));
   1726 	t_xpdll = time_to_nclk(t_xpdll, t_ckclk);
   1727 
   1728 	t_rrd =	(page_size == 1) ? speed_bin_table(speed_bin_index,
   1729 						   SPEED_BIN_TRRD1K) :
   1730 		speed_bin_table(speed_bin_index, SPEED_BIN_TRRD2K);
   1731 	t_rrd = GET_MAX_VALUE(t_ckclk * 4, t_rrd);
   1732 	t_rtp =	GET_MAX_VALUE(t_ckclk * 4, speed_bin_table(speed_bin_index,
   1733 							   SPEED_BIN_TRTP));
   1734 	t_mod = GET_MAX_VALUE(t_ckclk * 12, 15000);
   1735 	t_wtr = GET_MAX_VALUE(t_ckclk * 4, speed_bin_table(speed_bin_index,
   1736 							   SPEED_BIN_TWTR));
   1737 	t_ras = time_to_nclk(speed_bin_table(speed_bin_index,
   1738 						    SPEED_BIN_TRAS),
   1739 				    t_ckclk);
   1740 	t_rcd = time_to_nclk(speed_bin_table(speed_bin_index,
   1741 						    SPEED_BIN_TRCD),
   1742 				    t_ckclk);
   1743 	t_rp = time_to_nclk(speed_bin_table(speed_bin_index,
   1744 						   SPEED_BIN_TRP),
   1745 				   t_ckclk);
   1746 	t_wr = time_to_nclk(speed_bin_table(speed_bin_index,
   1747 						   SPEED_BIN_TWR),
   1748 				   t_ckclk);
   1749 	t_wtr = time_to_nclk(t_wtr, t_ckclk);
   1750 	t_rrd = time_to_nclk(t_rrd, t_ckclk);
   1751 	t_rtp = time_to_nclk(t_rtp, t_ckclk);
   1752 	t_rfc = time_to_nclk(rfc_table[memory_size] * 1000, t_ckclk);
   1753 	t_mod = time_to_nclk(t_mod, t_ckclk);
   1754 
   1755 	/* SDRAM Timing Low */
   1756 	val = (((t_ras - 1) & SDRAM_TIMING_LOW_TRAS_MASK) << SDRAM_TIMING_LOW_TRAS_OFFS) |
   1757 	      (((t_rcd - 1) & SDRAM_TIMING_LOW_TRCD_MASK) << SDRAM_TIMING_LOW_TRCD_OFFS) |
   1758 	      (((t_rcd - 1) >> SDRAM_TIMING_LOW_TRCD_OFFS & SDRAM_TIMING_HIGH_TRCD_MASK)
   1759 	      << SDRAM_TIMING_HIGH_TRCD_OFFS) |
   1760 	      (((t_rp - 1) & SDRAM_TIMING_LOW_TRP_MASK) << SDRAM_TIMING_LOW_TRP_OFFS) |
   1761 	      (((t_rp - 1) >> SDRAM_TIMING_LOW_TRP_MASK & SDRAM_TIMING_HIGH_TRP_MASK)
   1762 	      << SDRAM_TIMING_HIGH_TRP_OFFS) |
   1763 	      (((t_wr - 1) & SDRAM_TIMING_LOW_TWR_MASK) << SDRAM_TIMING_LOW_TWR_OFFS) |
   1764 	      (((t_wtr - 1) & SDRAM_TIMING_LOW_TWTR_MASK) << SDRAM_TIMING_LOW_TWTR_OFFS) |
   1765 	      ((((t_ras - 1) >> 4) & SDRAM_TIMING_LOW_TRAS_HIGH_MASK) << SDRAM_TIMING_LOW_TRAS_HIGH_OFFS) |
   1766 	      (((t_rrd - 1) & SDRAM_TIMING_LOW_TRRD_MASK) << SDRAM_TIMING_LOW_TRRD_OFFS) |
   1767 	      (((t_rtp - 1) & SDRAM_TIMING_LOW_TRTP_MASK) << SDRAM_TIMING_LOW_TRTP_OFFS);
   1768 
   1769 	mask = (SDRAM_TIMING_LOW_TRAS_MASK << SDRAM_TIMING_LOW_TRAS_OFFS) |
   1770 	       (SDRAM_TIMING_LOW_TRCD_MASK << SDRAM_TIMING_LOW_TRCD_OFFS) |
   1771 	       (SDRAM_TIMING_HIGH_TRCD_MASK << SDRAM_TIMING_HIGH_TRCD_OFFS) |
   1772 	       (SDRAM_TIMING_LOW_TRP_MASK << SDRAM_TIMING_LOW_TRP_OFFS) |
   1773 	       (SDRAM_TIMING_HIGH_TRP_MASK << SDRAM_TIMING_HIGH_TRP_OFFS) |
   1774 	       (SDRAM_TIMING_LOW_TWR_MASK << SDRAM_TIMING_LOW_TWR_OFFS) |
   1775 	       (SDRAM_TIMING_LOW_TWTR_MASK << SDRAM_TIMING_LOW_TWTR_OFFS) |
   1776 	       (SDRAM_TIMING_LOW_TRAS_HIGH_MASK << SDRAM_TIMING_LOW_TRAS_HIGH_OFFS) |
   1777 	       (SDRAM_TIMING_LOW_TRRD_MASK << SDRAM_TIMING_LOW_TRRD_OFFS) |
   1778 	       (SDRAM_TIMING_LOW_TRTP_MASK << SDRAM_TIMING_LOW_TRTP_OFFS);
   1779 
   1780 	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
   1781 				       SDRAM_TIMING_LOW_REG, val, mask));
   1782 
   1783 	/* SDRAM Timing High */
   1784 	val = 0;
   1785 	mask = 0;
   1786 
   1787 	val = (((t_rfc - 1) & SDRAM_TIMING_HIGH_TRFC_MASK) << SDRAM_TIMING_HIGH_TRFC_OFFS) |
   1788 	      ((t_r2r & SDRAM_TIMING_HIGH_TR2R_MASK) << SDRAM_TIMING_HIGH_TR2R_OFFS) |
   1789 	      ((t_r2w_w2r & SDRAM_TIMING_HIGH_TR2W_W2R_MASK) << SDRAM_TIMING_HIGH_TR2W_W2R_OFFS) |
   1790 	      ((t_w2w & SDRAM_TIMING_HIGH_TW2W_MASK) << SDRAM_TIMING_HIGH_TW2W_OFFS) |
   1791 	      ((((t_rfc - 1) >> 7) & SDRAM_TIMING_HIGH_TRFC_HIGH_MASK) << SDRAM_TIMING_HIGH_TRFC_HIGH_OFFS) |
   1792 	      ((t_r2r_high & SDRAM_TIMING_HIGH_TR2R_HIGH_MASK) << SDRAM_TIMING_HIGH_TR2R_HIGH_OFFS) |
   1793 	      ((t_r2w_w2r_high & SDRAM_TIMING_HIGH_TR2W_W2R_HIGH_MASK) << SDRAM_TIMING_HIGH_TR2W_W2R_HIGH_OFFS) |
   1794 	      (((t_mod - 1) & SDRAM_TIMING_HIGH_TMOD_MASK) << SDRAM_TIMING_HIGH_TMOD_OFFS) |
   1795 	      ((((t_mod - 1) >> 4) & SDRAM_TIMING_HIGH_TMOD_HIGH_MASK) << SDRAM_TIMING_HIGH_TMOD_HIGH_OFFS);
   1796 
   1797 	mask = (SDRAM_TIMING_HIGH_TRFC_MASK << SDRAM_TIMING_HIGH_TRFC_OFFS) |
   1798 	       (SDRAM_TIMING_HIGH_TR2R_MASK << SDRAM_TIMING_HIGH_TR2R_OFFS) |
   1799 	       (SDRAM_TIMING_HIGH_TR2W_W2R_MASK << SDRAM_TIMING_HIGH_TR2W_W2R_OFFS) |
   1800 	       (SDRAM_TIMING_HIGH_TW2W_MASK << SDRAM_TIMING_HIGH_TW2W_OFFS) |
   1801 	       (SDRAM_TIMING_HIGH_TRFC_HIGH_MASK << SDRAM_TIMING_HIGH_TRFC_HIGH_OFFS) |
   1802 	       (SDRAM_TIMING_HIGH_TR2R_HIGH_MASK << SDRAM_TIMING_HIGH_TR2R_HIGH_OFFS) |
   1803 	       (SDRAM_TIMING_HIGH_TR2W_W2R_HIGH_MASK << SDRAM_TIMING_HIGH_TR2W_W2R_HIGH_OFFS) |
   1804 	       (SDRAM_TIMING_HIGH_TMOD_MASK << SDRAM_TIMING_HIGH_TMOD_OFFS) |
   1805 	       (SDRAM_TIMING_HIGH_TMOD_HIGH_MASK << SDRAM_TIMING_HIGH_TMOD_HIGH_OFFS);
   1806 
   1807 	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
   1808 				       SDRAM_TIMING_HIGH_REG, val, mask));
   1809 
   1810 	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
   1811 				       SDRAM_CFG_REG,
   1812 				       refresh_interval_cnt << REFRESH_OFFS,
   1813 				       REFRESH_MASK << REFRESH_OFFS));
   1814 	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
   1815 				       SDRAM_ADDR_CTRL_REG, (t_faw - 1) << T_FAW_OFFS,
   1816 				       T_FAW_MASK << T_FAW_OFFS));
   1817 
   1818 	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, DDR_TIMING_REG,
   1819 				       (t_pd - 1) << DDR_TIMING_TPD_OFFS |
   1820 				       (t_xpdll - 1) << DDR_TIMING_TXPDLL_OFFS,
   1821 				       DDR_TIMING_TPD_MASK << DDR_TIMING_TPD_OFFS |
   1822 				       DDR_TIMING_TXPDLL_MASK << DDR_TIMING_TXPDLL_OFFS));
   1823 
   1824 
   1825 	return MV_OK;
   1826 }
   1827 
   1828 
   1829 /*
   1830  * Mode Read
   1831  */
   1832 int hws_ddr3_tip_mode_read(u32 dev_num, struct mode_info *mode_info)
   1833 {
   1834 	u32 ret;
   1835 
   1836 	ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
   1837 			       MR0_REG, mode_info->reg_mr0, MASK_ALL_BITS);
   1838 	if (ret != MV_OK)
   1839 		return ret;
   1840 
   1841 	ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
   1842 			       MR1_REG, mode_info->reg_mr1, MASK_ALL_BITS);
   1843 	if (ret != MV_OK)
   1844 		return ret;
   1845 
   1846 	ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
   1847 			       MR2_REG, mode_info->reg_mr2, MASK_ALL_BITS);
   1848 	if (ret != MV_OK)
   1849 		return ret;
   1850 
   1851 	ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
    1852 			       MR3_REG, mode_info->reg_mr3, MASK_ALL_BITS);
   1853 	if (ret != MV_OK)
   1854 		return ret;
   1855 
   1856 	ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
   1857 			       RD_DATA_SMPL_DLYS_REG, mode_info->read_data_sample,
   1858 			       MASK_ALL_BITS);
   1859 	if (ret != MV_OK)
   1860 		return ret;
   1861 
   1862 	ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
   1863 			       RD_DATA_RDY_DLYS_REG, mode_info->read_data_ready,
   1864 			       MASK_ALL_BITS);
   1865 	if (ret != MV_OK)
   1866 		return ret;
   1867 
   1868 	return MV_OK;
   1869 }
   1870 
   1871 /*
   1872  * Get first active IF
   1873  */
   1874 int ddr3_tip_get_first_active_if(u8 dev_num, u32 interface_mask,
   1875 				 u32 *interface_id)
   1876 {
   1877 	u32 if_id;
   1878 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
   1879 
   1880 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
   1881 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
   1882 		if (interface_mask & (1 << if_id)) {
   1883 			*interface_id = if_id;
   1884 			break;
   1885 		}
   1886 	}
   1887 
   1888 	return MV_OK;
   1889 }
   1890 
   1891 /*
   1892  * Write CS Result
   1893  */
   1894 int ddr3_tip_write_cs_result(u32 dev_num, u32 offset)
   1895 {
   1896 	u32 if_id, bus_num, cs_bitmask, data_val, cs_num;
   1897 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
   1898 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
   1899 
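	/* copy the training result of the currently trained CS (effective_cs) into the register slots of the other chip selects */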
   1900 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
   1901 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
   1902 		for (bus_num = 0; bus_num < octets_per_if_num;
   1903 		     bus_num++) {
   1904 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num);
   1905 			cs_bitmask =
   1906 				tm->interface_params[if_id].
   1907 				as_bus_params[bus_num].cs_bitmask;
   1908 			if (cs_bitmask != effective_cs) {
   1909 				cs_num = GET_CS_FROM_MASK(cs_bitmask);
   1910 				ddr3_tip_bus_read(dev_num, if_id,
   1911 						  ACCESS_TYPE_UNICAST, bus_num,
   1912 						  DDR_PHY_DATA,
   1913 						  offset +
   1914 						  (effective_cs * 0x4),
   1915 						  &data_val);
   1916 				ddr3_tip_bus_write(dev_num,
   1917 						   ACCESS_TYPE_UNICAST,
   1918 						   if_id,
   1919 						   ACCESS_TYPE_UNICAST,
   1920 						   bus_num, DDR_PHY_DATA,
   1921 						   offset +
   1922 						   (cs_num * 0x4),
   1923 						   data_val);
   1924 			}
   1925 		}
   1926 	}
   1927 
   1928 	return MV_OK;
   1929 }
   1930 
   1931 /*
   1932  * Write MRS
   1933  */
   1934 int ddr3_tip_write_mrs_cmd(u32 dev_num, u32 *cs_mask_arr, enum mr_number mr_num, u32 data, u32 mask)
   1935 {
   1936 	u32 if_id;
   1937 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
   1938 
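	/* update the MR shadow register, then issue the MRS command per interface via SDRAM_OP_REG and poll for completion */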
   1939 	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
   1940 				       PARAM_NOT_CARE, mr_data[mr_num].reg_addr, data, mask));
   1941 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
   1942 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
   1943 		CHECK_STATUS(ddr3_tip_if_write
   1944 			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
   1945 			      SDRAM_OP_REG,
   1946 			      (cs_mask_arr[if_id] << 8) | mr_data[mr_num].cmd, 0xf1f));
   1947 	}
   1948 
   1949 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
   1950 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
   1951 		if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST, if_id, 0,
   1952 					0x1f, SDRAM_OP_REG,
   1953 					MAX_POLLING_ITERATIONS) != MV_OK) {
   1954 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   1955 					  ("write_mrs_cmd: Poll cmd fail"));
   1956 		}
   1957 	}
   1958 
   1959 	return MV_OK;
   1960 }
   1961 
   1962 /*
   1963  * Reset XSB Read FIFO
   1964  */
   1965 int ddr3_tip_reset_fifo_ptr(u32 dev_num)
   1966 {
   1967 	u32 if_id = 0;
   1968 
   1969 	/* Configure PHY reset value to 0 in order to "clean" the FIFO */
   1970 	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
   1971 				       if_id, 0x15c8, 0, 0xff000000));
   1972 	/*
   1973 	 * Move PHY to RL mode (only in RL mode the PHY overrides FIFO values
   1974 	 * during FIFO reset)
   1975 	 */
   1976 	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
   1977 				       if_id, TRAINING_SW_2_REG,
   1978 				       0x1, 0x9));
   1979 	/* In order that above configuration will influence the PHY */
    1980 	/* Ensure the above configuration takes effect in the PHY */
   1981 				       if_id, 0x15b0,
   1982 				       0x80000000, 0x80000000));
   1983 	/* Reset read fifo assertion */
   1984 	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
   1985 				       if_id, 0x1400, 0, 0x40000000));
   1986 	/* Reset read fifo deassertion */
   1987 	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
   1988 				       if_id, 0x1400,
   1989 				       0x40000000, 0x40000000));
   1990 	/* Move PHY back to functional mode */
   1991 	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
   1992 				       if_id, TRAINING_SW_2_REG,
   1993 				       0x8, 0x9));
   1994 	/* Stop training machine */
   1995 	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
   1996 				       if_id, 0x15b4, 0x10000, 0x10000));
   1997 
   1998 	return MV_OK;
   1999 }
   2000 
   2001 /*
   2002  * Reset Phy registers
   2003  */
   2004 int ddr3_tip_ddr3_reset_phy_regs(u32 dev_num)
   2005 {
   2006 	u32 if_id, phy_id, cs;
   2007 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
   2008 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
   2009 
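	/* restore write/read leveling, centralization and PBS PHY registers to their default values for the current effective_cs on every active data PHY */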
   2010 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
   2011 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
   2012 		for (phy_id = 0; phy_id < octets_per_if_num;
   2013 		     phy_id++) {
   2014 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, phy_id);
   2015 			CHECK_STATUS(ddr3_tip_bus_write
   2016 				     (dev_num, ACCESS_TYPE_UNICAST,
   2017 				      if_id, ACCESS_TYPE_UNICAST,
   2018 				      phy_id, DDR_PHY_DATA,
   2019 				      WL_PHY_REG(effective_cs),
   2020 				      phy_reg0_val));
   2021 			CHECK_STATUS(ddr3_tip_bus_write
   2022 				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
   2023 				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
   2024 				      RL_PHY_REG(effective_cs),
   2025 				      phy_reg2_val));
   2026 			CHECK_STATUS(ddr3_tip_bus_write
   2027 				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
   2028 				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
   2029 				      CRX_PHY_REG(effective_cs), phy_reg3_val));
   2030 			CHECK_STATUS(ddr3_tip_bus_write
   2031 				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
   2032 				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
   2033 				      CTX_PHY_REG(effective_cs), phy_reg1_val));
   2034 			CHECK_STATUS(ddr3_tip_bus_write
   2035 				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
   2036 				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
   2037 				      PBS_TX_BCAST_PHY_REG(effective_cs), 0x0));
   2038 			CHECK_STATUS(ddr3_tip_bus_write
   2039 				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
   2040 				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
   2041 				      PBS_RX_BCAST_PHY_REG(effective_cs), 0));
   2042 			CHECK_STATUS(ddr3_tip_bus_write
   2043 				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
   2044 				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
   2045 				      PBS_TX_PHY_REG(effective_cs, DQSP_PAD), 0));
   2046 			CHECK_STATUS(ddr3_tip_bus_write
   2047 				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
   2048 				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
   2049 				      PBS_RX_PHY_REG(effective_cs, DQSP_PAD), 0));
   2050 			CHECK_STATUS(ddr3_tip_bus_write
   2051 				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
   2052 				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
   2053 				      PBS_TX_PHY_REG(effective_cs, DQSN_PAD), 0));
   2054 			CHECK_STATUS(ddr3_tip_bus_write
   2055 				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
   2056 				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
   2057 				      PBS_RX_PHY_REG(effective_cs, DQSN_PAD), 0));
   2058 		}
   2059 	}
   2060 
   2061 	/* Set Receiver Calibration value */
   2062 	for (cs = 0; cs < MAX_CS_NUM; cs++) {
   2063 		/* PHY register 0xdb bits[5:0] - configure to 63 */
   2064 		CHECK_STATUS(ddr3_tip_bus_write
   2065 			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
   2066 			      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
   2067 			      DDR_PHY_DATA, VREF_BCAST_PHY_REG(cs), 63));
   2068 	}
   2069 
   2070 	return MV_OK;
   2071 }
   2072 
   2073 /*
   2074  * Restore Dunit registers
   2075  */
   2076 int ddr3_tip_restore_dunit_regs(u32 dev_num)
   2077 {
   2078 	u32 index_cnt;
   2079 
   2080 	mv_ddr_set_calib_controller();
   2081 
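	/* re-enable the main pads calibration machine, restore the calibration update control and the ODPG default register values */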
   2082 	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
   2083 				       PARAM_NOT_CARE, MAIN_PADS_CAL_MACH_CTRL_REG,
   2084 				       0x1, 0x1));
   2085 	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
   2086 				       PARAM_NOT_CARE, MAIN_PADS_CAL_MACH_CTRL_REG,
   2087 				       calibration_update_control << 3,
   2088 				       0x3 << 3));
   2089 	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
   2090 				       PARAM_NOT_CARE,
   2091 				       ODPG_WR_RD_MODE_ENA_REG,
   2092 				       0xffff, MASK_ALL_BITS));
   2093 
   2094 	for (index_cnt = 0; index_cnt < ARRAY_SIZE(odpg_default_value);
   2095 	     index_cnt++) {
   2096 		CHECK_STATUS(ddr3_tip_if_write
   2097 			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
   2098 			      odpg_default_value[index_cnt].reg_addr,
   2099 			      odpg_default_value[index_cnt].reg_data,
   2100 			      odpg_default_value[index_cnt].reg_mask));
   2101 	}
   2102 
   2103 	return MV_OK;
   2104 }
   2105 
   2106 int ddr3_tip_adll_regs_bypass(u32 dev_num, u32 reg_val1, u32 reg_val2)
   2107 {
   2108 	u32 if_id, phy_id;
   2109 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
   2110 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
   2111 
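	/* write the given ADLL (CTX) and PBS TX broadcast values to every active data PHY for the current effective_cs */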
   2112 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
   2113 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
   2114 		for (phy_id = 0; phy_id < octets_per_if_num; phy_id++) {
   2115 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, phy_id);
   2116 			CHECK_STATUS(ddr3_tip_bus_write
   2117 				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
   2118 				     ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
   2119 				     CTX_PHY_REG(effective_cs), reg_val1));
   2120 			CHECK_STATUS(ddr3_tip_bus_write
   2121 				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
   2122 				     ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
   2123 				     PBS_TX_BCAST_PHY_REG(effective_cs), reg_val2));
   2124 		}
   2125 	}
   2126 
   2127 	return MV_OK;
   2128 }
   2129 
   2130 /*
   2131  * Auto tune main flow
   2132  */
   2133 static int ddr3_tip_ddr3_training_main_flow(u32 dev_num)
   2134 {
   2135 /* TODO: enable this functionality for other platforms */
   2136 #if defined(CONFIG_ARMADA_38X) || defined(CONFIG_ARMADA_39X)
   2137 	struct init_cntr_param init_cntr_prm;
   2138 #endif
   2139 	int ret = MV_OK;
   2140 	int adll_bypass_flag = 0;
   2141 	u32 if_id;
   2142 	u32 max_cs = ddr3_tip_max_cs_get(dev_num);
   2143 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
   2144 	enum hws_ddr_freq freq = tm->interface_params[0].memory_freq;
   2145 
   2146 #ifdef DDR_VIEWER_TOOL
   2147 	if (debug_training == DEBUG_LEVEL_TRACE) {
   2148 		CHECK_STATUS(print_device_info((u8)dev_num));
   2149 	}
   2150 #endif
   2151 
   2152 	ddr3_tip_validate_algo_components(dev_num);
   2153 
   2154 	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
   2155 		CHECK_STATUS(ddr3_tip_ddr3_reset_phy_regs(dev_num));
   2156 	}
    2157 	/* Set to 0 after each loop to avoid an illegal value being used */
   2158 	effective_cs = 0;
   2159 
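	/* override the low-frequency entry of the frequency table with the platform's DFS low frequency */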
   2160 	freq_val[DDR_FREQ_LOW_FREQ] = dfs_low_freq;
   2161 
   2162 	if (is_pll_before_init != 0) {
   2163 		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
   2164 			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
   2165 			config_func_info[dev_num].tip_set_freq_divider_func(
   2166 				(u8)dev_num, if_id, freq);
   2167 		}
   2168 	}
   2169 
   2170 /* TODO: enable this functionality for other platforms */
   2171 #if defined(CONFIG_ARMADA_38X) || defined(CONFIG_ARMADA_39X)
   2172 	if (is_adll_calib_before_init != 0) {
   2173 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2174 				  ("with adll calib before init\n"));
   2175 		adll_calibration(dev_num, ACCESS_TYPE_MULTICAST, 0, freq);
   2176 	}
   2177 
   2178 	if (is_reg_dump != 0) {
   2179 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2180 				  ("Dump before init controller\n"));
   2181 		ddr3_tip_reg_dump(dev_num);
   2182 	}
   2183 
   2184 	if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
   2185 		training_stage = INIT_CONTROLLER;
   2186 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2187 				  ("INIT_CONTROLLER_MASK_BIT\n"));
   2188 		init_cntr_prm.do_mrs_phy = 1;
   2189 		init_cntr_prm.is_ctrl64_bit = 0;
   2190 		init_cntr_prm.init_phy = 1;
   2191 		init_cntr_prm.msys_init = 0;
   2192 		ret = hws_ddr3_tip_init_controller(dev_num, &init_cntr_prm);
   2193 		if (is_reg_dump != 0)
   2194 			ddr3_tip_reg_dump(dev_num);
   2195 		if (ret != MV_OK) {
   2196 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2197 					  ("hws_ddr3_tip_init_controller failure\n"));
   2198 			if (debug_mode == 0)
   2199 				return MV_FAIL;
   2200 		}
   2201 	}
   2202 #endif
   2203 
   2204 	ret = adll_calibration(dev_num, ACCESS_TYPE_MULTICAST, 0, freq);
   2205 	if (ret != MV_OK) {
   2206 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2207 			("adll_calibration failure\n"));
   2208 		if (debug_mode == 0)
   2209 			return MV_FAIL;
   2210 	}
   2211 
   2212 	if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
   2213 		training_stage = SET_LOW_FREQ;
   2214 
   2215 		for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
   2216 			ddr3_tip_adll_regs_bypass(dev_num, 0, 0x1f);
   2217 			adll_bypass_flag = 1;
   2218 		}
   2219 		effective_cs = 0;
   2220 
   2221 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2222 				  ("SET_LOW_FREQ_MASK_BIT %d\n",
   2223 				   freq_val[low_freq]));
   2224 		ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
   2225 					PARAM_NOT_CARE, low_freq);
   2226 		if (is_reg_dump != 0)
   2227 			ddr3_tip_reg_dump(dev_num);
   2228 		if (ret != MV_OK) {
   2229 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2230 					  ("ddr3_tip_freq_set failure\n"));
   2231 			if (debug_mode == 0)
   2232 				return MV_FAIL;
   2233 		}
   2234 	}
   2235 
   2236 	if (mask_tune_func & WRITE_LEVELING_LF_MASK_BIT) {
   2237 		training_stage = WRITE_LEVELING_LF;
   2238 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2239 			("WRITE_LEVELING_LF_MASK_BIT\n"));
   2240 		ret = ddr3_tip_dynamic_write_leveling(dev_num, 1);
   2241 		if (is_reg_dump != 0)
   2242 			ddr3_tip_reg_dump(dev_num);
   2243 		if (ret != MV_OK) {
   2244 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2245 				("ddr3_tip_dynamic_write_leveling LF failure\n"));
   2246 			if (debug_mode == 0)
   2247 				return MV_FAIL;
   2248 		}
   2249 	}
   2250 
   2251 	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
   2252 		if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
   2253 			training_stage = LOAD_PATTERN;
   2254 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2255 					  ("LOAD_PATTERN_MASK_BIT #%d\n",
   2256 					   effective_cs));
   2257 			ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
   2258 			if (is_reg_dump != 0)
   2259 				ddr3_tip_reg_dump(dev_num);
   2260 			if (ret != MV_OK) {
   2261 				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2262 						  ("ddr3_tip_load_all_pattern_to_mem failure CS #%d\n",
   2263 						   effective_cs));
   2264 				if (debug_mode == 0)
   2265 					return MV_FAIL;
   2266 			}
   2267 		}
   2268 	}
   2269 
   2270 	if (adll_bypass_flag == 1) {
   2271 		for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
   2272 			ddr3_tip_adll_regs_bypass(dev_num, phy_reg1_val, 0);
   2273 			adll_bypass_flag = 0;
   2274 		}
   2275 	}
   2276 
    2277 	/* Set to 0 after each loop to avoid an illegal value being used */
   2278 	effective_cs = 0;
   2279 
   2280 	if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
   2281 		training_stage = SET_MEDIUM_FREQ;
   2282 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2283 				  ("SET_MEDIUM_FREQ_MASK_BIT %d\n",
   2284 				   freq_val[medium_freq]));
   2285 		ret =
   2286 			ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
   2287 					  PARAM_NOT_CARE, medium_freq);
   2288 		if (is_reg_dump != 0)
   2289 			ddr3_tip_reg_dump(dev_num);
   2290 		if (ret != MV_OK) {
   2291 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2292 					  ("ddr3_tip_freq_set failure\n"));
   2293 			if (debug_mode == 0)
   2294 				return MV_FAIL;
   2295 		}
   2296 	}
   2297 
   2298 	if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
   2299 		training_stage = WRITE_LEVELING;
   2300 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2301 				  ("WRITE_LEVELING_MASK_BIT\n"));
   2302 		if ((rl_mid_freq_wa == 0) || (freq_val[medium_freq] == 533)) {
   2303 			ret = ddr3_tip_dynamic_write_leveling(dev_num, 0);
   2304 		} else {
   2305 			/* Use old WL */
   2306 			ret = ddr3_tip_legacy_dynamic_write_leveling(dev_num);
   2307 		}
   2308 
   2309 		if (is_reg_dump != 0)
   2310 			ddr3_tip_reg_dump(dev_num);
   2311 		if (ret != MV_OK) {
   2312 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2313 					  ("ddr3_tip_dynamic_write_leveling failure\n"));
   2314 			if (debug_mode == 0)
   2315 				return MV_FAIL;
   2316 		}
   2317 	}
   2318 
   2319 	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
   2320 		if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
   2321 			training_stage = LOAD_PATTERN_2;
   2322 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2323 					  ("LOAD_PATTERN_2_MASK_BIT CS #%d\n",
   2324 					   effective_cs));
   2325 			ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
   2326 			if (is_reg_dump != 0)
   2327 				ddr3_tip_reg_dump(dev_num);
   2328 			if (ret != MV_OK) {
   2329 				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2330 						  ("ddr3_tip_load_all_pattern_to_mem failure CS #%d\n",
   2331 						   effective_cs));
   2332 				if (debug_mode == 0)
   2333 					return MV_FAIL;
   2334 			}
   2335 		}
   2336 	}
    2337 	/* Set to 0 after each loop to avoid an illegal value being used */
   2338 	effective_cs = 0;
   2339 
   2340 	if (mask_tune_func & READ_LEVELING_MASK_BIT) {
   2341 		training_stage = READ_LEVELING;
   2342 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2343 				  ("READ_LEVELING_MASK_BIT\n"));
   2344 		if ((rl_mid_freq_wa == 0) || (freq_val[medium_freq] == 533)) {
   2345 			ret = ddr3_tip_dynamic_read_leveling(dev_num, medium_freq);
   2346 		} else {
   2347 			/* Use old RL */
   2348 			ret = ddr3_tip_legacy_dynamic_read_leveling(dev_num);
   2349 		}
   2350 
   2351 		if (is_reg_dump != 0)
   2352 			ddr3_tip_reg_dump(dev_num);
   2353 		if (ret != MV_OK) {
   2354 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2355 					  ("ddr3_tip_dynamic_read_leveling failure\n"));
   2356 			if (debug_mode == 0)
   2357 				return MV_FAIL;
   2358 		}
   2359 	}
   2360 
   2361 	if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
   2362 		training_stage = WRITE_LEVELING_SUPP;
   2363 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2364 				  ("WRITE_LEVELING_SUPP_MASK_BIT\n"));
   2365 		ret = ddr3_tip_dynamic_write_leveling_supp(dev_num);
   2366 		if (is_reg_dump != 0)
   2367 			ddr3_tip_reg_dump(dev_num);
   2368 		if (ret != MV_OK) {
   2369 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2370 					  ("ddr3_tip_dynamic_write_leveling_supp failure\n"));
   2371 			if (debug_mode == 0)
   2372 				return MV_FAIL;
   2373 		}
   2374 	}
   2375 
   2376 	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
   2377 		if (mask_tune_func & PBS_RX_MASK_BIT) {
   2378 			training_stage = PBS_RX;
   2379 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2380 					  ("PBS_RX_MASK_BIT CS #%d\n",
   2381 					   effective_cs));
   2382 			ret = ddr3_tip_pbs_rx(dev_num);
   2383 			if (is_reg_dump != 0)
   2384 				ddr3_tip_reg_dump(dev_num);
   2385 			if (ret != MV_OK) {
   2386 				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2387 						  ("ddr3_tip_pbs_rx failure CS #%d\n",
   2388 						   effective_cs));
   2389 				if (debug_mode == 0)
   2390 					return MV_FAIL;
   2391 			}
   2392 		}
   2393 	}
   2394 
   2395 	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
   2396 		if (mask_tune_func & PBS_TX_MASK_BIT) {
   2397 			training_stage = PBS_TX;
   2398 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2399 					  ("PBS_TX_MASK_BIT CS #%d\n",
   2400 					   effective_cs));
   2401 			ret = ddr3_tip_pbs_tx(dev_num);
   2402 			if (is_reg_dump != 0)
   2403 				ddr3_tip_reg_dump(dev_num);
   2404 			if (ret != MV_OK) {
   2405 				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2406 						  ("ddr3_tip_pbs_tx failure CS #%d\n",
   2407 						   effective_cs));
   2408 				if (debug_mode == 0)
   2409 					return MV_FAIL;
   2410 			}
   2411 		}
   2412 	}
    2413 	/* Set to 0 after each loop to avoid an illegal value being used */
   2414 	effective_cs = 0;
   2415 
   2416 	if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
   2417 		training_stage = SET_TARGET_FREQ;
   2418 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2419 				  ("SET_TARGET_FREQ_MASK_BIT %d\n",
   2420 				   freq_val[tm->
   2421 					    interface_params[first_active_if].
   2422 					    memory_freq]));
   2423 		ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
   2424 					PARAM_NOT_CARE,
   2425 					tm->interface_params[first_active_if].
   2426 					memory_freq);
   2427 #if defined(A70X0) || defined(A80X0)
   2428 	if (apn806_rev_id_get() == APN806_REV_ID_A0) {
   2429 		reg_write(0x6f812c, extension_avs);
   2430 		reg_write(0x6f8130, nominal_avs);
   2431 	}
   2432 #endif /* #if defined(A70X0) || defined(A80X0) */
   2433 		if (is_reg_dump != 0)
   2434 			ddr3_tip_reg_dump(dev_num);
   2435 		if (ret != MV_OK) {
   2436 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2437 					  ("ddr3_tip_freq_set failure\n"));
   2438 			if (debug_mode == 0)
   2439 				return MV_FAIL;
   2440 		}
   2441 	}
   2442 
   2443 	if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
   2444 		training_stage = WRITE_LEVELING_TF;
   2445 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2446 				  ("WRITE_LEVELING_TF_MASK_BIT\n"));
   2447 		ret = ddr3_tip_dynamic_write_leveling(dev_num, 0);
   2448 		if (is_reg_dump != 0)
   2449 			ddr3_tip_reg_dump(dev_num);
   2450 		if (ret != MV_OK) {
   2451 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2452 					  ("ddr3_tip_dynamic_write_leveling TF failure\n"));
   2453 			if (debug_mode == 0)
   2454 				return MV_FAIL;
   2455 		}
   2456 	}
   2457 
   2458 	if (mask_tune_func & LOAD_PATTERN_HIGH_MASK_BIT) {
   2459 		training_stage = LOAD_PATTERN_HIGH;
   2460 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("LOAD_PATTERN_HIGH\n"));
   2461 		ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
   2462 		if (is_reg_dump != 0)
   2463 			ddr3_tip_reg_dump(dev_num);
   2464 		if (ret != MV_OK) {
   2465 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2466 					  ("ddr3_tip_load_all_pattern_to_mem failure\n"));
   2467 			if (debug_mode == 0)
   2468 				return MV_FAIL;
   2469 		}
   2470 	}
   2471 
   2472 	if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
   2473 		training_stage = READ_LEVELING_TF;
   2474 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2475 				  ("READ_LEVELING_TF_MASK_BIT\n"));
   2476 		ret = ddr3_tip_dynamic_read_leveling(dev_num, tm->
   2477 						     interface_params[first_active_if].
   2478 						     memory_freq);
   2479 		if (is_reg_dump != 0)
   2480 			ddr3_tip_reg_dump(dev_num);
   2481 		if (ret != MV_OK) {
   2482 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2483 					  ("ddr3_tip_dynamic_read_leveling TF failure\n"));
   2484 			if (debug_mode == 0)
   2485 				return MV_FAIL;
   2486 		}
   2487 	}
   2488 
   2489 	if (mask_tune_func & RL_DQS_BURST_MASK_BIT) {
   2490 		training_stage = READ_LEVELING_TF;
   2491 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2492 				  ("RL_DQS_BURST_MASK_BIT\n"));
   2493 		ret = mv_ddr_rl_dqs_burst(0, 0, tm->interface_params[0].memory_freq);
   2494 		if (is_reg_dump != 0)
   2495 			ddr3_tip_reg_dump(dev_num);
   2496 		if (ret != MV_OK) {
   2497 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2498 					  ("mv_ddr_rl_dqs_burst TF failure\n"));
   2499 			if (debug_mode == 0)
   2500 				return MV_FAIL;
   2501 		}
   2502 	}
   2503 
   2504 	if (mask_tune_func & DM_PBS_TX_MASK_BIT) {
   2505 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("DM_PBS_TX_MASK_BIT\n"));
   2506 	}
   2507 
   2508 	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
   2509 		if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
   2510 			training_stage = VREF_CALIBRATION;
   2511 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("VREF\n"));
   2512 			ret = ddr3_tip_vref(dev_num);
   2513 			if (is_reg_dump != 0) {
   2514 				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2515 						  ("VREF Dump\n"));
   2516 				ddr3_tip_reg_dump(dev_num);
   2517 			}
   2518 			if (ret != MV_OK) {
   2519 				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2520 						  ("ddr3_tip_vref failure\n"));
   2521 				if (debug_mode == 0)
   2522 					return MV_FAIL;
   2523 			}
   2524 		}
   2525 	}
    2526 	/* Set to 0 after each loop to avoid an illegal value being used */
   2527 	effective_cs = 0;
   2528 
   2529 	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
   2530 		if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
   2531 			training_stage = CENTRALIZATION_RX;
   2532 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2533 					  ("CENTRALIZATION_RX_MASK_BIT CS #%d\n",
   2534 					   effective_cs));
   2535 			ret = ddr3_tip_centralization_rx(dev_num);
   2536 			if (is_reg_dump != 0)
   2537 				ddr3_tip_reg_dump(dev_num);
   2538 			if (ret != MV_OK) {
   2539 				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2540 						  ("ddr3_tip_centralization_rx failure CS #%d\n",
   2541 						   effective_cs));
   2542 				if (debug_mode == 0)
   2543 					return MV_FAIL;
   2544 			}
   2545 		}
   2546 	}
    2547 	/* Set to 0 after each loop to avoid an illegal value being used */
   2548 	effective_cs = 0;
   2549 
   2550 	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
   2551 		if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
   2552 			training_stage = WRITE_LEVELING_SUPP_TF;
   2553 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2554 					  ("WRITE_LEVELING_SUPP_TF_MASK_BIT CS #%d\n",
   2555 					   effective_cs));
   2556 			ret = ddr3_tip_dynamic_write_leveling_supp(dev_num);
   2557 			if (is_reg_dump != 0)
   2558 				ddr3_tip_reg_dump(dev_num);
   2559 			if (ret != MV_OK) {
   2560 				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2561 						  ("ddr3_tip_dynamic_write_leveling_supp TF failure CS #%d\n",
   2562 						   effective_cs));
   2563 				if (debug_mode == 0)
   2564 					return MV_FAIL;
   2565 			}
   2566 		}
   2567 	}
    2568 	/* Set to 0 after each loop to avoid an illegal value being used */
   2569 	effective_cs = 0;
   2570 
   2572 	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
   2573 		if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
   2574 			training_stage = CENTRALIZATION_TX;
   2575 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2576 					  ("CENTRALIZATION_TX_MASK_BIT CS #%d\n",
   2577 					   effective_cs));
   2578 			ret = ddr3_tip_centralization_tx(dev_num);
   2579 			if (is_reg_dump != 0)
   2580 				ddr3_tip_reg_dump(dev_num);
   2581 			if (ret != MV_OK) {
   2582 				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2583 						  ("ddr3_tip_centralization_tx failure CS #%d\n",
   2584 						   effective_cs));
   2585 				if (debug_mode == 0)
   2586 					return MV_FAIL;
   2587 			}
   2588 		}
   2589 	}
    2590 	/* Set to 0 after each loop to avoid an illegal value being used */
   2591 	effective_cs = 0;
   2592 
   2593 	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("restore registers to default\n"));
   2594 	/* restore register values */
   2595 	CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
   2596 
   2597 	if (is_reg_dump != 0)
   2598 		ddr3_tip_reg_dump(dev_num);
   2599 
   2600 	return MV_OK;
   2601 }
   2602 
   2603 /*
   2604  * DDR3 Dynamic training flow
   2605  */
   2606 static int ddr3_tip_ddr3_auto_tune(u32 dev_num)
   2607 {
   2608 	int status;
   2609 	u32 if_id, stage;
   2610 	int is_if_fail = 0, is_auto_tune_fail = 0;
   2611 
   2612 	training_stage = INIT_CONTROLLER;
   2613 
   2614 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
   2615 		for (stage = 0; stage < MAX_STAGE_LIMIT; stage++)
   2616 			training_result[stage][if_id] = NO_TEST_DONE;
   2617 	}
   2618 
   2619 	status = ddr3_tip_ddr3_training_main_flow(dev_num);
   2620 
   2621 	/* activate XSB test */
   2622 	if (xsb_validate_type != 0) {
   2623 		run_xsb_test(dev_num, xsb_validation_base_address, 1, 1,
   2624 			     0x1024);
   2625 	}
   2626 
   2627 	if (is_reg_dump != 0)
   2628 		ddr3_tip_reg_dump(dev_num);
   2629 
   2630 	/* print log */
   2631 	CHECK_STATUS(ddr3_tip_print_log(dev_num, window_mem_addr));
   2632 
   2633 #ifndef EXCLUDE_DEBUG_PRINTS
   2634 	if (status != MV_OK) {
   2635 		CHECK_STATUS(ddr3_tip_print_stability_log(dev_num));
   2636 	}
   2637 #endif /* EXCLUDE_DEBUG_PRINTS */
   2638 
   2639 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
   2640 		is_if_fail = 0;
   2641 		for (stage = 0; stage < MAX_STAGE_LIMIT; stage++) {
   2642 			if (training_result[stage][if_id] == TEST_FAILED)
   2643 				is_if_fail = 1;
   2644 		}
   2645 		if (is_if_fail == 1) {
   2646 			is_auto_tune_fail = 1;
   2647 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
   2648 					  ("Auto Tune failed for IF %d\n",
   2649 					   if_id));
   2650 		}
   2651 	}
   2652 
   2653 	if (((status == MV_FAIL) && (is_auto_tune_fail == 0)) ||
   2654 	    ((status == MV_OK) && (is_auto_tune_fail == 1))) {
    2655 		/*
    2656 		 * If the main flow result and the training result DB are not
    2657 		 * in sync, issue a warning (caused by the training result DB
    2658 		 * not being updated on failure)
    2659 		 */
    2660 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
    2661 				  ("Warning: Algorithm return value and Result DB "
    2662 				   "are not synced (status 0x%x, result DB %d)\n",
   2663 				   status, is_auto_tune_fail));
   2664 	}
   2665 
   2666 	if ((status != MV_OK) || (is_auto_tune_fail == 1))
   2667 		return MV_FAIL;
   2668 	else
   2669 		return MV_OK;
   2670 }
   2671 
   2672 /*
   2673  * Enable init sequence
   2674  */
   2675 int ddr3_tip_enable_init_sequence(u32 dev_num)
   2676 {
   2677 	int is_fail = 0;
   2678 	u32 if_id = 0, mem_mask = 0, bus_index = 0;
   2679 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
   2680 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
   2681 
   2682 	/* Enable init sequence */
   2683 	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 0,
   2684 				       SDRAM_INIT_CTRL_REG, 0x1, 0x1));
   2685 
   2686 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
   2687 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
   2688 
   2689 		if (ddr3_tip_if_polling
   2690 		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1,
   2691 		     SDRAM_INIT_CTRL_REG,
   2692 		     MAX_POLLING_ITERATIONS) != MV_OK) {
   2693 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2694 					  ("polling failed IF %d\n",
   2695 					   if_id));
   2696 			is_fail = 1;
   2697 			continue;
   2698 		}
   2699 
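		/* accumulate the per-bus mirror-enable bitmasks; if any are set, multi-CS is disabled below */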
   2700 		mem_mask = 0;
   2701 		for (bus_index = 0; bus_index < octets_per_if_num;
   2702 		     bus_index++) {
   2703 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_index);
   2704 			mem_mask |=
   2705 				tm->interface_params[if_id].
   2706 				as_bus_params[bus_index].mirror_enable_bitmask;
   2707 		}
   2708 
   2709 		if (mem_mask != 0) {
   2710 			/* Disable Multi CS */
   2711 			CHECK_STATUS(ddr3_tip_if_write
   2712 				     (dev_num, ACCESS_TYPE_MULTICAST,
   2713 				      if_id, DUAL_DUNIT_CFG_REG, 1 << 3,
   2714 				      1 << 3));
   2715 		}
   2716 	}
   2717 
   2718 	return (is_fail == 0) ? MV_OK : MV_FAIL;
   2719 }
   2720 
   2721 int ddr3_tip_register_dq_table(u32 dev_num, u32 *table)
   2722 {
   2723 	dq_map_table = table;
   2724 
   2725 	return MV_OK;
   2726 }
   2727 
   2728 /*
   2729  * Check if pup search is locked
   2730  */
   2731 int ddr3_tip_is_pup_lock(u32 *pup_buf, enum hws_training_result read_mode)
   2732 {
   2733 	u32 bit_start = 0, bit_end = 0, bit_id;
   2734 
   2735 	if (read_mode == RESULT_PER_BIT) {
   2736 		bit_start = 0;
   2737 		bit_end = BUS_WIDTH_IN_BITS - 1;
   2738 	} else {
   2739 		bit_start = 0;
   2740 		bit_end = 0;
   2741 	}
   2742 
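	/* the pup is considered locked only if every checked bit reports a lock */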
   2743 	for (bit_id = bit_start; bit_id <= bit_end; bit_id++) {
   2744 		if (GET_LOCK_RESULT(pup_buf[bit_id]) == 0)
   2745 			return 0;
   2746 	}
   2747 
   2748 	return 1;
   2749 }
   2750 
   2751 /*
   2752  * Get minimum buffer value
   2753  */
   2754 u8 ddr3_tip_get_buf_min(u8 *buf_ptr)
   2755 {
   2756 	u8 min_val = 0xff;
   2757 	u8 cnt = 0;
   2758 
   2759 	for (cnt = 0; cnt < BUS_WIDTH_IN_BITS; cnt++) {
   2760 		if (buf_ptr[cnt] < min_val)
   2761 			min_val = buf_ptr[cnt];
   2762 	}
   2763 
   2764 	return min_val;
   2765 }
   2766 
   2767 /*
   2768  * Get maximum buffer value
   2769  */
   2770 u8 ddr3_tip_get_buf_max(u8 *buf_ptr)
   2771 {
   2772 	u8 max_val = 0;
   2773 	u8 cnt = 0;
   2774 
   2775 	for (cnt = 0; cnt < BUS_WIDTH_IN_BITS; cnt++) {
   2776 		if (buf_ptr[cnt] > max_val)
   2777 			max_val = buf_ptr[cnt];
   2778 	}
   2779 
   2780 	return max_val;
   2781 }
   2782 
   2783 /*
   2784  * The following functions return memory parameters:
   2785  * bus and device width, device size
   2786  */
   2787 
   2788 u32 hws_ddr3_get_bus_width(void)
   2789 {
   2790 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
   2791 
   2792 	return (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) ==
   2793 		1) ? 16 : 32;
   2794 }
   2795 
   2796 u32 hws_ddr3_get_device_width(u32 if_id)
   2797 {
   2798 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
   2799 
   2800 	return (tm->interface_params[if_id].bus_width ==
   2801 		MV_DDR_DEV_WIDTH_8BIT) ? 8 : 16;
   2802 }
   2803 
   2804 u32 hws_ddr3_get_device_size(u32 if_id)
   2805 {
   2806 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
   2807 
   2808 	if (tm->interface_params[if_id].memory_size >=
   2809 	    MV_DDR_DIE_CAP_LAST) {
   2810 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2811 				  ("Error: Wrong device size of Cs: %d",
   2812 				   tm->interface_params[if_id].memory_size));
   2813 		return 0;
   2814 	} else {
   2815 		return 1 << tm->interface_params[if_id].memory_size;
   2816 	}
   2817 }
   2818 
   2819 int hws_ddr3_calc_mem_cs_size(u32 if_id, u32 cs, u32 *cs_size)
   2820 {
   2821 	u32 cs_mem_size, dev_size;
   2822 
   2823 	dev_size = hws_ddr3_get_device_size(if_id);
   2824 	if (dev_size != 0) {
   2825 		cs_mem_size = ((hws_ddr3_get_bus_width() /
   2826 				hws_ddr3_get_device_width(if_id)) * dev_size);
   2827 
    2828 		/* the calculated result is in units of GByte x 16 (i.e. 64 MB) to avoid floating point */
   2829 
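		/* e.g. a 32-bit bus of x8 devices with 4 Gbit dies (dev_size 8, assuming the usual die-capacity encoding): (32 / 8) * 8 = 32, i.e. 2 GB per CS */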
   2830 		if (cs_mem_size == 2) {
   2831 			*cs_size = _128M;
   2832 		} else if (cs_mem_size == 4) {
   2833 			*cs_size = _256M;
   2834 		} else if (cs_mem_size == 8) {
   2835 			*cs_size = _512M;
   2836 		} else if (cs_mem_size == 16) {
   2837 			*cs_size = _1G;
   2838 		} else if (cs_mem_size == 32) {
   2839 			*cs_size = _2G;
   2840 		} else {
   2841 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
   2842 					  ("Error: Wrong Memory size of Cs: %d", cs));
   2843 			return MV_FAIL;
   2844 		}
   2845 		return MV_OK;
   2846 	} else {
   2847 		return MV_FAIL;
   2848 	}
   2849 }
   2850 
   2851 int hws_ddr3_cs_base_adr_calc(u32 if_id, u32 cs, u32 *cs_base_addr)
   2852 {
   2853 	u32 cs_mem_size = 0;
   2854 #ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
   2855 	u32 physical_mem_size;
   2856 	u32 max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE;
   2857 #endif
   2858 
   2859 	if (hws_ddr3_calc_mem_cs_size(if_id, cs, &cs_mem_size) != MV_OK)
   2860 		return MV_FAIL;
   2861 
   2862 #ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
   2863 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
    2864 	/*
    2865 	 * If the number of address pins does not allow using the maximum
    2866 	 * memory size defined in the topology, the memory size is limited
    2867 	 * by DEVICE_MAX_DRAM_ADDRESS_SIZE.
    2868 	 */
   2869 	physical_mem_size = mem_size[tm->interface_params[0].memory_size];
   2870 
   2871 	if (hws_ddr3_get_device_width(cs) == 16) {
    2872 		/*
    2873 		 * A 16-bit memory device can be twice as large - the least
    2874 		 * significant address pin is not needed.
    2875 		 */
   2876 		max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE * 2;
   2877 	}
   2878 
   2879 	if (physical_mem_size > max_mem_size) {
   2880 		cs_mem_size = max_mem_size *
   2881 			(hws_ddr3_get_bus_width() /
   2882 			 hws_ddr3_get_device_width(if_id));
   2883 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
    2884 				  ("Updated physical mem size from 0x%x to 0x%x\n",
   2885 				   physical_mem_size,
   2886 				   DEVICE_MAX_DRAM_ADDRESS_SIZE));
   2887 	}
   2888 #endif
   2889 
   2890 	/* calculate CS base addr */
   2891 	*cs_base_addr = ((cs_mem_size) * cs) & 0xffff0000;
   2892 
   2893 	return MV_OK;
   2894 }
   2895