      1 // SPDX-License-Identifier: GPL-2.0
      2 /*
      3  * Copyright (C) Marvell International Ltd. and its affiliates
      4  */
      5 
      6 #include <common.h>
      7 #include <i2c.h>
      8 #include <spl.h>
      9 #include <asm/io.h>
     10 #include <asm/arch/cpu.h>
     11 #include <asm/arch/soc.h>
     12 
     13 #include "ddr3_init.h"
     14 #include "ddr3_hw_training.h"
     15 #include "xor.h"
     16 
     17 #ifdef MV88F78X60
     18 #include "ddr3_patterns_64bit.h"
     19 #else
     20 #include "ddr3_patterns_16bit.h"
     21 #if defined(MV88F672X)
     22 #include "ddr3_patterns_16bit.h"
     23 #endif
     24 #endif
     25 
     26 /*
     27  * Debug
     28  */
     29 
     30 #define DEBUG_MAIN_C(s, d, l) \
     31 	DEBUG_MAIN_S(s); DEBUG_MAIN_D(d, l); DEBUG_MAIN_S("\n")
     32 #define DEBUG_MAIN_FULL_C(s, d, l) \
     33 	DEBUG_MAIN_FULL_S(s); DEBUG_MAIN_FULL_D(d, l); DEBUG_MAIN_FULL_S("\n")
     34 
     35 #ifdef MV_DEBUG_MAIN
     36 #define DEBUG_MAIN_S(s)			puts(s)
     37 #define DEBUG_MAIN_D(d, l)		printf("%x", d)
     38 #else
     39 #define DEBUG_MAIN_S(s)
     40 #define DEBUG_MAIN_D(d, l)
     41 #endif
     42 
     43 #ifdef MV_DEBUG_MAIN_FULL
     44 #define DEBUG_MAIN_FULL_S(s)		puts(s)
     45 #define DEBUG_MAIN_FULL_D(d, l)		printf("%x", d)
     46 #else
     47 #define DEBUG_MAIN_FULL_S(s)
     48 #define DEBUG_MAIN_FULL_D(d, l)
     49 #endif
     50 
     51 #ifdef MV_DEBUG_SUSPEND_RESUME
     52 #define DEBUG_SUSPEND_RESUME_S(s)	puts(s)
     53 #define DEBUG_SUSPEND_RESUME_D(d, l)	printf("%x", d)
     54 #else
     55 #define DEBUG_SUSPEND_RESUME_S(s)
     56 #define DEBUG_SUSPEND_RESUME_D(d, l)
     57 #endif
     58 
     59 static u32 ddr3_sw_wl_rl_debug;
     60 static u32 ddr3_run_pbs = 1;
     61 
     62 void ddr3_print_version(void)
     63 {
     64 	puts("DDR3 Training Sequence - Ver 5.7.");
     65 }
     66 
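         /*
          * When set, a failure of the HW write/read leveling flow falls back
          * to the SW leveling routines instead of aborting the training
          * sequence (see ddr3_hw_training() below).
          */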
     67 void ddr3_set_sw_wl_rl_debug(u32 val)
     68 {
     69 	ddr3_sw_wl_rl_debug = val;
     70 }
     71 
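         /* Enable/disable the PBS stages (ddr3_pbs_rx()/ddr3_pbs_tx()) of the training */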
     72 void ddr3_set_pbs(u32 val)
     73 {
     74 	ddr3_run_pbs = val;
     75 }
     76 
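         /*
          * Name:     ddr3_hw_training()
          * Desc:     Top level DDR3 training flow: drop to a low DFS frequency,
          *           run write leveling and load the training patterns, then
          *           loop raising the frequency while running write leveling,
          *           read leveling and (where supported) PBS until the target
          *           frequency is reached; finish with RX/TX DQS centralization,
          *           ECC scrubbing and saving of the training results.
          * Returns:  MV_OK on success, an MV_DDR3_TRAINING_ERR_* code otherwise.
          */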
     77 int ddr3_hw_training(u32 target_freq, u32 ddr_width, int xor_bypass,
     78 		     u32 scrub_offs, u32 scrub_size, int dqs_clk_aligned,
     79 		     int debug_mode, int reg_dimm_skip_wl)
     80 {
      81 	/* A370 has no PBS mechanism, so first_loop_flag stays unused there */
     82 	__maybe_unused u32 first_loop_flag = 0;
     83 	u32 freq, reg;
     84 	MV_DRAM_INFO dram_info;
     85 	int ratio_2to1 = 0;
     86 	int tmp_ratio = 1;
     87 	int status;
     88 
     89 	if (debug_mode)
     90 		DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 1\n");
     91 
     92 	memset(&dram_info, 0, sizeof(dram_info));
     93 	dram_info.num_cs = ddr3_get_cs_num_from_reg();
     94 	dram_info.cs_ena = ddr3_get_cs_ena_from_reg();
     95 	dram_info.target_frequency = target_freq;
     96 	dram_info.ddr_width = ddr_width;
     97 	dram_info.num_of_std_pups = ddr_width / PUP_SIZE;
     98 	dram_info.rl400_bug = 0;
     99 	dram_info.multi_cs_mr_support = 0;
    100 #ifdef MV88F67XX
    101 	dram_info.rl400_bug = 1;
    102 #endif
    103 
    104 	/* Ignore ECC errors - if ECC is enabled */
    105 	reg = reg_read(REG_SDRAM_CONFIG_ADDR);
    106 	if (reg & (1 << REG_SDRAM_CONFIG_ECC_OFFS)) {
    107 		dram_info.ecc_ena = 1;
    108 		reg |= (1 << REG_SDRAM_CONFIG_IERR_OFFS);
    109 		reg_write(REG_SDRAM_CONFIG_ADDR, reg);
    110 	} else {
    111 		dram_info.ecc_ena = 0;
    112 	}
    113 
    114 	reg = reg_read(REG_SDRAM_CONFIG_ADDR);
    115 	if (reg & (1 << REG_SDRAM_CONFIG_REGDIMM_OFFS))
    116 		dram_info.reg_dimm = 1;
    117 	else
    118 		dram_info.reg_dimm = 0;
    119 
    120 	dram_info.num_of_total_pups = ddr_width / PUP_SIZE + dram_info.ecc_ena;
    121 
    122 	/* Get target 2T value */
    123 	reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
    124 	dram_info.mode_2t = (reg >> REG_DUNIT_CTRL_LOW_2T_OFFS) &
    125 		REG_DUNIT_CTRL_LOW_2T_MASK;
    126 
    127 	/* Get target CL value */
    128 #ifdef MV88F67XX
    129 	reg = reg_read(REG_DDR3_MR0_ADDR) >> 2;
    130 #else
    131 	reg = reg_read(REG_DDR3_MR0_CS_ADDR) >> 2;
    132 #endif
    133 
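         	/*
         	 * DDR3 MR0 encodes the CAS latency in bits A[6:4] and A2;
         	 * re-assemble them into the 4-bit code expected by
         	 * ddr3_valid_cl_to_cl() (A[6:4] as bits [3:1], A2 as bit 0).
         	 */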
    134 	reg = (((reg >> 1) & 0xE) | (reg & 0x1)) & 0xF;
    135 	dram_info.cl = ddr3_valid_cl_to_cl(reg);
    136 
    137 	/* Get target CWL value */
    138 #ifdef MV88F67XX
    139 	reg = reg_read(REG_DDR3_MR2_ADDR) >> REG_DDR3_MR2_CWL_OFFS;
    140 #else
    141 	reg = reg_read(REG_DDR3_MR2_CS_ADDR) >> REG_DDR3_MR2_CWL_OFFS;
    142 #endif
    143 
    144 	reg &= REG_DDR3_MR2_CWL_MASK;
    145 	dram_info.cwl = reg;
    146 #if !defined(MV88F67XX)
    147 	/* A370 has no PBS mechanism */
    148 #if defined(MV88F78X60)
    149 	if ((dram_info.target_frequency > DDR_400) && (ddr3_run_pbs))
    150 		first_loop_flag = 1;
    151 #else
    152 	/* first_loop_flag = 1; skip mid freq at ALP/A375 */
    153 	if ((dram_info.target_frequency > DDR_400) && (ddr3_run_pbs) &&
    154 	    (mv_ctrl_revision_get() >= UMC_A0))
    155 		first_loop_flag = 1;
    156 	else
    157 		first_loop_flag = 0;
    158 #endif
    159 #endif
    160 
    161 	freq = dram_info.target_frequency;
    162 
    163 	/* Set ODT to always on */
    164 	ddr3_odt_activate(1);
    165 
    166 	/* Init XOR */
    167 	mv_sys_xor_init(&dram_info);
    168 
    169 	/* Get DRAM/HCLK ratio */
    170 	if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
    171 		ratio_2to1 = 1;
    172 
    173 	/*
    174 	 * Xor Bypass - ECC support in AXP is currently available for 1:1
     175 	 * frequency modes only.
    176 	 * Not all frequency modes support the ddr3 training sequence
    177 	 * (Only 1200/300).
    178 	 * Xor Bypass allows using the Xor initializations and scrubbing
    179 	 * inside the ddr3 training sequence without running the training
    180 	 * itself.
    181 	 */
    182 	if (xor_bypass == 0) {
    183 		if (ddr3_run_pbs) {
    184 			DEBUG_MAIN_S("DDR3 Training Sequence - Run with PBS.\n");
    185 		} else {
    186 			DEBUG_MAIN_S("DDR3 Training Sequence - Run without PBS.\n");
    187 		}
    188 
    189 		if (dram_info.target_frequency > DFS_MARGIN) {
    190 			tmp_ratio = 0;
    191 			freq = DDR_100;
    192 
    193 			if (dram_info.reg_dimm == 1)
    194 				freq = DDR_300;
    195 
    196 			if (MV_OK != ddr3_dfs_high_2_low(freq, &dram_info)) {
    197 				/* Set low - 100Mhz DDR Frequency by HW */
    198 				DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Dfs High2Low)\n");
    199 				return MV_DDR3_TRAINING_ERR_DFS_H2L;
    200 			}
    201 
    202 			if ((dram_info.reg_dimm == 1) &&
    203 			    (reg_dimm_skip_wl == 0)) {
    204 				if (MV_OK !=
    205 				    ddr3_write_leveling_hw_reg_dimm(freq,
    206 								    &dram_info))
    207 					DEBUG_MAIN_S("DDR3 Training Sequence - Registered DIMM Low WL - SKIP\n");
    208 			}
    209 
    210 			if (ddr3_get_log_level() >= MV_LOG_LEVEL_1)
    211 				ddr3_print_freq(freq);
    212 
    213 			if (debug_mode)
    214 				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 2\n");
    215 		} else {
    216 			if (!dqs_clk_aligned) {
    217 #ifdef MV88F67XX
    218 				/*
    219 				 * If running training sequence without DFS,
    220 				 * we must run Write leveling before writing
    221 				 * the patterns
    222 				 */
    223 
    224 				/*
     225 				 * ODT - Multi CS systems use SW WL,
     226 				 * single CS systems use HW WL
    227 				 */
    228 				if (dram_info.cs_ena > 1) {
    229 					if (MV_OK !=
    230 					    ddr3_write_leveling_sw(
    231 						    freq, tmp_ratio,
    232 						    &dram_info)) {
    233 						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
    234 						return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
    235 					}
    236 				} else {
    237 					if (MV_OK !=
    238 					    ddr3_write_leveling_hw(freq,
    239 								   &dram_info)) {
    240 						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
    241 						return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
    242 					}
    243 				}
    244 #else
    245 				if (MV_OK != ddr3_write_leveling_hw(
    246 					    freq, &dram_info)) {
    247 					DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
    248 					if (ddr3_sw_wl_rl_debug) {
    249 						if (MV_OK !=
    250 						    ddr3_write_leveling_sw(
    251 							    freq, tmp_ratio,
    252 							    &dram_info)) {
    253 							DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
    254 							return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
    255 						}
    256 					} else {
    257 						return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
    258 					}
    259 				}
    260 #endif
    261 			}
    262 
    263 			if (debug_mode)
    264 				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 3\n");
    265 		}
    266 
    267 		if (MV_OK != ddr3_load_patterns(&dram_info, 0)) {
    268 			DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Loading Patterns)\n");
    269 			return MV_DDR3_TRAINING_ERR_LOAD_PATTERNS;
    270 		}
    271 
    272 		/*
    273 		 * TODO:
    274 		 * The mainline U-Boot port of the bin_hdr DDR training code
     275 		 * needs a minimum delay of 20ms here (10ms is a bit too short
     276 		 * and the CPU hangs). The bin_hdr code doesn't have this delay.
     277 		 * To be safe, let's add a delay of 50ms here.
    278 		 *
    279 		 * Tested on the Marvell DB-MV784MP-GP board
    280 		 */
    281 		mdelay(50);
    282 
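         		/*
         		 * Frequency ramp loop: on MV88F78X60 with PBS enabled the
         		 * first pass only raises the frequency to DDR_400 so that
         		 * the PBS RX/TX stages can run there; the following pass
         		 * then moves on to the target frequency.
         		 */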
    283 		do {
    284 			freq = dram_info.target_frequency;
    285 			tmp_ratio = ratio_2to1;
    286 			DEBUG_MAIN_FULL_S("DDR3 Training Sequence - DEBUG - 4\n");
    287 
    288 #if defined(MV88F78X60)
    289 			/*
    290 			 * There is a difference on the DFS frequency at the
    291 			 * first iteration of this loop
    292 			 */
    293 			if (first_loop_flag) {
    294 				freq = DDR_400;
    295 				tmp_ratio = 0;
    296 			}
    297 #endif
    298 
    299 			if (MV_OK != ddr3_dfs_low_2_high(freq, tmp_ratio,
    300 							 &dram_info)) {
    301 				DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Dfs Low2High)\n");
    302 				return MV_DDR3_TRAINING_ERR_DFS_H2L;
    303 			}
    304 
    305 			if (ddr3_get_log_level() >= MV_LOG_LEVEL_1) {
    306 				ddr3_print_freq(freq);
    307 			}
    308 
    309 			if (debug_mode)
    310 				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 5\n");
    311 
    312 			/* Write leveling */
    313 			if (!dqs_clk_aligned) {
    314 #ifdef MV88F67XX
    315 				/*
     316 				 * ODT - Multi CS systems that do not support
     317 				 * Multi CS MRS commands must use SW WL
    318 				 */
    319 				if (dram_info.cs_ena > 1) {
    320 					if (MV_OK != ddr3_write_leveling_sw(
    321 						    freq, tmp_ratio, &dram_info)) {
    322 						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
    323 						return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
    324 					}
    325 				} else {
    326 					if (MV_OK != ddr3_write_leveling_hw(
    327 						    freq, &dram_info)) {
    328 						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
    329 						return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
    330 					}
    331 				}
    332 #else
    333 				if ((dram_info.reg_dimm == 1) &&
    334 				    (freq == DDR_400)) {
    335 					if (reg_dimm_skip_wl == 0) {
    336 						if (MV_OK != ddr3_write_leveling_hw_reg_dimm(
    337 							    freq, &dram_info))
    338 							DEBUG_MAIN_S("DDR3 Training Sequence - Registered DIMM WL - SKIP\n");
    339 					}
    340 				} else {
    341 					if (MV_OK != ddr3_write_leveling_hw(
    342 						    freq, &dram_info)) {
    343 						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
    344 						if (ddr3_sw_wl_rl_debug) {
    345 							if (MV_OK != ddr3_write_leveling_sw(
    346 								    freq, tmp_ratio, &dram_info)) {
    347 								DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
    348 								return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
    349 							}
    350 						} else {
    351 							return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
    352 						}
    353 					}
    354 				}
    355 #endif
    356 				if (debug_mode)
    357 					DEBUG_MAIN_S
    358 					    ("DDR3 Training Sequence - DEBUG - 6\n");
    359 			}
    360 
    361 			/* Read Leveling */
    362 			/*
     363 			 * Armada 370 - Support for HCLK @ 400MHz - must use
    364 			 * SW read leveling
    365 			 */
    366 			if (freq == DDR_400 && dram_info.rl400_bug) {
    367 				status = ddr3_read_leveling_sw(freq, tmp_ratio,
    368 						       &dram_info);
    369 				if (MV_OK != status) {
    370 					DEBUG_MAIN_S
    371 					    ("DDR3 Training Sequence - FAILED (Read Leveling Sw)\n");
    372 					return status;
    373 				}
    374 			} else {
    375 				if (MV_OK != ddr3_read_leveling_hw(
    376 					    freq, &dram_info)) {
    377 					DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Read Leveling Hw)\n");
    378 					if (ddr3_sw_wl_rl_debug) {
    379 						if (MV_OK != ddr3_read_leveling_sw(
    380 							    freq, tmp_ratio,
    381 							    &dram_info)) {
    382 							DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Read Leveling Sw)\n");
    383 							return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
    384 						}
    385 					} else {
    386 						return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
    387 					}
    388 				}
    389 			}
    390 
    391 			if (debug_mode)
    392 				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 7\n");
    393 
    394 			if (MV_OK != ddr3_wl_supplement(&dram_info)) {
    395 				DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hi-Freq Sup)\n");
    396 				return MV_DDR3_TRAINING_ERR_WR_LVL_HI_FREQ;
    397 			}
    398 
    399 			if (debug_mode)
    400 				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 8\n");
    401 #if !defined(MV88F67XX)
    402 			/* A370 has no PBS mechanism */
    403 #if defined(MV88F78X60) || defined(MV88F672X)
    404 			if (first_loop_flag == 1) {
    405 				first_loop_flag = 0;
    406 
    407 				status = MV_OK;
    408 				status = ddr3_pbs_rx(&dram_info);
    409 				if (MV_OK != status) {
    410 					DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (PBS RX)\n");
    411 					return status;
    412 				}
    413 
    414 				if (debug_mode)
    415 					DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 9\n");
    416 
    417 				status = ddr3_pbs_tx(&dram_info);
    418 				if (MV_OK != status) {
    419 					DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (PBS TX)\n");
    420 					return status;
    421 				}
    422 
    423 				if (debug_mode)
    424 					DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 10\n");
    425 			}
    426 #endif
    427 #endif
    428 		} while (freq != dram_info.target_frequency);
    429 
    430 		status = ddr3_dqs_centralization_rx(&dram_info);
    431 		if (MV_OK != status) {
    432 			DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (DQS Centralization RX)\n");
    433 			return status;
    434 		}
    435 
    436 		if (debug_mode)
    437 			DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 11\n");
    438 
    439 		status = ddr3_dqs_centralization_tx(&dram_info);
    440 		if (MV_OK != status) {
    441 			DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (DQS Centralization TX)\n");
    442 			return status;
    443 		}
    444 
    445 		if (debug_mode)
    446 			DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 12\n");
    447 	}
    448 
    449 	ddr3_set_performance_params(&dram_info);
    450 
    451 	if (dram_info.ecc_ena) {
    452 		/* Need to SCRUB the DRAM memory area to load U-Boot */
    453 		mv_sys_xor_finish();
    454 		dram_info.num_cs = 1;
    455 		dram_info.cs_ena = 1;
    456 		mv_sys_xor_init(&dram_info);
    457 		mv_xor_mem_init(0, scrub_offs, scrub_size, 0xdeadbeef,
    458 				0xdeadbeef);
    459 
    460 		/* Wait for previous transfer completion */
    461 		while (mv_xor_state_get(0) != MV_IDLE)
    462 			;
    463 
    464 		if (debug_mode)
    465 			DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 13\n");
    466 	}
    467 
    468 	/* Return XOR State */
    469 	mv_sys_xor_finish();
    470 
    471 #if defined(MV88F78X60)
     472 	/* Save training results in memory for resume state */
    473 	ddr3_save_training(&dram_info);
    474 #endif
    475 	/* Clear ODT always on */
    476 	ddr3_odt_activate(0);
    477 
    478 	/* Configure Dynamic read ODT */
    479 	ddr3_odt_read_dynamic_config(&dram_info);
    480 
    481 	return MV_OK;
    482 }
    483 
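         /*
          * Name:     ddr3_set_performance_params()
          * Desc:     Derives the write-to-write, read-to-read and
          *           read-to-write/write-to-read turnaround timings from the
          *           min/max write and read leveling phases found during
          *           training, and programs them into the SDRAM Timing (High)
          *           register.
          * Args:     MV_DRAM_INFO *dram_info
          * Returns:  None.
          */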
    484 void ddr3_set_performance_params(MV_DRAM_INFO *dram_info)
    485 {
    486 	u32 twr2wr, trd2rd, trd2wr_wr2rd;
    487 	u32 tmp1, tmp2, reg;
    488 
    489 	DEBUG_MAIN_FULL_C("Max WL Phase: ", dram_info->wl_max_phase, 2);
    490 	DEBUG_MAIN_FULL_C("Min WL Phase: ", dram_info->wl_min_phase, 2);
    491 	DEBUG_MAIN_FULL_C("Max RL Phase: ", dram_info->rl_max_phase, 2);
    492 	DEBUG_MAIN_FULL_C("Min RL Phase: ", dram_info->rl_min_phase, 2);
    493 
    494 	if (dram_info->wl_max_phase < 2)
    495 		twr2wr = 0x2;
    496 	else
    497 		twr2wr = 0x3;
    498 
    499 	trd2rd = 0x1 + (dram_info->rl_max_phase + 1) / 2 +
    500 		(dram_info->rl_max_phase + 1) % 2;
    501 
    502 	tmp1 = (dram_info->rl_max_phase - dram_info->wl_min_phase) / 2 +
    503 		(((dram_info->rl_max_phase - dram_info->wl_min_phase) % 2) >
    504 		 0 ? 1 : 0);
    505 	tmp2 = (dram_info->wl_max_phase - dram_info->rl_min_phase) / 2 +
    506 		((dram_info->wl_max_phase - dram_info->rl_min_phase) % 2 >
    507 		 0 ? 1 : 0);
    508 	trd2wr_wr2rd = (tmp1 >= tmp2) ? tmp1 : tmp2;
    509 
    510 	trd2wr_wr2rd += 2;
    511 	trd2rd += 2;
    512 	twr2wr += 2;
    513 
    514 	DEBUG_MAIN_FULL_C("WR 2 WR: ", twr2wr, 2);
    515 	DEBUG_MAIN_FULL_C("RD 2 RD: ", trd2rd, 2);
    516 	DEBUG_MAIN_FULL_C("RD 2 WR / WR 2 RD: ", trd2wr_wr2rd, 2);
    517 
    518 	reg = reg_read(REG_SDRAM_TIMING_HIGH_ADDR);
    519 
    520 	reg &= ~(REG_SDRAM_TIMING_H_W2W_MASK << REG_SDRAM_TIMING_H_W2W_OFFS);
    521 	reg |= ((twr2wr & REG_SDRAM_TIMING_H_W2W_MASK) <<
    522 		REG_SDRAM_TIMING_H_W2W_OFFS);
    523 
    524 	reg &= ~(REG_SDRAM_TIMING_H_R2R_MASK << REG_SDRAM_TIMING_H_R2R_OFFS);
    525 	reg &= ~(REG_SDRAM_TIMING_H_R2R_H_MASK <<
    526 		 REG_SDRAM_TIMING_H_R2R_H_OFFS);
    527 	reg |= ((trd2rd & REG_SDRAM_TIMING_H_R2R_MASK) <<
    528 		REG_SDRAM_TIMING_H_R2R_OFFS);
    529 	reg |= (((trd2rd >> 2) & REG_SDRAM_TIMING_H_R2R_H_MASK) <<
    530 		REG_SDRAM_TIMING_H_R2R_H_OFFS);
    531 
    532 	reg &= ~(REG_SDRAM_TIMING_H_R2W_W2R_MASK <<
    533 		 REG_SDRAM_TIMING_H_R2W_W2R_OFFS);
    534 	reg &= ~(REG_SDRAM_TIMING_H_R2W_W2R_H_MASK <<
    535 		 REG_SDRAM_TIMING_H_R2W_W2R_H_OFFS);
    536 	reg |= ((trd2wr_wr2rd & REG_SDRAM_TIMING_H_R2W_W2R_MASK) <<
    537 		REG_SDRAM_TIMING_H_R2W_W2R_OFFS);
    538 	reg |= (((trd2wr_wr2rd >> 2) & REG_SDRAM_TIMING_H_R2W_W2R_H_MASK) <<
    539 		REG_SDRAM_TIMING_H_R2W_W2R_H_OFFS);
    540 
    541 	reg_write(REG_SDRAM_TIMING_HIGH_ADDR, reg);
    542 }
    543 
    544 /*
    545  * Perform DDR3 PUP Indirect Write
    546  */
    547 void ddr3_write_pup_reg(u32 mode, u32 cs, u32 pup, u32 phase, u32 delay)
    548 {
    549 	u32 reg = 0;
    550 
    551 	if (pup == PUP_BC)
    552 		reg |= (1 << REG_PHY_BC_OFFS);
    553 	else
    554 		reg |= (pup << REG_PHY_PUP_OFFS);
    555 
    556 	reg |= ((0x4 * cs + mode) << REG_PHY_CS_OFFS);
    557 	reg |= (phase << REG_PHY_PHASE_OFFS) | delay;
    558 
    559 	if (mode == PUP_WL_MODE)
    560 		reg |= ((INIT_WL_DELAY + delay) << REG_PHY_DQS_REF_DLY_OFFS);
    561 
    562 	reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg);	/* 0x16A0 */
    563 	reg |= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR;
    564 	reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg);	/* 0x16A0 */
    565 
    566 	do {
    567 		reg = reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR) &
    568 			REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE;
    569 	} while (reg);	/* Wait for '0' to mark the end of the transaction */
    570 
     571 	/* If in read leveling mode - need to write to register 3 separately */
    572 	if (mode == PUP_RL_MODE) {
    573 		reg = 0;
    574 
    575 		if (pup == PUP_BC)
    576 			reg |= (1 << REG_PHY_BC_OFFS);
    577 		else
    578 			reg |= (pup << REG_PHY_PUP_OFFS);
    579 
    580 		reg |= ((0x4 * cs + mode + 1) << REG_PHY_CS_OFFS);
    581 		reg |= (INIT_RL_DELAY);
    582 
    583 		reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg); /* 0x16A0 */
    584 		reg |= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR;
    585 		reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg); /* 0x16A0 */
    586 
    587 		do {
    588 			reg = reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR) &
    589 				REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE;
    590 		} while (reg);
    591 	}
    592 }
    593 
    594 /*
    595  * Perform DDR3 PUP Indirect Read
    596  */
    597 u32 ddr3_read_pup_reg(u32 mode, u32 cs, u32 pup)
    598 {
    599 	u32 reg;
    600 
    601 	reg = (pup << REG_PHY_PUP_OFFS) |
    602 		((0x4 * cs + mode) << REG_PHY_CS_OFFS);
    603 	reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg);	/* 0x16A0 */
    604 
    605 	reg |= REG_PHY_REGISTRY_FILE_ACCESS_OP_RD;
    606 	reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg);	/* 0x16A0 */
    607 
    608 	do {
    609 		reg = reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR) &
    610 			REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE;
    611 	} while (reg);	/* Wait for '0' to mark the end of the transaction */
    612 
    613 	return reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR);	/* 0x16A0 */
    614 }
    615 
    616 /*
    617  * Set training patterns
    618  */
    619 int ddr3_load_patterns(MV_DRAM_INFO *dram_info, int resume)
    620 {
    621 	u32 reg;
    622 
    623 	/* Enable SW override - Required for the ECC Pup */
    624 	reg = reg_read(REG_DRAM_TRAINING_2_ADDR) |
    625 		(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS);
    626 
    627 	/* [0] = 1 - Enable SW override  */
    628 	/* 0x15B8 - Training SW 2 Register */
    629 	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);
    630 
    631 	reg = (1 << REG_DRAM_TRAINING_AUTO_OFFS);
    632 	reg_write(REG_DRAM_TRAINING_ADDR, reg);	/* 0x15B0 - Training Register */
    633 
    634 	if (resume == 0) {
    635 #if defined(MV88F78X60) || defined(MV88F672X)
    636 		ddr3_load_pbs_patterns(dram_info);
    637 #endif
    638 		ddr3_load_dqs_patterns(dram_info);
    639 	}
    640 
    641 	/* Disable SW override - Must be in a different stage */
     642 	/* [0] = 0 - Disable SW override */
    643 	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
    644 	reg &= ~(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS);
    645 	/* 0x15B8 - Training SW 2 Register */
    646 	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);
    647 
    648 	reg = reg_read(REG_DRAM_TRAINING_1_ADDR) |
    649 		(1 << REG_DRAM_TRAINING_1_TRNBPOINT_OFFS);
    650 	reg_write(REG_DRAM_TRAINING_1_ADDR, reg);
    651 
    652 	/* Set Base Addr */
    653 #if defined(MV88F67XX)
    654 	reg_write(REG_DRAM_TRAINING_PATTERN_BASE_ADDR, 0);
    655 #else
    656 	if (resume == 0)
    657 		reg_write(REG_DRAM_TRAINING_PATTERN_BASE_ADDR, 0);
    658 	else
    659 		reg_write(REG_DRAM_TRAINING_PATTERN_BASE_ADDR,
    660 			  RESUME_RL_PATTERNS_ADDR);
    661 #endif
    662 
    663 	/* Set Patterns */
    664 	if (resume == 0) {
    665 		reg = (dram_info->cs_ena << REG_DRAM_TRAINING_CS_OFFS) |
    666 			(1 << REG_DRAM_TRAINING_PATTERNS_OFFS);
    667 	} else {
    668 		reg = (0x1 << REG_DRAM_TRAINING_CS_OFFS) |
    669 			(1 << REG_DRAM_TRAINING_PATTERNS_OFFS);
    670 	}
    671 
    672 	reg |= (1 << REG_DRAM_TRAINING_AUTO_OFFS);
    673 
    674 	reg_write(REG_DRAM_TRAINING_ADDR, reg);
    675 
    676 	udelay(100);
    677 
    678 	/* Check if Successful */
    679 	if (reg_read(REG_DRAM_TRAINING_ADDR) &
    680 	    (1 << REG_DRAM_TRAINING_ERROR_OFFS))
    681 		return MV_OK;
    682 	else
    683 		return MV_FAIL;
    684 }
    685 
    686 #if !defined(MV88F67XX)
    687 /*
    688  * Name:     ddr3_save_training(MV_DRAM_INFO *dram_info)
     689  * Desc:     Saves the training results to memory (RL, WL, PBS, Rx/Tx
     690  *           centralization)
    691  * Args:     MV_DRAM_INFO *dram_info
    692  * Notes:
    693  * Returns:  None.
    694  */
    695 void ddr3_save_training(MV_DRAM_INFO *dram_info)
    696 {
    697 	u32 val, pup, tmp_cs, cs, i, dq;
    698 	u32 crc = 0;
    699 	u32 regs = 0;
    700 	u32 *sdram_offset = (u32 *)RESUME_TRAINING_VALUES_ADDR;
    701 	u32 mode_config[MAX_TRAINING_MODE];
    702 
    703 	mode_config[DQS_WR_MODE] = PUP_DQS_WR;
    704 	mode_config[WL_MODE_] = PUP_WL_MODE;
    705 	mode_config[RL_MODE_] = PUP_RL_MODE;
    706 	mode_config[DQS_RD_MODE] = PUP_DQS_RD;
    707 	mode_config[PBS_TX_DM_MODE] = PUP_PBS_TX_DM;
    708 	mode_config[PBS_TX_MODE] = PUP_PBS_TX;
    709 	mode_config[PBS_RX_MODE] = PUP_PBS_RX;
    710 
    711 	/* num of training modes */
    712 	for (i = 0; i < MAX_TRAINING_MODE; i++) {
    713 		tmp_cs = dram_info->cs_ena;
    714 		/* num of CS */
    715 		for (cs = 0; cs < MAX_CS; cs++) {
    716 			if (tmp_cs & (1 << cs)) {
    717 				/* num of PUPs */
    718 				for (pup = 0; pup < dram_info->num_of_total_pups;
    719 				     pup++) {
    720 					if (pup == dram_info->num_of_std_pups &&
    721 					    dram_info->ecc_ena)
    722 						pup = ECC_PUP;
    723 					if (i == PBS_TX_DM_MODE) {
    724 						/*
    725 						 * Change CS bitmask because
    726 						 * PBS works only with CS0
    727 						 */
    728 						tmp_cs = 0x1;
    729 						val = ddr3_read_pup_reg(
    730 							mode_config[i], CS0, pup);
    731 					} else if (i == PBS_TX_MODE ||
    732 						   i == PBS_RX_MODE) {
    733 						/*
    734 						 * Change CS bitmask because
    735 						 * PBS works only with CS0
    736 						 */
    737 						tmp_cs = 0x1;
    738 						for (dq = 0; dq <= DQ_NUM;
    739 						     dq++) {
    740 							val = ddr3_read_pup_reg(
    741 								mode_config[i] + dq,
    742 								CS0,
    743 								pup);
    744 							(*sdram_offset) = val;
    745 							crc += *sdram_offset;
    746 							sdram_offset++;
    747 							regs++;
    748 						}
    749 						continue;
    750 					} else {
    751 						val = ddr3_read_pup_reg(
    752 							mode_config[i], cs, pup);
    753 					}
    754 
    755 					*sdram_offset = val;
    756 					crc += *sdram_offset;
    757 					sdram_offset++;
    758 					regs++;
    759 				}
    760 			}
    761 		}
    762 	}
    763 
    764 	*sdram_offset = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
    765 	crc += *sdram_offset;
    766 	sdram_offset++;
    767 	regs++;
    768 	*sdram_offset = reg_read(REG_READ_DATA_READY_DELAYS_ADDR);
    769 	crc += *sdram_offset;
    770 	sdram_offset++;
    771 	regs++;
    772 	sdram_offset = (u32 *)NUM_OF_REGISTER_ADDR;
    773 	*sdram_offset = regs;
    774 	DEBUG_SUSPEND_RESUME_S("Training Results CheckSum write= ");
    775 	DEBUG_SUSPEND_RESUME_D(crc, 8);
    776 	DEBUG_SUSPEND_RESUME_S("\n");
    777 	sdram_offset = (u32 *)CHECKSUM_RESULT_ADDR;
    778 	*sdram_offset = crc;
    779 }
    780 
    781 /*
    782  * Name:     ddr3_read_training_results()
     783  * Desc:     Reads the training results from memory (RL, WL, PBS, Rx/Tx
     784  *           centralization)
     785  *           and writes them to the relevant registers
     786  * Args:     None
     787  * Notes:
     788  * Returns:  MV_OK on success, MV_FAIL on checksum mismatch.
    789  */
    790 int ddr3_read_training_results(void)
    791 {
    792 	u32 val, reg, idx, dqs_wr_idx = 0, crc = 0;
    793 	u32 *sdram_offset = (u32 *)RESUME_TRAINING_VALUES_ADDR;
    794 	u32 training_val[RESUME_TRAINING_VALUES_MAX] = { 0 };
    795 	u32 regs = *((u32 *)NUM_OF_REGISTER_ADDR);
    796 
    797 	/*
    798 	 * Read Training results & Dunit registers from memory and write
     799 	 * them to an array
    800 	 */
    801 	for (idx = 0; idx < regs; idx++) {
    802 		training_val[idx] = *sdram_offset;
    803 		crc += *sdram_offset;
    804 		sdram_offset++;
    805 	}
    806 
    807 	sdram_offset = (u32 *)CHECKSUM_RESULT_ADDR;
    808 
    809 	if ((*sdram_offset) == crc) {
    810 		DEBUG_SUSPEND_RESUME_S("Training Results CheckSum read PASS= ");
    811 		DEBUG_SUSPEND_RESUME_D(crc, 8);
    812 		DEBUG_SUSPEND_RESUME_S("\n");
    813 	} else {
    814 		DEBUG_MAIN_S("Wrong Training Results CheckSum\n");
    815 		return MV_FAIL;
    816 	}
    817 
    818 	/*
    819 	 * We iterate through all the registers except for the last 2 since
    820 	 * they are Dunit registers (and not PHY registers)
    821 	 */
    822 	for (idx = 0; idx < (regs - 2); idx++) {
    823 		val = training_val[idx];
     824 		reg = (val >> REG_PHY_CS_OFFS) & 0x3F; /* read the PHY address */
     825 
     826 		/* Check if the value belongs to the DQS WR */
    827 		if (reg == PUP_WL_MODE) {
    828 			/* bit[5:0] in DQS_WR are delay */
    829 			val = (training_val[dqs_wr_idx++] & 0x3F);
    830 			/*
    831 			 * bit[15:10] are DQS_WR delay & bit[9:0] are
    832 			 * WL phase & delay
    833 			 */
    834 			val = (val << REG_PHY_DQS_REF_DLY_OFFS) |
    835 				(training_val[idx] & 0x3C003FF);
    836 			/* Add Request pending and write operation bits */
    837 			val |= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR;
    838 		} else if (reg == PUP_DQS_WR) {
    839 			/*
    840 			 * Do nothing since DQS_WR will be done in PUP_WL_MODE
    841 			 */
    842 			continue;
    843 		}
    844 
    845 		val |= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR;
    846 		reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, val);
    847 		do {
    848 			val = (reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR)) &
    849 				REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE;
    850 		} while (val);	/* Wait for '0' to mark the end of the transaction */
    851 	}
    852 
    853 	/* write last 2 Dunit configurations */
    854 	val = training_val[idx];
    855 	reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, val);	/* reg 0x1538 */
    856 	val = training_val[idx + 1];
    857 	reg_write(REG_READ_DATA_READY_DELAYS_ADDR, val);	/* reg 0x153c */
    858 
    859 	return MV_OK;
    860 }
    861 
    862 /*
    863  * Name:     ddr3_check_if_resume_mode()
    864  * Desc:     Reads the address (0x3000) of the Resume Magic word (0xDEADB002)
    865  * Args:     MV_DRAM_INFO *dram_info
    866  * Notes:
    867  * Returns:  return (magic_word == SUSPEND_MAGIC_WORD)
    868  */
    869 int ddr3_check_if_resume_mode(MV_DRAM_INFO *dram_info, u32 freq)
    870 {
    871 	u32 magic_word;
    872 	u32 *sdram_offset = (u32 *)BOOT_INFO_ADDR;
    873 
    874 	if (dram_info->reg_dimm != 1) {
    875 		/*
     876 		 * Perform write leveling in order to initialize the PHY at
     877 		 * the low frequency
    878 		 */
    879 		if (MV_OK != ddr3_write_leveling_hw(freq, dram_info)) {
    880 			DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
    881 			return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
    882 		}
    883 	}
    884 
    885 	if (MV_OK != ddr3_load_patterns(dram_info, 1)) {
    886 		DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Loading Patterns)\n");
    887 		return MV_DDR3_TRAINING_ERR_LOAD_PATTERNS;
    888 	}
    889 
    890 	/* Enable CS0 only for RL */
    891 	dram_info->cs_ena = 0x1;
    892 
     893 	/* Perform read leveling in order to get stable memory */
    894 	if (MV_OK != ddr3_read_leveling_hw(freq, dram_info)) {
    895 		DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Read Leveling Hw)\n");
    896 		return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
    897 	}
    898 
    899 	/* Back to relevant CS */
    900 	dram_info->cs_ena = ddr3_get_cs_ena_from_reg();
    901 
    902 	magic_word = *sdram_offset;
    903 	return magic_word == SUSPEND_MAGIC_WORD;
    904 }
    905 
    906 /*
    907  * Name:     ddr3_training_suspend_resume()
    908  * Desc:     Execute the Resume state
    909  * Args:     MV_DRAM_INFO *dram_info
    910  * Notes:
     911  * Returns:  MV_OK on success, otherwise an error code.
    912  */
    913 int ddr3_training_suspend_resume(MV_DRAM_INFO *dram_info)
    914 {
    915 	u32 freq, reg;
    916 	int tmp_ratio;
    917 
    918 	/* Configure DDR */
    919 	if (MV_OK != ddr3_read_training_results())
    920 		return MV_FAIL;
    921 
    922 	/* Reset read FIFO */
    923 	reg = reg_read(REG_DRAM_TRAINING_ADDR);
    924 
    925 	/* Start Auto Read Leveling procedure */
    926 	reg |= (1 << REG_DRAM_TRAINING_RL_OFFS);
    927 	reg_write(REG_DRAM_TRAINING_ADDR, reg);	/* 0x15B0 - Training Register */
    928 
    929 	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
    930 	reg |= ((1 << REG_DRAM_TRAINING_2_FIFO_RST_OFFS) +
    931 		(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS));
    932 
    933 	/* [0] = 1 - Enable SW override, [4] = 1 - FIFO reset  */
    934 	/* 0x15B8 - Training SW 2 Register */
    935 	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);
    936 
    937 	udelay(2);
    938 
    939 	reg = reg_read(REG_DRAM_TRAINING_ADDR);
    940 	/* Clear Auto Read Leveling procedure */
    941 	reg &= ~(1 << REG_DRAM_TRAINING_RL_OFFS);
    942 	reg_write(REG_DRAM_TRAINING_ADDR, reg);	/* 0x15B0 - Training Register */
    943 
    944 	/* Return to target frequency */
    945 	freq = dram_info->target_frequency;
    946 	tmp_ratio = 1;
    947 	if (MV_OK != ddr3_dfs_low_2_high(freq, tmp_ratio, dram_info)) {
    948 		DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Dfs Low2High)\n");
    949 		return MV_DDR3_TRAINING_ERR_DFS_H2L;
    950 	}
    951 
    952 	if (dram_info->ecc_ena) {
     953 		/* Scrub the RL pattern area and the training area */
    954 		mv_sys_xor_finish();
    955 		dram_info->num_cs = 1;
    956 		dram_info->cs_ena = 1;
    957 		mv_sys_xor_init(dram_info);
    958 		mv_xor_mem_init(0, RESUME_RL_PATTERNS_ADDR,
    959 				RESUME_RL_PATTERNS_SIZE, 0xFFFFFFFF, 0xFFFFFFFF);
    960 
    961 		/* Wait for previous transfer completion */
    962 
    963 		while (mv_xor_state_get(0) != MV_IDLE)
    964 			;
    965 
    966 		/* Return XOR State */
    967 		mv_sys_xor_finish();
    968 	}
    969 
    970 	return MV_OK;
    971 }
    972 #endif
    973 
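         /* Translate the DDR frequency index used by the training code into MHz and print it */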
    974 void ddr3_print_freq(u32 freq)
    975 {
    976 	u32 tmp_freq;
    977 
    978 	switch (freq) {
    979 	case 0:
    980 		tmp_freq = 100;
    981 		break;
    982 	case 1:
    983 		tmp_freq = 300;
    984 		break;
    985 	case 2:
    986 		tmp_freq = 360;
    987 		break;
    988 	case 3:
    989 		tmp_freq = 400;
    990 		break;
    991 	case 4:
    992 		tmp_freq = 444;
    993 		break;
    994 	case 5:
    995 		tmp_freq = 500;
    996 		break;
    997 	case 6:
    998 		tmp_freq = 533;
    999 		break;
   1000 	case 7:
   1001 		tmp_freq = 600;
   1002 		break;
   1003 	case 8:
   1004 		tmp_freq = 666;
   1005 		break;
   1006 	case 9:
   1007 		tmp_freq = 720;
   1008 		break;
   1009 	case 10:
   1010 		tmp_freq = 800;
   1011 		break;
   1012 	default:
   1013 		tmp_freq = 100;
   1014 	}
   1015 
   1016 	printf("Current frequency is: %dMHz\n", tmp_freq);
   1017 }
   1018 
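         /*
          * Scan the per-CS read sample delay fields of the given register value
          * (5 bits per CS, one byte apart) and return the minimum, the maximum
          * and the CS holding the maximum delay.
          */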
   1019 int ddr3_get_min_max_read_sample_delay(u32 cs_enable, u32 reg, u32 *min,
   1020 				       u32 *max, u32 *cs_max)
   1021 {
   1022 	u32 cs, delay;
   1023 
   1024 	*min = 0xFFFFFFFF;
   1025 	*max = 0x0;
   1026 
   1027 	for (cs = 0; cs < MAX_CS; cs++) {
   1028 		if ((cs_enable & (1 << cs)) == 0)
   1029 			continue;
   1030 
   1031 		delay = ((reg >> (cs * 8)) & 0x1F);
   1032 
   1033 		if (delay < *min)
   1034 			*min = delay;
   1035 
   1036 		if (delay > *max) {
   1037 			*max = delay;
   1038 			*cs_max = cs;
   1039 		}
   1040 	}
   1041 
   1042 	return MV_OK;
   1043 }
   1044 
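         /*
          * Return the minimum and maximum read leveling phase (bits [10:8] of
          * the RL PHY register) across all PUPs of the given CS.
          */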
   1045 int ddr3_get_min_max_rl_phase(MV_DRAM_INFO *dram_info, u32 *min, u32 *max,
   1046 			      u32 cs)
   1047 {
   1048 	u32 pup, reg, phase;
   1049 
   1050 	*min = 0xFFFFFFFF;
   1051 	*max = 0x0;
   1052 
   1053 	for (pup = 0; pup < dram_info->num_of_total_pups; pup++) {
   1054 		reg = ddr3_read_pup_reg(PUP_RL_MODE, cs, pup);
   1055 		phase = ((reg >> 8) & 0x7);
   1056 
   1057 		if (phase < *min)
   1058 			*min = phase;
   1059 
   1060 		if (phase > *max)
   1061 			*max = phase;
   1062 	}
   1063 
   1064 	return MV_OK;
   1065 }
   1066 
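         /*
          * Force ODT permanently on (activate != 0) or return it to normal
          * operation (activate == 0) via the Dunit ODT control override bits.
          */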
   1067 int ddr3_odt_activate(int activate)
   1068 {
   1069 	u32 reg, mask;
   1070 
   1071 	mask = (1 << REG_DUNIT_ODT_CTRL_OVRD_OFFS) |
   1072 		(1 << REG_DUNIT_ODT_CTRL_OVRD_VAL_OFFS);
   1073 	/* {0x0000149C}  -   DDR Dunit ODT Control Register */
   1074 	reg = reg_read(REG_DUNIT_ODT_CTRL_ADDR);
   1075 	if (activate)
   1076 		reg |= mask;
   1077 	else
   1078 		reg &= ~mask;
   1079 
   1080 	reg_write(REG_DUNIT_ODT_CTRL_ADDR, reg);
   1081 
   1082 	return MV_OK;
   1083 }
   1084 
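         /*
          * Program the dynamic read ODT on/off window in the DDR ODT Timing
          * (Low) register, based on the min/max read sample delays and the
          * maximum read leveling phase measured during training.
          */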
   1085 int ddr3_odt_read_dynamic_config(MV_DRAM_INFO *dram_info)
   1086 {
   1087 	u32 min_read_sample_delay, max_read_sample_delay, max_rl_phase;
   1088 	u32 min, max, cs_max;
   1089 	u32 cs_ena, reg;
   1090 
   1091 	reg = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
   1092 	cs_ena = ddr3_get_cs_ena_from_reg();
   1093 
   1094 	/* Get minimum and maximum of read sample delay of all CS */
   1095 	ddr3_get_min_max_read_sample_delay(cs_ena, reg, &min_read_sample_delay,
   1096 					   &max_read_sample_delay, &cs_max);
   1097 
   1098 	/*
   1099 	 * Get minimum and maximum read leveling phase which belongs to the
   1100 	 * maximal read sample delay
   1101 	 */
   1102 	ddr3_get_min_max_rl_phase(dram_info, &min, &max, cs_max);
   1103 	max_rl_phase = max;
   1104 
   1105 	/* DDR ODT Timing (Low) Register calculation */
   1106 	reg = reg_read(REG_ODT_TIME_LOW_ADDR);
   1107 	reg &= ~(0x1FF << REG_ODT_ON_CTL_RD_OFFS);
   1108 	reg |= (((min_read_sample_delay - 1) & 0xF) << REG_ODT_ON_CTL_RD_OFFS);
   1109 	reg |= (((max_read_sample_delay + 4 + (((max_rl_phase + 1) / 2) + 1)) &
   1110 		 0x1F) << REG_ODT_OFF_CTL_RD_OFFS);
   1111 	reg_write(REG_ODT_TIME_LOW_ADDR, reg);
   1112 
   1113 	return MV_OK;
   1114 }
   1115