/*
 * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <platform_def.h>
#include <pmu_regs.h>

	.globl	clst_warmboot_data

/*
 * Emit a function into the dedicated ".sram.text" section so it can run
 * from on-chip SRAM while DDR is unavailable (in self-refresh).
 * Closed by the generic "endfunc" macro from asm_macros.S, which supplies
 * the matching .cfi_endproc/.endfunc.
 */
.macro sram_func _name
	.cfi_sections .debug_frame
	.section .sram.text, "ax"
	.type \_name, %function
	.func \_name
	.cfi_startproc
	\_name:
.endm

/* Offset of CRU_CLKSEL_CON6 (clk_ddrc source select) from CRU_BASE */
#define CRU_CLKSEL_CON6		0x118

/* PMU_SFT_CON bits: software idle request to DDR controller 0 / 1 */
#define DDRCTL0_C_SYSREQ_CFG	0x0100
#define DDRCTL1_C_SYSREQ_CFG	0x1000

/* PMU_DDR_SREF_ST bits: self-refresh status of DDR controller 0 / 1 */
#define DDRC0_SREF_DONE_EXT	0x01
#define DDRC1_SREF_DONE_EXT	0x04

/*
 * Hi-word-masked write value that puts a PLL back into normal mode:
 * the upper 16 bits are the write-enable mask for the mode field, the
 * lower halfword holds the new mode value (0x1 = normal).
 */
#define PLL_MODE_SHIFT		(0x8)
#define PLL_NORMAL_MODE		((0x3 << (PLL_MODE_SHIFT + 16)) | \
				(0x1 << PLL_MODE_SHIFT))
#define MPIDR_CLST_L_BITS	0x0

/*
 * Per-SoC warmboot speed-up hook: if a cluster was suspended with its
 * PLL forced into slow (24 MHz) mode, switch the related clock back to
 * normal mode before the cluster resumes.
 * X0: MPIDR_EL1 & MPIDR_CLUSTER_MASK
 * Clobbers: x2 - x7
 */
.macro func_rockchip_clst_warmboot
	adr	x4, clst_warmboot_data
	/* x5 = byte offset of this cluster's .word slot (AFF1 bit 8 >> 6 = id*4) */
	lsr	x5, x0, #6
	ldr	w3, [x4, x5]
	/* consume the flag so the next warmboot skips this path */
	str	wzr, [x4, x5]
	/* only restore the PLL if this cluster actually entered retention */
	cmp	w3, #PMU_CLST_RET
	b.ne	clst_warmboot_end
	ldr	w6, =(PLL_NORMAL_MODE)
	/*
	 * core_l offset is CRU_BASE + 0xc,
	 * core_b offset is CRU_BASE + 0x2c
	 * (x0 >> 3 yields 0x0 or 0x20, selecting the per-cluster register)
	 */
	ldr	x7, =(CRU_BASE + 0xc)
	lsr	x2, x0, #3
	str	w6, [x7, x2]
clst_warmboot_end:
.endm

/*
 * One status word per cluster, read/cleared by func_rockchip_clst_warmboot
 * above.  NOTE(review): presumably set to PMU_CLST_RET by the suspend
 * path elsewhere in the platform code -- confirm against the caller.
 */
.macro rockchip_clst_warmboot_data
clst_warmboot_data:
	.rept	PLATFORM_CLUSTER_COUNT
	.word	0
	.endr
.endm

/* -----------------------------------------------
 * void sram_func_set_ddrctl_pll(uint32_t pll_src)
 * Function to switch the PLL source for ddrctrl.
 * Must execute from SRAM: DDR is held in self-refresh
 * while its clock source is switched, so nothing here
 * may touch DRAM.  The MMU is disabled for the duration,
 * presumably because the translation tables live in
 * DRAM -- TODO confirm.
 * In: x0 - The PLL of the clk_ddrc clock source
 * out: None
 * Clobber list : x0 - x3, x5, x8 - x10
 * -----------------------------------------------
 */

	.globl sram_func_set_ddrctl_pll

sram_func sram_func_set_ddrctl_pll
	/* backup parameter (x0 is reused as scratch below) */
	mov	x8, x0

	/* disable the MMU at EL3; x9 keeps the original SCTLR_EL3 */
	mrs	x9, sctlr_el3
	bic	x10, x9, #(SCTLR_M_BIT)
	msr	sctlr_el3, x10
	isb
	dsb	sy

	/* enable ddrctl0_1 idle request */
	mov	x5, PMU_BASE
	ldr	w0, [x5, #PMU_SFT_CON]
	orr	w0, w0, #DDRCTL0_C_SYSREQ_CFG
	orr	w0, w0, #DDRCTL1_C_SYSREQ_CFG
	str	w0, [x5, #PMU_SFT_CON]

	/*
	 * Wait for the controllers to reach self-refresh.
	 * NOTE(review): this loop spins while BOTH DONE_EXT bits are set and
	 * falls through as soon as either clears, so the status bits appear
	 * to be active-low with respect to "in self-refresh" -- confirm the
	 * PMU_DDR_SREF_ST polarity against the RK3399 TRM.
	 */
check_ddrc0_1_sref_enter:
	ldr	w1, [x5, #PMU_DDR_SREF_ST]
	and	w2, w1, #DDRC0_SREF_DONE_EXT
	and	w3, w1, #DDRC1_SREF_DONE_EXT
	orr	w2, w2, w3
	cmp	w2, #(DDRC0_SREF_DONE_EXT | DDRC1_SREF_DONE_EXT)
	b.eq	check_ddrc0_1_sref_enter

	/*
	 * select a PLL for ddrctrl:
	 * x0 = 0: ALPLL
	 * x0 = 1: ABPLL
	 * x0 = 2: DPLL
	 * x0 = 3: GPLL
	 */
	mov	x5, CRU_BASE
	/* pll_src goes into bits [5:4]; 0x0030 << 16 is the hi-word write mask */
	lsl	w0, w8, #4
	orr	w0, w0, #0x00300000
	str	w0, [x5, #CRU_CLKSEL_CON6]

	/* disable ddrctl0_1 idle request */
	mov	x5, PMU_BASE
	ldr	w0, [x5, #PMU_SFT_CON]
	bic	w0, w0, #DDRCTL0_C_SYSREQ_CFG
	bic	w0, w0, #DDRCTL1_C_SYSREQ_CFG
	str	w0, [x5, #PMU_SFT_CON]

	/*
	 * Wait for the controllers to leave self-refresh: spin while both
	 * status bits are clear (same polarity caveat as the enter loop).
	 */
check_ddrc0_1_sref_exit:
	ldr	w1, [x5, #PMU_DDR_SREF_ST]
	and	w2, w1, #DDRC0_SREF_DONE_EXT
	and	w3, w1, #DDRC1_SREF_DONE_EXT
	orr	w2, w2, w3
	cmp	w2, #0x0
	b.eq	check_ddrc0_1_sref_exit

	/* reenable the MMU at EL3 by restoring the saved SCTLR_EL3 */
	msr	sctlr_el3, x9
	isb
	dsb	sy

	ret
endfunc sram_func_set_ddrctl_pll