/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * (C) Copyright 2010
 * Texas Instruments, <www.ti.com>
 * Aneesh V <aneesh@ti.com>
 */
#ifndef ARMV7_H
#define ARMV7_H

/* Cortex-A9 revisions */
#define MIDR_CORTEX_A9_R0P1	0x410FC091
#define MIDR_CORTEX_A9_R1P2	0x411FC092
#define MIDR_CORTEX_A9_R1P3	0x411FC093
#define MIDR_CORTEX_A9_R2P10	0x412FC09A

/* Cortex-A15 revisions */
#define MIDR_CORTEX_A15_R0P0	0x410FC0F0
#define MIDR_CORTEX_A15_R2P2	0x412FC0F2

/* Cortex-A7 revisions */
#define MIDR_CORTEX_A7_R0P0	0x410FC070

#define MIDR_PRIMARY_PART_MASK	0xFF0FFFF0
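
#ifndef __ASSEMBLY__
/*
 * Illustrative sketch (not used elsewhere in this header): the mask
 * above clears the MIDR variant and revision fields, so a part can be
 * matched independently of its revision. The helper name below is
 * hypothetical; plain unsigned int is used because <linux/types.h>
 * is only included further down.
 */
static inline int v7_is_cortex_a15(void)
{
	unsigned int midr;

	/* Read the Main ID Register (MIDR) */
	asm volatile ("mrc p15, 0, %0, c0, c0, 0" : "=r" (midr));

	return (midr & MIDR_PRIMARY_PART_MASK) ==
	       (MIDR_CORTEX_A15_R0P0 & MIDR_PRIMARY_PART_MASK);
}
#endif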

/* ID_PFR1 feature fields */
#define CPUID_ARM_SEC_SHIFT		4
#define CPUID_ARM_SEC_MASK		(0xF << CPUID_ARM_SEC_SHIFT)
#define CPUID_ARM_VIRT_SHIFT		12
#define CPUID_ARM_VIRT_MASK		(0xF << CPUID_ARM_VIRT_SHIFT)
#define CPUID_ARM_GENTIMER_SHIFT	16
#define CPUID_ARM_GENTIMER_MASK		(0xF << CPUID_ARM_GENTIMER_SHIFT)
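
#ifndef __ASSEMBLY__
/*
 * Illustrative sketch: ID_PFR1 advertises the Security, Virtualization
 * and Generic Timer extensions in the fields masked above; a non-zero
 * field value means the extension is implemented. The helper name is
 * hypothetical.
 */
static inline int v7_has_virt_ext(void)
{
	unsigned int id_pfr1;

	/* Read Processor Feature Register 1 (ID_PFR1) */
	asm volatile ("mrc p15, 0, %0, c0, c1, 1" : "=r" (id_pfr1));

	return (id_pfr1 & CPUID_ARM_VIRT_MASK) != 0;
}
#endif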

/* valid bits in CBAR register / PERIPHBASE value */
#define CBAR_MASK			0xFFFF8000
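
#ifndef __ASSEMBLY__
/*
 * Illustrative sketch: on Cortex-A9/A15 class cores the CBAR register
 * reports PERIPHBASE, the base of the private peripheral space;
 * masking with CBAR_MASK drops the low bits, which do not belong to
 * the base address. The helper name is hypothetical.
 */
static inline unsigned int v7_get_periphbase(void)
{
	unsigned int cbar;

	/* Read the Configuration Base Address Register (CBAR) */
	asm volatile ("mrc p15, 4, %0, c15, c0, 0" : "=r" (cbar));

	return cbar & CBAR_MASK;
}
#endif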

/* CCSIDR */
#define CCSIDR_LINE_SIZE_OFFSET		0
#define CCSIDR_LINE_SIZE_MASK		0x7
#define CCSIDR_ASSOCIATIVITY_OFFSET	3
#define CCSIDR_ASSOCIATIVITY_MASK	(0x3FF << CCSIDR_ASSOCIATIVITY_OFFSET)
#define CCSIDR_NUM_SETS_OFFSET		13
#define CCSIDR_NUM_SETS_MASK		(0x7FFF << CCSIDR_NUM_SETS_OFFSET)
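
#ifndef __ASSEMBLY__
/*
 * Illustrative sketch: decoding the geometry of the cache currently
 * selected by CSSELR from its CCSIDR value. Associativity and number
 * of sets are encoded minus one; the line size field holds
 * log2(words per line) - 2, so the length in bytes is
 * 2 ^ (LineSize + 4). The helper name is hypothetical.
 */
static inline void v7_decode_ccsidr(unsigned int ccsidr,
				    unsigned int *line_len,
				    unsigned int *ways,
				    unsigned int *sets)
{
	*line_len = 1 << (((ccsidr >> CCSIDR_LINE_SIZE_OFFSET) &
			   CCSIDR_LINE_SIZE_MASK) + 4);
	*ways = ((ccsidr & CCSIDR_ASSOCIATIVITY_MASK) >>
		 CCSIDR_ASSOCIATIVITY_OFFSET) + 1;
	*sets = ((ccsidr & CCSIDR_NUM_SETS_MASK) >>
		 CCSIDR_NUM_SETS_OFFSET) + 1;
}
#endif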

/*
 * Values for the InD field in CSSELR:
 * selects the type of cache to inspect
 */
#define ARMV7_CSSELR_IND_DATA_UNIFIED	0
#define ARMV7_CSSELR_IND_INSTRUCTION	1

/* Values for Ctype fields in CLIDR */
#define ARMV7_CLIDR_CTYPE_NO_CACHE		0
#define ARMV7_CLIDR_CTYPE_INSTRUCTION_ONLY	1
#define ARMV7_CLIDR_CTYPE_DATA_ONLY		2
#define ARMV7_CLIDR_CTYPE_INSTRUCTION_DATA	3
#define ARMV7_CLIDR_CTYPE_UNIFIED		4
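
#ifndef __ASSEMBLY__
/*
 * Illustrative sketch: to inspect a particular cache, select it through
 * CSSELR (level in bits [3:1], InD in bit [0]) and then read CCSIDR,
 * with an ISB in between so the selection takes effect. Whether a cache
 * exists at that level can be checked first via its Ctype field in
 * CLIDR, using the values above. The helper name is hypothetical.
 */
static inline unsigned int v7_read_ccsidr_for_level(unsigned int level)
{
	unsigned int csselr = (level << 1) | ARMV7_CSSELR_IND_DATA_UNIFIED;
	unsigned int ccsidr;

	/* Select the cache in CSSELR, then read its CCSIDR */
	asm volatile ("mcr p15, 2, %0, c0, c0, 0" : : "r" (csselr));
	asm volatile ("isb" : : : "memory");
	asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (ccsidr));

	return ccsidr;
}
#endif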

#ifndef __ASSEMBLY__
#include <linux/types.h>
#include <asm/io.h>
#include <asm/barriers.h>

/* read L2 control register (L2CTLR) */
static inline uint32_t read_l2ctlr(void)
{
	uint32_t val = 0;

	asm volatile ("mrc p15, 1, %0, c9, c0, 2" : "=r" (val));

	return val;
}

/* write L2 control register (L2CTLR) */
static inline void write_l2ctlr(uint32_t val)
{
	/*
	 * Note: L2CTLR can only be written when the L2 memory system
	 * is idle, i.e. before the MMU is enabled.
	 */
	asm volatile("mcr p15, 1, %0, c9, c0, 2" : : "r" (val) : "memory");
	isb();
}
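
/*
 * Usage example (illustrative sketch): a typical read-modify-write of
 * L2CTLR using the two accessors above. The field layout assumed here
 * (data RAM latency in L2CTLR[2:0], as on Cortex-A15) and the helper
 * name are assumptions; consult the TRM of the specific core before
 * writing this register.
 */
static inline void set_l2_data_ram_latency(uint32_t latency)
{
	uint32_t val = read_l2ctlr();

	val &= ~0x7;		/* assumed: data RAM latency field [2:0] */
	val |= latency & 0x7;
	write_l2ctlr(val);	/* only valid while the L2 is idle */
}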

/*
 * Workaround for ARM erratum 798870:
 * set L2ACTLR[7] so that any memory transaction in the L2 that has been
 * stalled for 1024 cycles is reissued, to verify that its hazard
 * condition still exists.
 */
static inline void v7_enable_l2_hazard_detect(void)
{
	uint32_t val;

	/* L2ACTLR[7]: enable hazard detect timeout */
	asm volatile ("mrc p15, 1, %0, c15, c0, 0" : "=r"(val));
	val |= (1 << 7);
	asm volatile ("mcr p15, 1, %0, c15, c0, 0" : : "r"(val));
}

/*
 * Workaround for ARM erratum 799270:
 * ensure that the L2 logic has been used within the previous 256 cycles
 * before modifying the ACTLR.SMP bit. This is required during boot,
 * before the MMU has been enabled, or during a specified reset or
 * power-down sequence.
 */
static inline void v7_enable_smp(uint32_t address)
{
	uint32_t temp, val;

	/* Read the Auxiliary Control Register (ACTLR) */
	asm volatile ("mrc p15, 0, %0, c1, c0, 1" : "=r"(val));

	/* Enable SMP: ACTLR[6] */
	val |= (1 << 6);

	/* Dummy read to ensure the L2 logic was used recently */
	temp = readl(address);
	/*
	 * Fold the (zeroed) read result into val so the compiler cannot
	 * optimise the dummy read away.
	 */
	temp &= 0;
	val |= temp;

	/* Write back the Auxiliary Control Register */
	asm volatile ("mcr p15, 0, %0, c1, c0, 1" : : "r"(val));

	CP15DSB;
	CP15ISB;
}
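
/*
 * Usage example (illustrative sketch): the dummy read inside
 * v7_enable_smp() must target an address that is safely readable this
 * early in boot; which address satisfies the erratum is platform
 * specific. Both the helper name and the address below are made up.
 */
static inline void v7_enable_smp_example(void)
{
	v7_enable_smp(0x40000000);	/* hypothetical readable address */
}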

void v7_en_l2_hazard_detect(void);
void v7_outer_cache_enable(void);
void v7_outer_cache_disable(void);
void v7_outer_cache_flush_all(void);
void v7_outer_cache_inval_all(void);
void v7_outer_cache_flush_range(u32 start, u32 end);
void v7_outer_cache_inval_range(u32 start, u32 end);

#ifdef CONFIG_ARMV7_NONSEC

int armv7_init_nonsec(void);
int armv7_apply_memory_carveout(u64 *start, u64 *size);
bool armv7_boot_nonsec(void);

/* defined in assembly file */
unsigned int _nonsec_init(void);
void _do_nonsec_entry(void *target_pc, unsigned long r0,
		      unsigned long r1, unsigned long r2);
void _smp_pen(void);

extern char __secure_start[];
extern char __secure_end[];
extern char __secure_stack_start[];
extern char __secure_stack_end[];

#endif /* CONFIG_ARMV7_NONSEC */

void v7_arch_cp15_set_l2aux_ctrl(u32 l2auxctrl, u32 cpu_midr,
				 u32 cpu_rev_comb, u32 cpu_variant,
				 u32 cpu_rev);
void v7_arch_cp15_set_acr(u32 acr, u32 cpu_midr, u32 cpu_rev_comb,
			  u32 cpu_variant, u32 cpu_rev);
#endif /* ! __ASSEMBLY__ */

#endif /* ARMV7_H */