/*
* Copyright (c) 2016-2021, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <arch.h>
#include <asm_macros.S>
.globl flush_dcache_range
.globl clean_dcache_range
.globl inv_dcache_range
.globl dcsw_op_louis
.globl dcsw_op_all
.globl dcsw_op_level1
.globl dcsw_op_level2
.globl dcsw_op_level3
/*
* This macro implements a data cache maintenance operation by MVA over
* an address range. `op` is a unique label suffix naming the operation;
* the remaining arguments identify the CP15 register through which the
* operation is issued. 'r0' = addr, 'r1' = size
*/
.macro do_dcache_maintenance_by_mva op, coproc, opc1, CRn, CRm, opc2
/* Exit early if size is zero */
cmp r1, #0
beq exit_loop_\op
dcache_line_size r2, r3
add r1, r0, r1
sub r3, r2, #1
bic r0, r0, r3
loop_\op:
stcopr r0, \coproc, \opc1, \CRn, \CRm, \opc2
add r0, r0, r2
cmp r0, r1
blo loop_\op
dsb sy
exit_loop_\op:
bx lr
.endm
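/*
 * Note: the callers below pass a single arch.h name (e.g. DCCIMVAC) in
 * place of the coproc, opc1, CRn, CRm and opc2 arguments. Since arch.h
 * is included above and this file is preprocessed, that name is
 * presumably expanded by the C preprocessor into the operand list
 * consumed by stcopr.
 */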
/* ------------------------------------------
* Clean and invalidate the data cache, by
* MVA, over the range [addr, addr + size).
* 'r0' = addr, 'r1' = size in bytes
* ------------------------------------------
*/
func flush_dcache_range
do_dcache_maintenance_by_mva cimvac, DCCIMVAC
endfunc flush_dcache_range
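/*
 * Illustrative caller sketch (not assembled here). The buffer register
 * and the 4KB size are arbitrary example values:
 *
 *   mov r0, r4              // r0 = base address of the buffer
 *   mov r1, #0x1000         // r1 = size in bytes (example)
 *   bl  flush_dcache_range  // clean + invalidate the range
 */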
/* ------------------------------------------
* Clean the data cache, by MVA, over the
* range [addr, addr + size).
* 'r0' = addr, 'r1' = size in bytes
* ------------------------------------------
*/
func clean_dcache_range
do_dcache_maintenance_by_mva cmvac, DCCMVAC
endfunc clean_dcache_range
/* ------------------------------------------
* Invalidate the data cache, by MVA, over
* the range [addr, addr + size).
* 'r0' = addr, 'r1' = size in bytes
* ------------------------------------------
*/
func inv_dcache_range
do_dcache_maintenance_by_mva imvac, DCIMVAC
endfunc inv_dcache_range
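/*
 * Note on inv_dcache_range: the start address is rounded down to a
 * cache line boundary and whole lines are invalidated, so if 'r0' or
 * 'r0 + r1' is not line aligned, dirty data sharing the first or last
 * line is discarded as well. Callers typically pass line-aligned
 * buffers for this reason.
 */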
/* ----------------------------------------------------------------
* Data cache operations by set/way to the level specified
*
* The main function, do_dcsw_op requires:
* r0: The operation type (DC_OP_ISW, DC_OP_CISW, DC_OP_CSW),
* as defined in arch.h
* r1: The cache level to begin the operation from, shifted left
* by LEVEL_SHIFT (i.e. in CSSELR format)
* r2: The value of CLIDR
* r3: The cache level to stop at (exclusive), in the same format
* and will carry out the operation on each data cache level in
* turn, from the level in r1 up to, but not including, the level
* in r3
*
* The dcsw_op macro sets up the r2 and r3 parameters based on the
* CLIDR cache information before invoking the main function
* ----------------------------------------------------------------
*/
.macro dcsw_op shift, fw, ls
ldcopr r2, CLIDR
ubfx r3, r2, \shift, \fw
lsl r3, r3, \ls
mov r1, #0
b do_dcsw_op
.endm
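/*
 * The macro above extracts a level field (LoUIS or LoC) from CLIDR,
 * converts it into the exclusive upper bound expected in r3, starts
 * the walk at the first cache level (r1 = 0) and then tail-branches
 * to do_dcsw_op, which returns directly to the original caller.
 */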
func do_dcsw_op
push {r4-r12, lr}
ldcopr r8, ID_MMFR4 // stash FEAT_CCIDX identifier in r8
ubfx r8, r8, #ID_MMFR4_CCIDX_SHIFT, #ID_MMFR4_CCIDX_LENGTH
adr r11, dcsw_loop_table // compute cache op based on the operation type
add r6, r11, r0, lsl #3 // cache op is 2x32-bit instructions
loop1:
add r10, r1, r1, LSR #1 // Work out 3x current cache level
mov r12, r2, LSR r10 // extract cache type bits from clidr
and r12, r12, #7 // mask the bits for current cache only
cmp r12, #2 // see what cache we have at this level
blo level_done // no cache or only instruction cache at this level
stcopr r1, CSSELR // select current cache level in csselr
isb // isb to sync CCSIDR with the new CSSELR selection
ldcopr r12, CCSIDR // read the new ccsidr
and r10, r12, #7 // extract the length of the cache lines
add r10, r10, #4 // add 4 (r10 = line length offset)
cmp r8, #0 // check for FEAT_CCIDX for Associativity
beq 1f
ubfx r4, r12, #3, #21 // r4 = associativity CCSIDR[23:3]
b 2f
1:
ubfx r4, r12, #3, #10 // r4 = associativity CCSIDR[12:3]
2:
clz r5, r4 // r5 = the bit position of the way size increment
mov r9, r4 // r9 working copy of the aligned max way number
loop2:
cmp r8, #0 // check for FEAT_CCIDX for NumSets
beq 3f
ldcopr r12, CCSIDR2 // FEAT_CCIDX numsets is in CCSIDR2
ubfx r7, r12, #0, #24 // r7 = numsets CCSIDR2[23:0]
b loop3
3:
ubfx r7, r12, #13, #15 // r7 = numsets CCSIDR[27:13]
loop3:
orr r0, r1, r9, LSL r5 // factor in the way number and cache level into r0
orr r0, r0, r7, LSL r10 // factor in the set number
blx r6
subs r7, r7, #1 // decrement the set number
bhs loop3
subs r9, r9, #1 // decrement the way number
bhs loop2
level_done:
add r1, r1, #2 // increment the cache number
cmp r3, r1
// Ensure completion of previous cache maintenance instruction. Note
// this also mitigates erratum 814220 on Cortex-A7
dsb sy
bhi loop1
mov r6, #0
stcopr r6, CSSELR // select cache level 0 in csselr
dsb sy
isb
pop {r4-r12, pc}
dcsw_loop_table:
stcopr r0, DCISW // DC ISW: invalidate by set/way
bx lr
stcopr r0, DCCISW // DC CISW: clean and invalidate by set/way
bx lr
stcopr r0, DCCSW // DC CSW: clean by set/way
bx lr
endfunc do_dcsw_op
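/*
 * For reference, the word written by the loop-table entries follows the
 * DC *SW set/way/level operand layout: bits [3:1] hold the level (taken
 * from r1), the set number is shifted into place by the line length
 * shift in r10, and the way number by the clz-derived shift in r5 so
 * that it occupies the top bits of the word.
 */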
/* ---------------------------------------------------------------
* Data cache operations by set/way, up to the Level of Unification
* Inner Shareable (LoUIS) reported by CLIDR.
*
* The function requires:
* r0: The operation type (DC_OP_ISW, DC_OP_CISW, DC_OP_CSW),
* as defined in arch.h
* ---------------------------------------------------------------
*/
func dcsw_op_louis
dcsw_op #LOUIS_SHIFT, #CLIDR_FIELD_WIDTH, #LEVEL_SHIFT
endfunc dcsw_op_louis
/* ---------------------------------------------------------------
* Data cache operations by set/way, up to the Level of Coherence
* (LoC) reported by CLIDR.
*
* The function requires:
* r0: The operation type (DC_OP_ISW, DC_OP_CISW, DC_OP_CSW),
* as defined in arch.h
* ---------------------------------------------------------------
*/
func dcsw_op_all
dcsw_op #LOC_SHIFT, #CLIDR_FIELD_WIDTH, #LEVEL_SHIFT
endfunc dcsw_op_all
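/*
 * Illustrative caller sketch (not assembled here), assuming the
 * DC_OP_* values from arch.h fit in an immediate operand:
 *
 *   mov r0, #DC_OP_CISW   // clean and invalidate by set/way
 *   bl  dcsw_op_all       // ... on every level up to the LoC
 */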
/* ---------------------------------------------------------------
* Helper macro for data cache operations by set/way for the
* level specified
* ---------------------------------------------------------------
*/
.macro dcsw_op_level level
ldcopr r2, CLIDR
mov r3, \level
sub r1, r3, #2
b do_dcsw_op
.endm
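/*
 * The level argument is expected to arrive already shifted by
 * LEVEL_SHIFT (see the callers below). Setting r1 to (\level - 2)
 * makes do_dcsw_op start one encoded level below the bound in r3, so
 * exactly one cache level is operated on.
 */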
/* ---------------------------------------------------------------
* Data cache operations by set/way for level 1 cache
*
* The main function, do_dcsw_op requires:
* r0: The operation type (DC_OP_ISW, DC_OP_CISW, DC_OP_CSW),
* as defined in arch.h
* ---------------------------------------------------------------
*/
func dcsw_op_level1
dcsw_op_level #(1 << LEVEL_SHIFT)
endfunc dcsw_op_level1
/* ---------------------------------------------------------------
* Data cache operations by set/way for level 2 cache
*
* The main function, do_dcsw_op requires:
* r0: The operation type (DC_OP_ISW, DC_OP_CISW, DC_OP_CSW),
* as defined in arch.h
* ---------------------------------------------------------------
*/
func dcsw_op_level2
dcsw_op_level #(2 << LEVEL_SHIFT)
endfunc dcsw_op_level2
/* ---------------------------------------------------------------
* Data cache operations by set/way for level 3 cache
*
* The main function, do_dcsw_op requires:
* r0: The operation type (DC_OP_ISW, DC_OP_CISW, DC_OP_CSW),
* as defined in arch.h
* ---------------------------------------------------------------
*/
func dcsw_op_level3
dcsw_op_level #(3 << LEVEL_SHIFT)
endfunc dcsw_op_level3