/*
 * (C) Copyright 2013
 * David Feng <[email protected]>
 *
 * This file is based on sample code from ARMv8 ARM.
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <asm-offsets.h>
#include <config.h>
#include <asm/macro.h>
#include <linux/linkage.h>

/*
 * void __asm_flush_dcache_level(level, invalidate_only)
 *
 * Clean and/or invalidate one level of data cache by set/way.
 *
 * x0: cache level to operate on
 * x1: 0 flush & invalidate, 1 invalidate only (only bit 0 is tested)
 * x2~x9, x12: clobbered
 */
ENTRY(__asm_flush_dcache_level)
	lsl	x12, x0, #1
	msr	csselr_el1, x12		/* select cache level */
	isb				/* sync change of csselr_el1 */
	mrs	x6, ccsidr_el1		/* read the new ccsidr_el1 */
	and	x2, x6, #7		/* x2 <- log2(cache line size)-4 */
	add	x2, x2, #4		/* x2 <- log2(cache line size) */
	mov	x3, #0x3ff
	and	x3, x3, x6, lsr #3	/* x3 <- number of ways - 1 */
	clz	w5, w3			/* x5 <- bit position of way field */
	mov	x4, #0x7fff
	and	x4, x4, x6, lsr #13	/* x4 <- number of sets - 1 */
	/* x12 <- cache level << 1 */
	/* x2 <- line length offset */
	/* x3 <- number of cache ways - 1 */
	/* x4 <- number of cache sets - 1 */
	/* x5 <- bit position of #ways */

loop_set:
	mov	x6, x3			/* x6 <- working copy of #ways */
loop_way:
	lsl	x7, x6, x5
	orr	x9, x12, x7		/* map way and level to cisw value */
	lsl	x7, x4, x2
	orr	x9, x9, x7		/* map set number to cisw value */
	tbz	w1, #0, 1f		/* bit0 of x1 clear -> clean+inval */
	dc	isw, x9			/* invalidate by set/way */
	b	2f
1:	dc	cisw, x9		/* clean & invalidate by set/way */
2:	subs	x6, x6, #1		/* decrement the way */
	b.ge	loop_way
	subs	x4, x4, #1		/* decrement the set */
	b.ge	loop_set

	ret
ENDPROC(__asm_flush_dcache_level)
60 | ||
/*
 * void __asm_dcache_all(int invalidate_only)
 *
 * x0: 0 flush & invalidate, 1 invalidate only
 *
 * Clean and/or invalidate all data cache levels, up to the Level of
 * Coherency reported by CLIDR_EL1, by SET/WAY.
 *
 * Clobbers x0-x12 and x15 (lr is saved in x15 across the bl below).
 */
ENTRY(__asm_dcache_all)
	mov	x1, x0			/* x1 <- invalidate_only flag */
	dsb	sy			/* complete outstanding accesses first */
	mrs	x10, clidr_el1		/* read clidr_el1 */
	lsr	x11, x10, #24
	and	x11, x11, #0x7		/* x11 <- loc (Level of Coherency) */
	cbz	x11, finished		/* if loc is 0, exit */
	mov	x15, lr			/* save return address; bl clobbers lr */
	mov	x0, #0			/* start flush at cache level 0 */
	/* x0  <- cache level */
	/* x10 <- clidr_el1 */
	/* x11 <- loc */
	/* x15 <- return address */

loop_level:
	lsl	x12, x0, #1
	add	x12, x12, x0		/* x12 <- tripled cache level */
	lsr	x12, x10, x12
	and	x12, x12, #7		/* x12 <- cache type for this level */
	cmp	x12, #2
	b.lt	skip			/* skip if no cache or icache only */
	bl	__asm_flush_dcache_level /* x1 = 0 flush, 1 invalidate */
skip:
	add	x0, x0, #1		/* increment cache level */
	cmp	x11, x0
	b.gt	loop_level

	mov	x0, #0
	msr	csselr_el1, x0		/* restore csselr_el1 */
	dsb	sy
	isb
	mov	lr, x15			/* restore saved return address */

finished:
	ret
ENDPROC(__asm_dcache_all)
104 | ||
/*
 * void __asm_flush_dcache_all(void)
 *
 * Clean & invalidate all dcache levels (invalidate_only = 0).
 */
ENTRY(__asm_flush_dcache_all)
	mov	x16, lr			/* save lr; __asm_dcache_all clobbers it */
	mov	x0, #0			/* 0 = flush & invalidate */
	bl	__asm_dcache_all
	mov	lr, x16
	ret
ENDPROC(__asm_flush_dcache_all)
112 | ||
/*
 * void __asm_invalidate_dcache_all(void)
 *
 * Invalidate all dcache levels without cleaning.
 */
ENTRY(__asm_invalidate_dcache_all)
	mov	x16, lr			/* save lr; __asm_dcache_all clobbers it */
	mov	x0, #0xffff		/* odd value: bit 0 set = invalidate only */
	bl	__asm_dcache_all
	mov	lr, x16
	ret
ENDPROC(__asm_invalidate_dcache_all)
120 | ||
/*
 * void __asm_flush_dcache_range(start, end)
 *
 * clean & invalidate data cache in the range [start, end) by VA
 *
 * x0: start address (rounded down to a cache-line boundary)
 * x1: end address (exclusive)
 * x2, x3: clobbered
 */
ENTRY(__asm_flush_dcache_range)
	mrs	x3, ctr_el0
	lsr	x3, x3, #16
	and	x3, x3, #0xf		/* x3 <- CTR_EL0.DminLine (log2 words) */
	mov	x2, #4
	lsl	x2, x2, x3		/* cache line size in bytes */

	/* x2 <- minimal cache line size in cache system */
	sub	x3, x2, #1
	bic	x0, x0, x3		/* align start down to line boundary */
1:	dc	civac, x0		/* clean & invalidate data or unified cache */
	add	x0, x0, x2
	cmp	x0, x1
	b.lo	1b
	dsb	sy			/* ensure completion before returning */
	ret
ENDPROC(__asm_flush_dcache_range)
146 | ||
/*
 * void __asm_invalidate_icache_all(void)
 *
 * invalidate all icache entries (to PoU, Inner Shareable domain).
 */
ENTRY(__asm_invalidate_icache_all)
	ic	ialluis			/* icache invalidate all, inner shareable */
	isb	sy			/* flush the pipeline after invalidation */
	ret
ENDPROC(__asm_invalidate_icache_all)
dcd468b8 YS |
/*
 * int __asm_flush_l3_cache(void)
 *
 * Weak default stub: no outer/L3 cache to flush; platforms with an
 * external L3 cache override this symbol.
 */
ENTRY(__asm_flush_l3_cache)
	mov	x0, #0			/* return status as success */
	ret
ENDPROC(__asm_flush_l3_cache)
	.weak	__asm_flush_l3_cache