/*
	L2CR functions
	Copyright © 1997-1998 by PowerLogix R & D, Inc.

	This program is free software; you can redistribute it and/or modify
	it under the terms of the GNU General Public License as published by
	the Free Software Foundation; either version 2 of the License, or
	(at your option) any later version.

	This program is distributed in the hope that it will be useful,
	but WITHOUT ANY WARRANTY; without even the implied warranty of
	MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
	GNU General Public License for more details.

	You should have received a copy of the GNU General Public License
	along with this program; if not, write to the Free Software
	Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
/*
	Thur, Dec. 12, 1998.
	- First public release, contributed by PowerLogix.
	***********
	Sat, Aug. 7, 1999.
	- Terry: Made sure code disabled interrupts before running. (Previously
			it was assumed interrupts were already disabled).
	- Terry: Updated for tentative G4 support.  4MB of memory is now flushed
			instead of 2MB.  (Prob. only 3 is necessary).
	- Terry: Updated for workaround to HID0[DPM] processor bug
			during global invalidates.
	***********
	Thu, July 13, 2000.
	- Terry: Added isync to correct for an errata.

	22 August 2001.
	- DanM: Finally added the 7450 patch I've had for the past
		several months.  The L2CR is similar, but I'm going
		to assume the user of these functions knows what they
		are doing.

	Author: Terry Greeniaus ([email protected])
	Please e-mail updates to this file to me, thanks!
*/
#include <asm/processor.h>
#include <asm/cputable.h>
#include <asm/ppc_asm.h>
#include <asm/cache.h>
#include <asm/page.h>

/* Usage:

	When setting the L2CR register, you must do a few special
	things.  If you are enabling the cache, you must perform a
	global invalidate.  If you are disabling the cache, you must
	flush the cache contents first.  This routine takes care of
	doing these things.  When first enabling the cache, make sure
	you pass in the L2CR you want, as well as passing in the
	global invalidate bit set.  A global invalidate will only be
	performed if the L2I bit is set in applyThis.  When enabling
	the cache, you should also set the L2E bit in applyThis.  If
	you want to modify the L2CR contents after the cache has been
	enabled, the recommended procedure is to first call
	__setL2CR(0) to disable the cache and then call it again with
	the new values for L2CR.  Examples:

	_setL2CR(0)		- disables the cache
	_setL2CR(0xB3A04000)	- enables my G3 upgrade card:
				- L2E set to turn on the cache
				- L2SIZ set to 1MB
				- L2CLK set to 1:1
				- L2RAM set to pipelined synchronous late-write
				- L2I set to perform a global invalidation
				- L2OH set to 0.5 nS
				- L2DF set because this upgrade card
				  requires it

	A similar call should work for your card.  You need to know
	the correct settings for your card and then place them in the
	fields I have outlined above.  Other fields support optional
	features, such as L2DO, which caches only data, or L2TS, which
	causes cache pushes from the L1 cache to go to the L2 cache
	instead of to main memory.

IMPORTANT:
	Starting with the 7450, the bits in this register have moved
	or behave differently.  The Enable, Parity Enable, Size,
	and L2 Invalidate are the only bits that have not moved.
	The size is read-only for these processors with internal L2
	cache, and the invalidate is a control as well as status.
		-- Dan

*/
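
/* A minimal C-side usage sketch (the prototypes here are an assumption --
 * check the powerpc headers for the real declarations):
 *
 *	extern void _set_L2CR(unsigned long val);
 *	extern unsigned long _get_L2CR(void);
 *
 *	_set_L2CR(0);			flush and disable the L2
 *	_set_L2CR(0xB3A04000);		configure, invalidate and enable
 *					(L2E and L2I set in the value)
 *	val = _get_L2CR();		read back the resulting L2CR
 */
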
/*
 * Summary: this procedure ignores the L2I bit in the value passed in,
 * flushes the cache if it was already enabled, always invalidates the
 * cache, then enables the cache if the L2E bit is set in the value
 * passed in.
 *	-- paulus.
 */
_GLOBAL(_set_L2CR)
	/* Make sure this is a 750 or 7400 chip */
BEGIN_FTR_SECTION
	li	r3,-1
	blr
END_FTR_SECTION_IFCLR(CPU_FTR_L2CR)

	mflr	r9

	/* Stop DST streams */
BEGIN_FTR_SECTION
	DSSALL
	sync
END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)

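	/* Note on the rlwinm masks used below: PowerPC numbers bits 0..31
	 * starting at the MSB, and "rlwinm rD,rS,0,MB,ME" keeps only bits
	 * MB..ME (the mask wraps past bit 31).  So a mask of 17,15 clears
	 * just bit 16 (MSR[EE]) and 28,26 clears just bit 27 (MSR[DR]).
	 */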
	/* Turn off interrupts and data relocation. */
	mfmsr	r7		/* Save MSR in r7 */
	rlwinm	r4,r7,0,17,15	/* Turn off EE bit */
	rlwinm	r4,r4,0,28,26	/* Turn off DR bit */
	sync
	mtmsr	r4
	isync

	/* Before we perform the global invalidation, we must disable dynamic
	 * power management via HID0[DPM] to work around a processor bug where
	 * DPM can possibly interfere with the state machine in the processor
	 * that invalidates the L2 cache tags.
	 */
	mfspr	r8,SPRN_HID0		/* Save HID0 in r8 */
	rlwinm	r4,r8,0,12,10		/* Turn off HID0[DPM] */
	sync
	mtspr	SPRN_HID0,r4		/* Disable DPM */
	sync

	/* Get the current enable bit of the L2CR into r4 */
	mfspr	r4,SPRN_L2CR

	/* Tweak some bits */
	rlwinm	r5,r3,0,0,0		/* r5 contains the new enable bit */
	rlwinm	r3,r3,0,11,9		/* Turn off the invalidate bit */
	rlwinm	r3,r3,0,1,31		/* Turn off the enable bit */
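	/* (In this layout L2E is bit 0, i.e. 0x80000000, and L2I is bit 10,
	 * i.e. 0x00200000 -- which is why the global invalidate below is
	 * requested with "oris r3,r3,0x0020".)
	 */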

	/* Check to see if we need to flush */
	rlwinm.	r4,r4,0,0,0
	beq	2f

	/* Flush the cache. First, read the first 4MB of memory (physical) to
	 * put new data in the cache.  (Actually we only need
	 * the size of the L2 cache plus the size of the L1 cache, but 4MB will
	 * cover everything just to be safe).
	 */

	/**** Might be a good idea to set L2DO here - to prevent instructions
	      from getting into the cache.  But since we invalidate
	      the next time we enable the cache it doesn't really matter.
	      Don't do this unless you accommodate all processor variations.
	      The bit moved on the 7450.....
	 ****/

BEGIN_FTR_SECTION
	/* Disable L2 prefetch on some 745x and try to ensure
	 * L2 prefetch engines are idle.  As explained by errata
	 * text, we can't be sure they are, we just hope very hard
	 * that well be enough (sic !).  At least I noticed Apple
	 * doesn't even bother doing the dcbf's here...
	 */
	mfspr	r4,SPRN_MSSCR0
	rlwinm	r4,r4,0,0,29	/* Clear the two L2 prefetch enable bits */
	sync
	mtspr	SPRN_MSSCR0,r4
	sync
	isync
	lis	r4,KERNELBASE@h
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
END_FTR_SECTION_IFSET(CPU_FTR_SPEC7450)

	/* TODO: use HW flush assist when available */

	lis	r4,0x0002		/* 0x20000 cache lines * 32B = 4MB */
	mtctr	r4
	li	r4,0
1:
	lwzx	r0,r0,r4
	addi	r4,r4,32		/* Go to start of next cache line */
	bdnz	1b
	isync

	/* Now, flush the first 4MB of memory */
	lis	r4,0x0002
	mtctr	r4
	li	r4,0
	sync
1:
	dcbf	0,r4
	addi	r4,r4,32		/* Go to start of next cache line */
	bdnz	1b

2:
	/* Set up the L2CR configuration bits (and switch L2 off) */
	/* CPU errata: Make sure the mtspr below is already in the
	 * L1 icache
	 */
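	/* (The branch dance below first executes the branches that sit in
	 * the same aligned 32-byte line as the "22:" sequence; fetching them
	 * pulls that whole line, mtspr included, into the L1 icache before
	 * the mtspr itself runs.)
	 */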
	b	20f
	.balign	L1_CACHE_BYTES
22:
	sync
	mtspr	SPRN_L2CR,r3
	sync
	b	23f
20:
	b	21f
21:	sync
	isync
	b	22b

23:
	/* Perform a global invalidation */
	oris	r3,r3,0x0020
	sync
	mtspr	SPRN_L2CR,r3
	sync
	isync			/* For errata */

BEGIN_FTR_SECTION
	/* On the 7450, we wait for the L2I bit to clear......
	 */
10:	mfspr	r3,SPRN_L2CR
	andis.	r4,r3,0x0020
	bne	10b
	b	11f
END_FTR_SECTION_IFSET(CPU_FTR_SPEC7450)

	/* Wait for the invalidation to complete */
3:	mfspr	r3,SPRN_L2CR
	rlwinm.	r4,r3,0,31,31	/* Test L2CR[L2IP] (invalidate in progress) */
	bne	3b

11:	rlwinm	r3,r3,0,11,9	/* Turn off the L2I bit */
	sync
	mtspr	SPRN_L2CR,r3
	sync

	/* See if we need to enable the cache */
	cmplwi	r5,0
	beq	4f

	/* Enable the cache */
	oris	r3,r3,0x8000
	mtspr	SPRN_L2CR,r3
	sync

	/* Enable L2 HW prefetch on 744x/745x */
BEGIN_FTR_SECTION
	mfspr	r3,SPRN_MSSCR0
	ori	r3,r3,3
	sync
	mtspr	SPRN_MSSCR0,r3
	sync
	isync
END_FTR_SECTION_IFSET(CPU_FTR_SPEC7450)
4:

	/* Restore HID0[DPM] to whatever it was before */
	sync
	mtspr	SPRN_HID0,r8
	sync

	/* Restore MSR (restores EE and DR bits to original state) */
	SYNC
	mtmsr	r7
	isync

	mtlr	r9
	blr

_GLOBAL(_get_L2CR)
	/* Return the L2CR contents */
	li	r3,0
BEGIN_FTR_SECTION
	mfspr	r3,SPRN_L2CR
END_FTR_SECTION_IFSET(CPU_FTR_L2CR)
	blr


/*
 * Here is a similar routine for dealing with the L3 cache
 * on the 745x family of chips
 */
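
/* As with _set_L2CR above, a rough C-side sketch (the prototypes are an
 * assumption -- check the powerpc headers):
 *
 *	extern void _set_L3CR(unsigned long val);
 *	extern unsigned long _get_L3CR(void);
 *
 *	_set_L3CR(0);			flush and disable the L3
 *	_set_L3CR(val | L3CR_L3E);	configure and enable the L3
 */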

_GLOBAL(_set_L3CR)
	/* Make sure this is a 745x chip */
BEGIN_FTR_SECTION
	li	r3,-1
	blr
END_FTR_SECTION_IFCLR(CPU_FTR_L3CR)

	/* Turn off interrupts and data relocation. */
	mfmsr	r7		/* Save MSR in r7 */
	rlwinm	r4,r7,0,17,15	/* Turn off EE bit */
	rlwinm	r4,r4,0,28,26	/* Turn off DR bit */
	sync
	mtmsr	r4
	isync

	/* Stop DST streams */
	DSSALL
	sync

	/* Get the current enable bit of the L3CR into r4 */
	mfspr	r4,SPRN_L3CR

	/* Tweak some bits */
	rlwinm	r5,r3,0,0,0		/* r5 contains the new enable bit */
	rlwinm	r3,r3,0,22,20		/* Turn off the invalidate bit */
	rlwinm	r3,r3,0,2,31		/* Turn off the enable & PE bits */
	rlwinm	r3,r3,0,5,3		/* Turn off the clken bit */
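	/* (Here L3E is bit 0, L3PE bit 1, L3CLKEN bit 4 and L3I bit 21,
	 * i.e. 0x00000400 -- hence the "ori r3,r3,0x0400" used for the
	 * global invalidate below.)
	 */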
	/* Check to see if we need to flush */
	rlwinm.	r4,r4,0,0,0
	beq	2f

	/* Flush the cache.
	 */

	/* TODO: use HW flush assist */

	lis	r4,0x0008		/* 0x80000 cache lines * 32B = 16MB */
	mtctr	r4
	li	r4,0
1:
	lwzx	r0,r0,r4
	dcbf	0,r4
	addi	r4,r4,32		/* Go to start of next cache line */
	bdnz	1b

2:
	/* Set up the L3CR configuration bits (and switch L3 off) */
	sync
	mtspr	SPRN_L3CR,r3
	sync

	oris	r3,r3,L3CR_L3RES@h	/* Set reserved bit 5 */
	mtspr	SPRN_L3CR,r3
	sync
	oris	r3,r3,L3CR_L3CLKEN@h	/* Set clken */
	mtspr	SPRN_L3CR,r3
	sync

	/* Wait for stabilize */
	li	r0,256
	mtctr	r0
1:	bdnz	1b

	/* Perform a global invalidation */
	ori	r3,r3,0x0400
	sync
	mtspr	SPRN_L3CR,r3
	sync
	isync

	/* We wait for the L3I bit to clear...... */
10:	mfspr	r3,SPRN_L3CR
	andi.	r4,r3,0x0400
	bne	10b

	/* Clear CLKEN */
	rlwinm	r3,r3,0,5,3	/* Turn off the clken bit */
	mtspr	SPRN_L3CR,r3
	sync

	/* Wait for stabilize */
	li	r0,256
	mtctr	r0
1:	bdnz	1b

	/* See if we need to enable the cache */
	cmplwi	r5,0
	beq	4f

	/* Enable the cache */
	oris	r3,r3,(L3CR_L3E | L3CR_L3CLKEN)@h
	mtspr	SPRN_L3CR,r3
	sync

	/* Wait for stabilize */
	li	r0,256
	mtctr	r0
1:	bdnz	1b

	/* Restore MSR (restores EE and DR bits to original state) */
4:	SYNC
	mtmsr	r7
	isync
	blr

_GLOBAL(_get_L3CR)
	/* Return the L3CR contents */
	li	r3,0
BEGIN_FTR_SECTION
	mfspr	r3,SPRN_L3CR
END_FTR_SECTION_IFSET(CPU_FTR_L3CR)
	blr

/* --- End of PowerLogix code ---
 */


/* flush_disable_L1()	- Flush and disable L1 cache
 *
 * clobbers r0, r3, ctr, cr0
 * Must be called with interrupts disabled and MMU enabled.
 */
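/* (The flush below uses the usual displacement trick: read a region that
 * is larger than the cache so the existing lines are cast out, then dcbf
 * the same region so anything still dirty is pushed out to memory.)
 */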
_GLOBAL(__flush_disable_L1)
	/* Stop pending altivec streams and memory accesses */
BEGIN_FTR_SECTION
	DSSALL
END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
	sync

	/* Load counter to 0x4000 cache lines (512k) and
	 * load cache with data
	 */
	li	r3,0x4000	/* 512kB / 32B */
	mtctr	r3
	lis	r3,KERNELBASE@h
1:
	lwz	r0,0(r3)
	addi	r3,r3,0x0020	/* Go to start of next cache line */
	bdnz	1b
	isync
	sync

	/* Now flush those cache lines */
	li	r3,0x4000	/* 512kB / 32B */
	mtctr	r3
	lis	r3,KERNELBASE@h
1:
	dcbf	0,r3
	addi	r3,r3,0x0020	/* Go to start of next cache line */
	bdnz	1b
	sync

	/* We can now disable the L1 cache (HID0:DCE, HID0:ICE) */
	mfspr	r3,SPRN_HID0
	rlwinm	r3,r3,0,18,15	/* Clear HID0[ICE] and HID0[DCE] */
	mtspr	SPRN_HID0,r3
	sync
	isync
	blr

/* inval_enable_L1	- Invalidate and enable L1 cache
 *
 * Assumes L1 is already disabled and MSR:EE is off
 *
 * clobbers r3
 */
_GLOBAL(__inval_enable_L1)
	/* Enable and then Flash inval the instruction & data cache */
	mfspr	r3,SPRN_HID0
	ori	r3,r3, HID0_ICE|HID0_ICFI|HID0_DCE|HID0_DCI
	sync
	isync
	mtspr	SPRN_HID0,r3
	xori	r3,r3, HID0_ICFI|HID0_DCI	/* Clear the flash-invalidate bits again */
	mtspr	SPRN_HID0,r3
	sync

	blr
