]>
Commit | Line | Data |
---|---|---|
2a4aca11 BH |
1 | /* |
2 | * This file contains low-level functions for performing various | |
3 | * types of TLB invalidations on various processors with no hash | |
4 | * table. | |
5 | * | |
6 | * This file implements the following functions for all no-hash | |
7 | * processors. Some aren't implemented for some variants. Some | |
8 | * are inline in tlbflush.h | |
9 | * | |
10 | * - tlbil_va | |
11 | * - tlbil_pid | |
12 | * - tlbil_all | |
e7f75ad0 | 13 | * - tlbivax_bcast |
2a4aca11 BH |
14 | * |
15 | * Code mostly moved over from misc_32.S | |
16 | * | |
17 | * Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org) | |
18 | * | |
19 | * Partially rewritten by Cort Dougan (cort@cs.nmt.edu) | |
20 | * Paul Mackerras, Kumar Gala and Benjamin Herrenschmidt. | |
21 | * | |
22 | * This program is free software; you can redistribute it and/or | |
23 | * modify it under the terms of the GNU General Public License | |
24 | * as published by the Free Software Foundation; either version | |
25 | * 2 of the License, or (at your option) any later version. | |
26 | * | |
27 | */ | |
28 | ||
29 | #include <asm/reg.h> | |
30 | #include <asm/page.h> | |
31 | #include <asm/cputable.h> | |
32 | #include <asm/mmu.h> | |
33 | #include <asm/ppc_asm.h> | |
34 | #include <asm/asm-offsets.h> | |
35 | #include <asm/processor.h> | |
e7f75ad0 | 36 | #include <asm/bug.h> |
2a4aca11 BH |
37 | |
38 | #if defined(CONFIG_40x) | |
39 | ||
40 | /* | |
41 | * 40x implementation needs only tlbil_va | |
42 | */ | |
d4e167da | 43 | _GLOBAL(__tlbil_va)
2a4aca11 BH |
44 | /* We run the search with interrupts disabled because we have to change
45 | * the PID and I don't want to preempt when that happens. | |
46 | */ | |
47 | mfmsr r5 /* save MSR so wrtee can restore EE below */ | |
48 | mfspr r6,SPRN_PID /* save current PID */ | |
49 | wrteei 0 /* disable external interrupts */ | |
50 | mtspr SPRN_PID,r4 /* search in the context of PID r4 */ | |
51 | tlbsx. r3, 0, r3 /* find entry for EA in r3; index -> r3, CR0[EQ] on hit */ | |
52 | mtspr SPRN_PID,r6 /* restore original PID */ | |
53 | wrtee r5 /* restore interrupt state from saved MSR */ | |
54 | bne 1f /* no matching entry: nothing to invalidate */ | |
55 | sync | |
56 | /* There are only 64 TLB entries, so r3 < 64, which means bit 25 is | |
57 | * clear. Since 25 is the V bit in the TLB_TAG, loading this value | |
58 | * will invalidate the TLB entry. */ | |
59 | tlbwe r3, r3, TLB_TAG | |
60 | isync /* context-synchronize the TLB update */ | |
61 | 1: blr | |
62 | ||
63 | #elif defined(CONFIG_8xx) | |
64 | ||
65 | /* | |
66 | * Nothing to do for 8xx, everything is inline | |
67 | */ | |
68 | ||
e7f75ad0 | 69 | #elif defined(CONFIG_44x) /* Includes 47x */ |
2a4aca11 BH |
70 | |
71 | /* | |
72 | * 440 implementation uses tlbsx/we for tlbil_va and a full sweep | |
73 | * of the TLB for everything else. | |
74 | */ | |
d4e167da | 75 | _GLOBAL(__tlbil_va)
2a4aca11 | 76 | mfspr r5,SPRN_MMUCR
e7f75ad0 DK |
77 | mfmsr r10 /* save MSR so wrtee can restore EE below */
78 | ||
79 | /* | |
80 | * We write 16 bits of STID since 47x supports that much, we | |
81 | * will never be passed out of bounds values on 440 (hopefully) | |
82 | */ | |
83 | rlwimi r5,r4,0,16,31 /* insert STID (r4) into MMUCR[16:31] */ | |
2a4aca11 | 84 | |
760ec0e0 BH |
85 | /* We have to run the search with interrupts disabled, otherwise |
86 | * an interrupt which causes a TLB miss can clobber the MMUCR | |
87 | * between the mtspr and the tlbsx. | |
88 | * | |
89 | * Critical and Machine Check interrupts take care of saving | |
90 | * and restoring MMUCR, so only normal interrupts have to be | |
91 | * taken care of. | |
92 | */ | |
760ec0e0 | 93 | wrteei 0
2a4aca11 | 94 | mtspr SPRN_MMUCR,r5
e7f75ad0 DK |
95 | tlbsx. r6,0,r3 /* search for EA in r3; entry index -> r6 */
96 | bne 10f /* no match: just restore interrupts */ | |
2a4aca11 | 97 | sync
e7f75ad0 DK |
98 | BEGIN_MMU_FTR_SECTION
99 | b 2f /* 47x: use the explicit-way write below */ | |
100 | END_MMU_FTR_SECTION_IFSET(MMU_FTR_TYPE_47x) | |
101 | /* On 440 there are only 64 TLB entries, so the index in r6 is < 64, | |
102 | * which means bit 22 is clear. Since 22 is the V bit in the | |
2a4aca11 BH |
103 | * TLB_PAGEID, loading this value will invalidate the TLB entry. |
104 | */ | |
e7f75ad0 | 105 | tlbwe r6,r6,PPC44x_TLB_PAGEID
2a4aca11 | 106 | isync
e7f75ad0 DK |
107 | 10: wrtee r10 /* restore interrupt state */
108 | blr | |
109 | 2: | |
110 | #ifdef CONFIG_PPC_47x | |
111 | oris r7,r6,0x8000 /* specify way explicitly */ | |
112 | clrrwi r4,r3,12 /* get an EPN for the hashing with V = 0 */ | |
113 | ori r4,r4,PPC47x_TLBE_SIZE | |
114 | tlbwe r4,r7,0 /* write it */ | |
115 | isync | |
116 | wrtee r10 | |
117 | blr | |
118 | #else /* CONFIG_PPC_47x */ | |
119 | 1: trap /* 47x-only path reached on non-47x build: bug */ | |
120 | EMIT_BUG_ENTRY 1b,__FILE__,__LINE__,0; | |
121 | #endif /* !CONFIG_PPC_47x */ | |
2a4aca11 BH |
122 | |
123 | _GLOBAL(_tlbil_all) | |
124 | _GLOBAL(_tlbil_pid) | |
e7f75ad0 DK |
125 | BEGIN_MMU_FTR_SECTION
126 | b 2f /* 47x: use the per-set/per-way walker below */ | |
127 | END_MMU_FTR_SECTION_IFSET(MMU_FTR_TYPE_47x) | |
2a4aca11 BH |
128 | li r3,0 /* 440: invalidate entries 0..hwater in turn */
129 | sync | |
130 | ||
131 | /* Load high watermark */ | |
132 | lis r4,tlb_44x_hwater@ha | |
133 | lwz r5,tlb_44x_hwater@l(r4) | |
134 | ||
135 | 1: tlbwe r3,r3,PPC44x_TLB_PAGEID /* index < 64 -> V bit clear */ | |
136 | addi r3,r3,1 | |
137 | cmpw 0,r3,r5 | |
138 | ble 1b | |
139 | ||
140 | isync | |
141 | blr | |
e7f75ad0 DK |
142 | 2:
143 | #ifdef CONFIG_PPC_47x | |
144 | /* 476 variant. There's no simple way to do this, hopefully we'll | |
145 | * try to limit the amount of such full invalidates | |
146 | */ | |
147 | mfmsr r11 /* Interrupts off */ | |
148 | wrteei 0 | |
149 | li r3,-1 /* Current set */ | |
150 | lis r10,tlb_47x_boltmap@h | |
151 | ori r10,r10,tlb_47x_boltmap@l | |
152 | lis r7,0x8000 /* Specify way explicitly */ | |
153 | ||
154 | b 9f /* For each set */ | |
155 | ||
156 | 1: li r9,4 /* Number of ways */ | |
157 | li r4,0 /* Current way */ | |
158 | li r6,0 /* Default entry value 0 */ | |
159 | andi. r0,r8,1 /* Check if way 0 is bolted */ | |
160 | mtctr r9 /* Load way counter */ | |
161 | bne- 3f /* Bolted, skip loading it */ | |
162 | ||
163 | 2: /* For each way */ | |
164 | or r5,r3,r4 /* Make way|index for tlbre */ | |
165 | rlwimi r5,r5,16,8,15 /* Copy index into position */ | |
166 | tlbre r6,r5,0 /* Read entry */ | |
167 | 3: addis r4,r4,0x2000 /* Next way */ | |
168 | andi. r0,r6,PPC47x_TLB0_VALID /* Valid entry ? */ | |
169 | beq 4f /* Nope, skip it */ | |
170 | rlwimi r7,r5,0,1,2 /* Insert way number */ | |
171 | rlwinm r6,r6,0,21,19 /* Clear V */ | |
172 | tlbwe r6,r7,0 /* Write it */ | |
173 | 4: bdnz 2b /* Loop for each way */ | |
174 | srwi r8,r8,1 /* Next boltmap bit */ | |
175 | 9: cmpwi cr1,r3,255 /* Last set done ? */ | |
176 | addi r3,r3,1 /* Next set */ | |
177 | beq cr1,1f /* End of loop */ | |
178 | andi. r0,r3,0x1f /* Need to load a new boltmap word ? */ | |
179 | bne 1b /* No, loop */ | |
180 | lwz r8,0(r10) /* Load boltmap entry */ | |
181 | addi r10,r10,4 /* Next word */ | |
182 | b 1b /* Then loop */ | |
183 | 1: isync /* Sync shadows */ | |
184 | wrtee r11 /* restore interrupt state */ | |
185 | #else /* CONFIG_PPC_47x */ | |
186 | 1: trap /* 47x-only path reached on non-47x build: bug */ | |
187 | EMIT_BUG_ENTRY 1b,__FILE__,__LINE__,0; | |
188 | #endif /* !CONFIG_PPC_47x */ | |
189 | blr | |
190 | ||
191 | #ifdef CONFIG_PPC_47x | |
192 | /* | |
193 | * _tlbivax_bcast is only on 47x. We don't bother doing a runtime | |
194 | * check though, it will blow up soon enough if we mistakenly try | |
195 | * to use it on a 440. | |
196 | */ | |
197 | _GLOBAL(_tlbivax_bcast) | |
198 | mfspr r5,SPRN_MMUCR | |
199 | mfmsr r10 /* save MSR so wrtee can restore EE below */ | |
200 | rlwimi r5,r4,0,16,31 /* insert STID (r4) into MMUCR[16:31] */ | |
201 | wrteei 0 /* interrupts off while MMUCR holds our STID */ | |
202 | mtspr SPRN_MMUCR,r5 | |
32412aa2 | 203 | isync
e7f75ad0 DK |
204 | /* tlbivax 0,r3 - use .long to avoid binutils deps */
205 | .long 0x7c000624 | (r3 << 11) | |
206 | isync | |
207 | eieio | |
208 | tlbsync /* wait for the broadcast invalidate to complete */ | |
209 | sync | |
210 | wrtee r10 /* restore interrupt state */ | |
211 | blr | |
212 | #endif /* CONFIG_PPC_47x */ | |
2a4aca11 BH |
213 | |
214 | #elif defined(CONFIG_FSL_BOOKE) | |
215 | /* | |
c3071951 KG |
216 | * FSL BookE implementations. |
217 | * | |
218 | * Since feature sections are using _SECTION_ELSE we need | |
219 | * to have the larger code path before the _SECTION_ELSE | |
2a4aca11 BH |
220 | */ |
221 | ||
222 | /* | |
223 | * Flush MMU TLB on the local processor | |
224 | */ | |
2a4aca11 | 225 | _GLOBAL(_tlbil_all)
c3071951 KG |
226 | BEGIN_MMU_FTR_SECTION
227 | li r3,(MMUCSR0_TLBFI)@l /* TLB flash invalidate via MMUCSR0 */ | |
228 | mtspr SPRN_MMUCSR0, r3 | |
229 | 1: | |
230 | mfspr r3,SPRN_MMUCSR0 /* poll until TLBFI self-clears */ | |
231 | andi. r3,r3,MMUCSR0_TLBFI@l | |
232 | bne 1b | |
233 | MMU_FTR_SECTION_ELSE | |
323d23ae | 234 | PPC_TLBILX_ALL(0,0)
c3071951 KG |
235 | ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX) |
236 | msync | |
237 | isync | |
238 | blr | |
239 | ||
240 | _GLOBAL(_tlbil_pid) | |
241 | BEGIN_MMU_FTR_SECTION | |
242 | slwi r3,r3,16 /* move PID into the MAS6[SPID] position */ | |
243 | mfmsr r10 /* save MSR so wrtee can restore EE */ | |
244 | wrteei 0 /* interrupts off while MAS6 is live */ | |
245 | mfspr r4,SPRN_MAS6 /* save MAS6 */ | |
246 | mtspr SPRN_MAS6,r3 | |
247 | PPC_TLBILX_PID(0,0) | |
248 | mtspr SPRN_MAS6,r4 /* restore MAS6 */ | |
249 | wrtee r10 | |
250 | MMU_FTR_SECTION_ELSE | |
2a4aca11 BH |
251 | li r3,(MMUCSR0_TLBFI)@l
252 | mtspr SPRN_MMUCSR0, r3 | |
253 | 1: | |
254 | mfspr r3,SPRN_MMUCSR0 /* poll until TLBFI self-clears */ | |
255 | andi. r3,r3,MMUCSR0_TLBFI@l | |
256 | bne 1b | |
c3071951 | 257 | ALT_MMU_FTR_SECTION_END_IFSET(MMU_FTR_USE_TLBILX)
2a4aca11 BH |
258 | msync
259 | isync | |
260 | blr | |
261 | ||
262 | /* | |
263 | * Flush MMU TLB for a particular address, but only on the local processor | |
264 | * (no broadcast) | |
265 | */ | |
d4e167da | 266 | _GLOBAL(__tlbil_va)
2a4aca11 BH |
267 | mfmsr r10 /* save MSR so wrtee can restore EE */
268 | wrteei 0 /* interrupts off while MAS registers are live */ | |
269 | slwi r4,r4,16 /* PID (r4) -> MAS6[SPID] */ | |
c3071951 | 270 | ori r4,r4,(MAS6_ISIZE(BOOK3E_PAGESZ_4K))@l
2a4aca11 | 271 | mtspr SPRN_MAS6,r4 /* assume AS=0 for now */
c3071951 | 272 | BEGIN_MMU_FTR_SECTION
2a4aca11 BH |
273 | tlbsx 0,r3 /* look up EA in r3; result in the MAS regs */
274 | mfspr r4,SPRN_MAS1 /* check valid */ | |
275 | andis. r3,r4,MAS1_VALID@h | |
276 | beq 1f /* not mapped: nothing to invalidate */ | |
277 | rlwinm r4,r4,0,1,31 /* clear the MAS1 valid bit (bit 0) */ | |
278 | mtspr SPRN_MAS1,r4 | |
279 | tlbwe /* write back with V=0 -> invalidated */ | |
c3071951 KG |
280 | MMU_FTR_SECTION_ELSE
281 | PPC_TLBILX_VA(0,r3) | |
282 | ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX) | |
2a4aca11 BH |
283 | msync
284 | isync | |
285 | 1: wrtee r10 /* restore interrupt state */ | |
286 | blr | |
25d21ad6 BH |
287 | #elif defined(CONFIG_PPC_BOOK3E) |
288 | /* | |
289 | * New Book3E (>= 2.06) implementation | |
290 | * | |
291 | * Note: We may be able to get away without the interrupt masking stuff | |
292 | * if we save/restore MAS6 on exceptions that might modify it | |
293 | */ | |
294 | _GLOBAL(_tlbil_pid) | |
295 | slwi r4,r3,MAS6_SPID_SHIFT /* PID (r3) -> MAS6[SPID] */ | |
296 | mfmsr r10 /* save MSR so wrtee can restore EE */ | |
297 | wrteei 0 /* interrupts off while MAS6 is live */ | |
298 | mtspr SPRN_MAS6,r4 | |
299 | PPC_TLBILX_PID(0,0) | |
300 | wrtee r10 | |
301 | msync | |
302 | isync | |
303 | blr | |
304 | ||
305 | _GLOBAL(_tlbil_pid_noind) | |
306 | slwi r4,r3,MAS6_SPID_SHIFT /* PID (r3) -> MAS6[SPID] */ | |
307 | mfmsr r10 /* save MSR so wrtee can restore EE */ | |
308 | ori r4,r4,MAS6_SIND /* set SIND in the MAS6 image */ | |
309 | wrteei 0 /* interrupts off while MAS6 is live */ | |
310 | mtspr SPRN_MAS6,r4 | |
311 | PPC_TLBILX_PID(0,0) | |
312 | wrtee r10 | |
313 | msync | |
314 | isync | |
315 | blr | |
316 | ||
317 | _GLOBAL(_tlbil_all) | |
318 | PPC_TLBILX_ALL(0,0) /* tlbilx: invalidate all local TLB entries */ | |
319 | msync | |
320 | isync | |
321 | blr | |
322 | ||
323 | _GLOBAL(_tlbil_va) | |
324 | mfmsr r10 /* save MSR so wrtee can restore EE */ | |
325 | wrteei 0 /* interrupts off while MAS6 is live */ | |
326 | cmpwi cr0,r6,0 /* r6 = SIND flag (0 = leave SIND clear) */ | |
327 | slwi r4,r4,MAS6_SPID_SHIFT /* PID (r4) -> MAS6[SPID] */ | |
328 | rlwimi r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK /* page size (r5) -> ISIZE */ | |
329 | beq 1f | |
330 | rlwimi r4,r6,MAS6_SIND_SHIFT,MAS6_SIND /* set SIND from r6 */ | |
331 | 1: mtspr SPRN_MAS6,r4 /* assume AS=0 for now */ | |
332 | PPC_TLBILX_VA(0,r3) | |
333 | msync | |
334 | isync | |
335 | wrtee r10 | |
336 | blr | |
337 | ||
338 | _GLOBAL(_tlbivax_bcast) | |
339 | mfmsr r10 /* save MSR so wrtee can restore EE */ | |
340 | wrteei 0 /* interrupts off while MAS6 is live */ | |
341 | cmpwi cr0,r6,0 /* r6 = SIND flag (0 = leave SIND clear) */ | |
342 | slwi r4,r4,MAS6_SPID_SHIFT /* PID (r4) -> MAS6[SPID] */ | |
343 | rlwimi r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK /* page size (r5) -> ISIZE */ | |
344 | beq 1f | |
345 | rlwimi r4,r6,MAS6_SIND_SHIFT,MAS6_SIND /* set SIND from r6 */ | |
346 | 1: mtspr SPRN_MAS6,r4 /* assume AS=0 for now */ | |
347 | PPC_TLBIVAX(0,r3) /* broadcast invalidate for EA in r3 */ | |
348 | eieio | |
349 | tlbsync /* wait for the broadcast invalidate to complete */ | |
350 | sync | |
351 | wrtee r10 | |
352 | blr | |
353 | ||
354 | _GLOBAL(set_context) | |
355 | #ifdef CONFIG_BDI_SWITCH | |
356 | /* Context switch the PTE pointer for the Abatron BDI2000. | |
357 | * The PGDIR is the second parameter. | |
358 | */ | |
359 | lis r5, abatron_pteptrs@h | |
360 | ori r5, r5, abatron_pteptrs@l | |
361 | stw r4, 0x4(r5) /* publish PGDIR (r4) for the debugger */ | |
362 | #endif | |
363 | mtspr SPRN_PID,r3 /* r3 = new context id -> PID */ | |
364 | isync /* Force context change */ | |
365 | blr | |
b62c31ae | 366 | #else |
2a4aca11 BH |
367 | #error Unsupported processor type ! |
368 | #endif | |
78f62237 KG |
369 | |
370 | #if defined(CONFIG_FSL_BOOKE) | |
371 | /* | |
372 | * extern void loadcam_entry(unsigned int index) | |
373 | * | |
374 | * Load TLBCAM[index] entry in to the L2 CAM MMU | |
375 | */ | |
376 | _GLOBAL(loadcam_entry) | |
377 | LOAD_REG_ADDR(r4, TLBCAM) /* r4 = &TLBCAM[0] */ | |
378 | mulli r5,r3,TLBCAM_SIZE /* byte offset of entry r3 */ | |
379 | add r3,r5,r4 /* r3 = &TLBCAM[index] */ | |
380 | lwz r4,TLBCAM_MAS0(r3) | |
381 | mtspr SPRN_MAS0,r4 | |
382 | lwz r4,TLBCAM_MAS1(r3) | |
383 | mtspr SPRN_MAS1,r4 | |
384 | PPC_LL r4,TLBCAM_MAS2(r3) | |
385 | mtspr SPRN_MAS2,r4 | |
386 | lwz r4,TLBCAM_MAS3(r3) | |
387 | mtspr SPRN_MAS3,r4 | |
388 | BEGIN_MMU_FTR_SECTION | |
389 | lwz r4,TLBCAM_MAS7(r3) /* MAS7 only loaded with MMU_FTR_BIG_PHYS */ | |
390 | mtspr SPRN_MAS7,r4 | |
391 | END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS) | |
392 | isync | |
393 | tlbwe /* commit the staged MAS registers to the TLB */ | |
394 | isync | |
395 | blr | |
396 | #endif |