author | Benjamin Herrenschmidt <benh@kernel.crashing.org> | 2008-12-18 20:13:54 +0100 |
---|---|---|
committer | Paul Mackerras <paulus@samba.org> | 2008-12-21 04:21:16 +0100 |
commit | 9dce3ce5c55c848f00429005a46fd6246cfabfbe | |
tree | d70f72b77732c582adfaddfadc658bb461a79d14 | |
parent | powerpc/mm: Rework usage of _PAGE_COHERENT/NO_CACHE/GUARDED | |
powerpc/44x: 44x TLB doesn't need "Guarded" set for all pages
After discussion with the chip designers, it appears that it is not
necessary to set G everywhere on 440 cores. The various core
errata related to prefetch should be sorted out by firmware
disabling icache prefetching in CCR0. We add the workaround to
the kernel anyway, just in case old firmware doesn't do it.
This is valid for -all- 4xx core variants. Later ones hard-wire
the absence of prefetch, but it does no harm to clear the bits
in CCR0 (they should already be cleared anyway).

We still leave G=1 on the linear mapping for now; we need to
stop over-mapping RAM before we can remove it.
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
Acked-by: Kumar Gala <galak@kernel.crashing.org>
Acked-by: Josh Boyer <jwboyer@linux.vnet.ibm.com>
Signed-off-by: Paul Mackerras <paulus@samba.org>
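
For readers less familiar with PowerPC rotate-and-mask idioms: the `rlwinm r3,r3,0,0,27` added in the hunk below rotates by zero and keeps IBM-numbered bits 0-27 (bit 0 being the most significant), so it simply clears the four least-significant bits of CCR0, which the patch comment identifies as the icache-prefetch controls. A minimal C sketch of that mask, with hypothetical names and values of my choosing (only the mask itself mirrors the assembly):

```c
#include <stdint.h>
#include <stdio.h>

/*
 * Illustrative only: mirrors the arithmetic of
 *     rlwinm  r3,r3,0,0,27
 * i.e. rotate left by 0 and AND with the mask covering IBM bits 0..27,
 * which clears the four least-significant bits of a 32-bit value.
 */
static uint32_t ccr0_clear_prefetch_bits(uint32_t ccr0)
{
	return ccr0 & 0xFFFFFFF0u;	/* equivalent to ccr0 & ~0xFu */
}

int main(void)
{
	uint32_t before = 0x0000000Fu;	/* hypothetical CCR0 contents */
	uint32_t after = ccr0_clear_prefetch_bits(before);

	printf("CCR0: 0x%08x -> 0x%08x\n", (unsigned)before, (unsigned)after);
	return 0;
}
```

The isync/mtspr/isync/sync sequence in the real code provides the context synchronization needed when rewriting a core configuration register; the sketch only covers the bit arithmetic.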
-rw-r--r-- | arch/powerpc/kernel/head_44x.S | 12 |
1 file changed, 11 insertions, 1 deletion
diff --git a/arch/powerpc/kernel/head_44x.S b/arch/powerpc/kernel/head_44x.S
index 26237357a88c..bd4fe9e7278b 100644
--- a/arch/powerpc/kernel/head_44x.S
+++ b/arch/powerpc/kernel/head_44x.S
@@ -69,6 +69,17 @@ _ENTRY(_start);
 	li	r24,0		/* CPU number */
 
 /*
+ * In case the firmware didn't do it, we apply some workarounds
+ * that are good for all 440 core variants here
+ */
+	mfspr	r3,SPRN_CCR0
+	rlwinm	r3,r3,0,0,27	/* disable icache prefetch */
+	isync
+	mtspr	SPRN_CCR0,r3
+	isync
+	sync
+
+/*
  * Set up the initial MMU state
  *
  * We are still executing code at the virtual address
@@ -570,7 +581,6 @@ finish_tlb_load:
 	rlwimi	r10,r12,29,30,30	/* DIRTY -> SW position */
 	and	r11,r12,r10		/* Mask PTE bits to keep */
 	andi.	r10,r12,_PAGE_USER	/* User page ? */
-	ori	r11,r11,_PAGE_GUARDED	/* 440 errata, needs G set */
 	beq	1f			/* nope, leave U bits empty */
 	rlwimi	r11,r11,3,26,28		/* yes, copy S bits to U */
 1:	tlbwe	r11,r13,PPC44x_TLB_ATTRIB /* Write ATTRIB */
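
The second hunk changes which TLB entries come out Guarded: before the patch, finish_tlb_load OR'd _PAGE_GUARDED into every entry's attribute word, whereas afterwards the G bit is carried through only when the PTE itself has it set (as I/O mappings do). A rough before/after sketch in C, using made-up constants and helper names rather than the real kernel definitions:

```c
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-ins for the real kernel definitions. */
#define PTE_GUARDED	0x00000008u	/* placeholder for _PAGE_GUARDED */
#define ATTRIB_KEEP	0x000000ffu	/* placeholder for "PTE bits to keep" */

/* Before the patch: every TLB attribute word had G forced on. */
static uint32_t tlb_attrib_before(uint32_t pte)
{
	return (pte & ATTRIB_KEEP) | PTE_GUARDED;
}

/* After the patch: G survives only if the PTE itself carries it. */
static uint32_t tlb_attrib_after(uint32_t pte)
{
	return pte & ATTRIB_KEEP;
}

int main(void)
{
	uint32_t ram_pte = 0x00000003u;			/* made-up PTE, no G */
	uint32_t io_pte  = ram_pte | PTE_GUARDED;	/* made-up PTE with G */

	printf("RAM page: 0x%02x -> 0x%02x\n",
	       (unsigned)tlb_attrib_before(ram_pte),
	       (unsigned)tlb_attrib_after(ram_pte));	/* loses the forced G */
	printf("I/O page: 0x%02x -> 0x%02x\n",
	       (unsigned)tlb_attrib_before(io_pte),
	       (unsigned)tlb_attrib_after(io_pte));	/* keeps G from the PTE */
	return 0;
}
```

Together with the parent commit's rework of _PAGE_COHERENT/NO_CACHE/GUARDED handling, this leaves ordinary RAM mappings unguarded while still honouring G wherever a PTE asks for it.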