X-Git-Url: http://ftp.safe.ca/?a=blobdiff_plain;f=arch%2Fsparc64%2Fkernel%2Fktlb.S;h=cef8defcd7a931785acf4a8ccb8f978ee42d86bf;hb=9775369ec06bad8edb2fbd8c77316f49b439c225;hp=2d333ab4b91bc352beecf07b334cde1dbc32903d;hpb=459b6e621e0e15315c25bac47fa7113e5818d45d;p=safe%2Fjmp%2Flinux-2.6

diff --git a/arch/sparc64/kernel/ktlb.S b/arch/sparc64/kernel/ktlb.S
index 2d333ab..cef8def 100644
--- a/arch/sparc64/kernel/ktlb.S
+++ b/arch/sparc64/kernel/ktlb.S
@@ -1,12 +1,11 @@
 /* arch/sparc64/kernel/ktlb.S: Kernel mapping TLB miss handling.
  *
- * Copyright (C) 1995, 1997, 2005 David S. Miller <davem@davemloft.net>
+ * Copyright (C) 1995, 1997, 2005, 2008 David S. Miller <davem@davemloft.net>
  * Copyright (C) 1996 Eddie C. Dost        (ecd@brainaid.de)
  * Copyright (C) 1996 Miguel de Icaza      (miguel@nuclecu.unam.mx)
  * Copyright (C) 1996,98,99 Jakub Jelinek  (jj@sunsite.mff.cuni.cz)
  */
 
-#include <linux/config.h>
 #include <asm/head.h>
 #include <asm/asi.h>
 #include <asm/page.h>
@@ -52,8 +51,10 @@ kvmap_itlb_vmalloc_addr:
 
 	/* Load and check PTE.  */
 	ldxa		[%g5] ASI_PHYS_USE_EC, %g5
+	mov		1, %g7
+	sllx		%g7, TSB_TAG_INVALID_BIT, %g7
 	brgez,a,pn	%g5, kvmap_itlb_longpath
-	 KTSB_STORE(%g1, %g0)
+	 KTSB_STORE(%g1, %g7)
 
 	KTSB_WRITE(%g1, %g5, %g6)
 
@@ -90,7 +91,7 @@ kvmap_itlb_longpath:
 	wrpr	%g5, PSTATE_AG | PSTATE_MG, %pstate
 	.section .sun4v_2insn_patch, "ax"
 	.word	661b
-	nop
+	SET_GL(1)
 	nop
 	.previous
 
@@ -119,6 +120,12 @@ kvmap_dtlb_obp:
 	 nop
 
 	.align		32
+kvmap_dtlb_tsb4m_load:
+	KTSB_LOCK_TAG(%g1, %g2, %g7)
+	KTSB_WRITE(%g1, %g5, %g6)
+	ba,pt		%xcc, kvmap_dtlb_load
+	 nop
+
 kvmap_dtlb:
 	/* %g6: TAG TARGET */
 	mov		TLB_TAG_ACCESS, %g4
@@ -131,20 +138,51 @@ kvmap_dtlb_4v:
 	brgez,pn	%g4, kvmap_dtlb_nonlinear
 	 nop
 
-#define KERN_HIGHBITS	((_PAGE_VALID|_PAGE_SZ4MB)^0xfffff80000000000)
-#define KERN_LOWBITS	(_PAGE_CP | _PAGE_CV | _PAGE_P | _PAGE_W)
+#ifdef CONFIG_DEBUG_PAGEALLOC
+	/* Index through the base page size TSB even for linear
+	 * mappings when using page allocation debugging.
+	 */
+	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
+#else
+	/* Correct TAG_TARGET is already in %g6, check 4mb TSB.  */
+	KERN_TSB4M_LOOKUP_TL1(%g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
+#endif
+	/* TSB entry address left in %g1, lookup linear PTE.
+	 * Must preserve %g1 and %g6 (TAG).
+	 */
+kvmap_dtlb_tsb4m_miss:
+	sethi		%hi(kpte_linear_bitmap), %g2
+	or		%g2, %lo(kpte_linear_bitmap), %g2
+
+	/* Clear the PAGE_OFFSET top virtual bits, then shift
+	 * down to get a 256MB physical address index.
+	 */
+	sllx		%g4, 21, %g5
+	mov		1, %g7
+	srlx		%g5, 21 + 28, %g5
+
+	/* Don't try this at home kids... this depends upon srlx
+	 * only taking the low 6 bits of the shift count in %g5.
+	 */
+	sllx		%g7, %g5, %g7
+
+	/* Divide by 64 to get the offset into the bitmask.  */
+	srlx		%g5, 6, %g5
+	sllx		%g5, 3, %g5
 
-	sethi		%uhi(KERN_HIGHBITS), %g2
-	or		%g2, %ulo(KERN_HIGHBITS), %g2
-	sllx		%g2, 32, %g2
-	or		%g2, KERN_LOWBITS, %g2
+	/* kern_linear_pte_xor[((mask & bit) ? 1 : 0)] */
+	ldx		[%g2 + %g5], %g2
+	andcc		%g2, %g7, %g0
+	sethi		%hi(kern_linear_pte_xor), %g5
+	or		%g5, %lo(kern_linear_pte_xor), %g5
+	bne,a,pt	%xcc, 1f
+	 add		%g5, 8, %g5
 
-#undef KERN_HIGHBITS
-#undef KERN_LOWBITS
+1:	ldx		[%g5], %g2
 
 	.globl		kvmap_linear_patch
 kvmap_linear_patch:
-	ba,pt		%xcc, kvmap_dtlb_load
+	ba,pt		%xcc, kvmap_dtlb_tsb4m_load
 	 xor		%g2, %g4, %g5
 
 kvmap_dtlb_vmalloc_addr:
@@ -154,8 +192,10 @@ kvmap_dtlb_vmalloc_addr:
 
 	/* Load and check PTE.  */
 	ldxa		[%g5] ASI_PHYS_USE_EC, %g5
+	mov		1, %g7
+	sllx		%g7, TSB_TAG_INVALID_BIT, %g7
 	brgez,a,pn	%g5, kvmap_dtlb_longpath
-	 KTSB_STORE(%g1, %g0)
+	 KTSB_STORE(%g1, %g7)
 
 	KTSB_WRITE(%g1, %g5, %g6)
 
@@ -186,6 +226,17 @@ kvmap_dtlb_load:
 	ba,pt		%xcc, sun4v_dtlb_load
 	 mov		%g5, %g3
 
+#ifdef CONFIG_SPARSEMEM_VMEMMAP
+kvmap_vmemmap:
+	sub		%g4, %g5, %g5
+	srlx		%g5, 22, %g5
+	sethi		%hi(vmemmap_table), %g1
+	sllx		%g5, 3, %g5
+	or		%g1, %lo(vmemmap_table), %g1
+	ba,pt		%xcc, kvmap_dtlb_load
+	 ldx		[%g1 + %g5], %g5
+#endif
+
 kvmap_dtlb_nonlinear:
 	/* Catch kernel NULL pointer derefs.  */
 	sethi		%hi(PAGE_SIZE), %g5
@@ -193,6 +244,15 @@ kvmap_dtlb_nonlinear:
 	bleu,pn		%xcc, kvmap_dtlb_longpath
 	 nop
 
+#ifdef CONFIG_SPARSEMEM_VMEMMAP
+	/* Do not use the TSB for vmemmap.  */
+	mov		(VMEMMAP_BASE >> 24), %g5
+	sllx		%g5, 24, %g5
+	cmp		%g4,%g5
+	bgeu,pn		%xcc, kvmap_vmemmap
+	 nop
+#endif
+
 	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
 
 kvmap_dtlb_tsbmiss:
@@ -223,8 +283,8 @@ kvmap_dtlb_longpath:
 	wrpr	%g5, PSTATE_AG | PSTATE_MG, %pstate
 	.section .sun4v_2insn_patch, "ax"
 	.word	661b
-	nop
-	nop
+	SET_GL(1)
+	ldxa	[%g0] ASI_SCRATCHPAD, %g5
 	.previous
 
 	rdpr	%tl, %g3
@@ -234,7 +294,7 @@ kvmap_dtlb_longpath:
 	ldxa	[%g4] ASI_DMMU, %g5
 	.section .sun4v_2insn_patch, "ax"
 	.word	661b
-	mov	%g4, %g5
+	ldx	[%g5 + HV_FAULT_D_ADDR_OFFSET], %g5
 	nop
 	.previous
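
The trickiest part of this patch is the kvmap_dtlb_tsb4m_miss hunk: the sllx/srlx
pair and the bitmap probe are easier to follow in C. Below is a minimal sketch of
the same lookup, assuming 64-bit longs; kpte_linear_bitmap and kern_linear_pte_xor
are the symbols the patch references, while the function name and array sizes here
are hypothetical stand-ins, not the kernel's actual definitions.

    /* Illustrative stand-ins: one bit per 256MB chunk of the linear
     * mapping; kern_linear_pte_xor[0] holds the default PTE transform,
     * [1] the alternate one used when the chunk's bit is set.
     */
    static unsigned long kpte_linear_bitmap[64];
    static unsigned long kern_linear_pte_xor[2];

    static unsigned long kvmap_linear_pte(unsigned long vaddr)
    {
            /* (vaddr << 21) >> 49: drop the PAGE_OFFSET top bits, then
             * scale down to a 256MB (1 << 28) index, exactly what the
             * sllx/srlx pair computes.
             */
            unsigned long idx  = (vaddr << 21) >> (21 + 28);
            /* sllx only uses the low 6 bits of its shift count, hence
             * the "don't try this at home" comment; in C we mask.
             */
            unsigned long bit  = 1UL << (idx & 63);
            unsigned long word = kpte_linear_bitmap[idx >> 6];
            /* kern_linear_pte_xor[((mask & bit) ? 1 : 0)] */
            unsigned long x    = kern_linear_pte_xor[(word & bit) ? 1 : 0];

            /* The "xor %g2, %g4, %g5" in the kvmap_linear_patch delay
             * slot: the linear-mapping PTE is derived from the virtual
             * address itself.
             */
            return vaddr ^ x;
    }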
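The CONFIG_SPARSEMEM_VMEMMAP path added by the patch is simpler: it bypasses the
TSB entirely and indexes a table of pre-built PTEs. A rough C equivalent follows,
again with placeholders for anything the diff does not define (the VMEMMAP_BASE
value and table size below are illustrative only; the diff shows merely that the
real base is a multiple of 1 << 24):

    #define VMEMMAP_BASE    (0x1UL << 42)   /* placeholder value */

    static unsigned long vmemmap_table[1024];  /* illustrative size */

    static unsigned long kvmap_vmemmap_pte(unsigned long vaddr)
    {
            /* The sub/srlx/sllx/ldx sequence: each table slot holds the
             * ready-made PTE for one 4MB (1 << 22) chunk of the virtual
             * memmap region.
             */
            unsigned long idx = (vaddr - VMEMMAP_BASE) >> 22;
            return vmemmap_table[idx];
    }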