[SPARC64]: Sanitize %pstate writes for sun4v.
[safe/jmp/linux-2.6] / arch / sparc64 / kernel / ktlb.S
1 /* arch/sparc64/kernel/ktlb.S: Kernel mapping TLB miss handling.
2  *
3  * Copyright (C) 1995, 1997, 2005 David S. Miller <davem@davemloft.net>
4  * Copyright (C) 1996 Eddie C. Dost        (ecd@brainaid.de)
5  * Copyright (C) 1996 Miguel de Icaza      (miguel@nuclecu.unam.mx)
6  * Copyright (C) 1996,98,99 Jakub Jelinek  (jj@sunsite.mff.cuni.cz)
7  */
8
9 #include <linux/config.h>
10 #include <asm/head.h>
11 #include <asm/asi.h>
12 #include <asm/page.h>
13 #include <asm/pgtable.h>
14 #include <asm/tsb.h>
15
16         .text
17         .align          32
18
/* kvmap_itlb: kernel instruction-TLB miss handler, entered at trap level 1.
 * On entry %g6 holds the TSB TAG TARGET (set up by the trap vector).
 * Reads the faulting virtual address from the I-MMU Tag Access register,
 * rejects NULL-page calls, tries the kernel TSB, then classifies the
 * address (vmalloc vs. OBP range) on TSB miss.
 */
19         .globl          kvmap_itlb
20 kvmap_itlb:
21         /* g6: TAG TARGET */
/* %g4 = faulting VA: TLB_TAG_ACCESS is the I-MMU Tag Access register offset,
 * read via the ASI_IMMU address space.
 */
22         mov             TLB_TAG_ACCESS, %g4
23         ldxa            [%g4] ASI_IMMU, %g4
24
25 kvmap_itlb_nonlinear:
26         /* Catch kernel NULL pointer calls.  */
/* VA <= PAGE_SIZE (unsigned) means a jump through a NULL/near-NULL pointer:
 * take the slow fault path instead of filling the TLB.
 * NOTE(review): this branches to kvmap_dtlb_longpath even though we are in
 * the ITLB miss handler, so the fault is reported with FAULT_CODE_DTLB and a
 * D-MMU tag read — looks like it should be kvmap_itlb_longpath; confirm
 * against later kernel history.
 */
27         sethi           %hi(PAGE_SIZE), %g5
28         cmp             %g4, %g5
29         bleu,pn         %xcc, kvmap_dtlb_longpath
30          nop
31
/* Probe the kernel TSB (macro from asm/tsb.h); on a tag hit it presumably
 * loads the PTE into %g5 and branches to kvmap_itlb_load, otherwise falls
 * through to the miss classification below.
 */
32         KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_itlb_load)
33
34 kvmap_itlb_tsb_miss:
/* Classify the missing VA:
 *   VA <  LOW_OBP_ADDRESS          -> vmalloc/module area, walk page tables
 *   LOW_OBP_ADDRESS <= VA < 4GB    -> OpenBoot PROM translation
 *   VA >= 4GB                      -> falls through to the vmalloc walker
 * The "mov 0x1, %g5" in the delay slot starts building the 4GB (1<<32)
 * boundary constant and executes on both paths.
 */
35         sethi           %hi(LOW_OBP_ADDRESS), %g5
36         cmp             %g4, %g5
37         blu,pn          %xcc, kvmap_itlb_vmalloc_addr
38          mov            0x1, %g5
39         sllx            %g5, 32, %g5
40         cmp             %g4, %g5
41         blu,pn          %xcc, kvmap_itlb_obp
42          nop
43
43
/* kvmap_itlb_vmalloc_addr: resolve an ITLB miss on a vmalloc-range address
 * by walking the kernel page tables, insert the translation into the kernel
 * TSB, and load it into the ITLB.  Macros are from asm/tsb.h.
 */
44 kvmap_itlb_vmalloc_addr:
/* Walk the kernel page tables for VA in %g4; on failure branch to the
 * longpath.  Presumably leaves the physical address of the PTE in %g5.
 */
45         KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath)
46
/* Lock the TSB entry's tag (in %g1) so a concurrent update cannot race
 * with the PTE store below.
 */
47         KTSB_LOCK_TAG(%g1, %g2, %g4)
48
49         /* Load and check PTE.  */
/* Fetch the PTE via a physical (E-cache bypassing) load.  A valid PTE has
 * bit 63 set, so >= 0 means invalid.  The branch is annulled (",a"), so the
 * KTSB_STORE of %g0 — which unlocks/clears the locked TSB tag — executes
 * only when the branch to the longpath is taken.
 */
50         ldxa            [%g5] ASI_PHYS_USE_EC, %g5
51         brgez,a,pn      %g5, kvmap_itlb_longpath
52          KTSB_STORE(%g1, %g0)
53
/* Valid PTE: write tag (%g6) and data (%g5) into the TSB entry, which also
 * drops the tag lock.
 */
54         KTSB_WRITE(%g1, %g5, %g6)
55
56         /* fallthrough to TLB load */
57
58 kvmap_itlb_load:
/* Hand the PTE to the I-MMU data-in register; retry re-executes the
 * faulting instruction with the new translation in place.
 */
59         stxa            %g5, [%g0] ASI_ITLB_DATA_IN     ! Reload TLB
60         retry
61
/* kvmap_itlb_longpath: unresolvable ITLB miss — switch register windows and
 * hand off to the generic C fault path.
 */
62 kvmap_itlb_longpath:
63
/* Select the alternate/MMU globals before calling into the common fault
 * code.  On sun4v this PSTATE_AG|PSTATE_MG write is illegal (no such pstate
 * bits), so the boot-time .gl_2insn_patch section below replaces these two
 * instructions with nops — this is the "sanitize %pstate writes for sun4v"
 * change named in the commit subject.
 */
64 661:    rdpr    %pstate, %g5
65         wrpr    %g5, PSTATE_AG | PSTATE_MG, %pstate
66         .section .gl_2insn_patch, "ax"
67         .word   661b
68         nop
69         nop
70         .previous
71
/* Fault PC for an ITLB miss is the trap PC; FAULT_CODE_ITLB in %g4 tells
 * sparc64_realfault_common this was an instruction access fault.
 */
72         rdpr    %tpc, %g5
73         ba,pt   %xcc, sparc64_realfault_common
74          mov    FAULT_CODE_ITLB, %g4
75
/* kvmap_itlb_obp: ITLB miss inside the OpenBoot PROM address range.
 * Look the VA up in the OBP translations table (macro presumably fills %g5
 * with the PTE, or branches to the longpath on failure), cache the result
 * in the kernel TSB, then load the TLB.
 */
76 kvmap_itlb_obp:
77         OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_itlb_longpath)
78
79         KTSB_LOCK_TAG(%g1, %g2, %g4)
80
81         KTSB_WRITE(%g1, %g5, %g6)
82
83         ba,pt           %xcc, kvmap_itlb_load
84          nop
85
/* kvmap_dtlb_obp: DTLB miss inside the OpenBoot PROM address range.
 * Mirror image of kvmap_itlb_obp: translate via the OBP table, insert into
 * the kernel TSB, then jump to the DTLB load/retry sequence.
 */
86 kvmap_dtlb_obp:
87         OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_dtlb_longpath)
88
89         KTSB_LOCK_TAG(%g1, %g2, %g4)
90
91         KTSB_WRITE(%g1, %g5, %g6)
92
93         ba,pt           %xcc, kvmap_dtlb_load
94          nop
95
/* kvmap_dtlb: kernel data-TLB miss handler, entered at trap level 1.
 * %g6 holds the TSB TAG TARGET.  Addresses with bit 63 set are in the
 * kernel's linear physical mapping and get a PTE synthesized arithmetically
 * (no memory access at all); everything else goes to the nonlinear path.
 */
96         .align          32
97         .globl          kvmap_dtlb
98 kvmap_dtlb:
99         /* %g6: TAG TARGET */
/* %g4 = faulting VA from the D-MMU Tag Access register. */
100         mov             TLB_TAG_ACCESS, %g4
101         ldxa            [%g4] ASI_DMMU, %g4
/* brgez: VA top bit clear => not in the linear mapping region based at
 * 0xfffff80000000000 — handle as vmalloc/OBP/user-error below.
 */
102         brgez,pn        %g4, kvmap_dtlb_nonlinear
103          nop
104
/* Linear-mapping PTE trick: KERN_HIGHBITS pre-XORs the PTE valid/size bits
 * with the linear base, so a single "xor %g2, VA, %g5" simultaneously
 * strips the base from the VA (yielding the physical address) and ORs in
 * valid + 4MB-page bits.  LOWBITS add cacheable/privileged/writable.
 */
105 #define KERN_HIGHBITS   ((_PAGE_VALID|_PAGE_SZ4MB)^0xfffff80000000000)
106 #define KERN_LOWBITS    (_PAGE_CP | _PAGE_CV | _PAGE_P | _PAGE_W)
107
108         sethi           %uhi(KERN_HIGHBITS), %g2
109         or              %g2, %ulo(KERN_HIGHBITS), %g2
110         sllx            %g2, 32, %g2
111         or              %g2, KERN_LOWBITS, %g2
112
113 #undef KERN_HIGHBITS
114 #undef KERN_LOWBITS
115
/* kvmap_linear_patch is exported so boot code can rewrite this branch/xor
 * pair (presumably for CPUs needing a different linear-mapping strategy —
 * confirm against the patching site).
 */
116         .globl          kvmap_linear_patch
117 kvmap_linear_patch:
118         ba,pt           %xcc, kvmap_dtlb_load
119          xor            %g2, %g4, %g5   ! %g5 = synthesized PTE (delay slot)
120
/* kvmap_dtlb_vmalloc_addr: resolve a DTLB miss on a vmalloc-range address
 * via the kernel page tables, insert into the kernel TSB, load the DTLB.
 * Identical structure to kvmap_itlb_vmalloc_addr; macros from asm/tsb.h.
 */
121 kvmap_dtlb_vmalloc_addr:
122         KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)
123
/* Lock the TSB tag so the store below cannot race another updater. */
124         KTSB_LOCK_TAG(%g1, %g2, %g4)
125
126         /* Load and check PTE.  */
/* Valid PTEs have bit 63 set; >= 0 means invalid.  The annulled delay slot
 * stores %g0 to unlock the TSB tag only when branching to the longpath.
 */
127         ldxa            [%g5] ASI_PHYS_USE_EC, %g5
128         brgez,a,pn      %g5, kvmap_dtlb_longpath
129          KTSB_STORE(%g1, %g0)
130
/* Commit tag (%g6) + data (%g5) to the TSB entry, releasing the lock. */
131         KTSB_WRITE(%g1, %g5, %g6)
132
133         /* fallthrough to TLB load */
134
135 kvmap_dtlb_load:
/* Feed the PTE to the D-MMU data-in register and retry the faulting
 * memory access.
 */
136         stxa            %g5, [%g0] ASI_DTLB_DATA_IN     ! Reload TLB
137         retry
138
/* kvmap_dtlb_nonlinear: DTLB miss outside the linear kernel mapping.
 * Reject NULL-page dereferences, try the kernel TSB, then bounds-check the
 * VA against the modules/vmalloc window before classifying it as a
 * vmalloc or OBP translation.
 */
139 kvmap_dtlb_nonlinear:
140         /* Catch kernel NULL pointer derefs.  */
/* VA <= PAGE_SIZE (unsigned) => NULL/near-NULL dereference: fault. */
141         sethi           %hi(PAGE_SIZE), %g5
142         cmp             %g4, %g5
143         bleu,pn         %xcc, kvmap_dtlb_longpath
144          nop
145
/* Probe the kernel TSB; hit branches straight to kvmap_dtlb_load. */
146         KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
147
148 kvmap_dtlb_tsbmiss:
/* Only [MODULES_VADDR, VMALLOC_END) is a legitimate nonlinear kernel data
 * range: below it or at/above it is a bad access.  VMALLOC_END is rebuilt
 * via mov/sllx (24-bit shift) in the delay slot because it does not fit an
 * immediate.
 */
149         sethi           %hi(MODULES_VADDR), %g5
150         cmp             %g4, %g5
151         blu,pn          %xcc, kvmap_dtlb_longpath
152          mov            (VMALLOC_END >> 24), %g5
153         sllx            %g5, 24, %g5
154         cmp             %g4, %g5
155         bgeu,pn         %xcc, kvmap_dtlb_longpath
156          nop
157
158 kvmap_check_obp:
/* Dispatch within the valid window:
 *   VA <  LOW_OBP_ADDRESS        -> page-table walk (vmalloc/modules)
 *   LOW_OBP_ADDRESS <= VA < 4GB  -> OpenBoot PROM translation
 *   VA >= 4GB                    -> page-table walk
 * As in the ITLB path, the delay-slot mov begins building the 4GB constant.
 */
159         sethi           %hi(LOW_OBP_ADDRESS), %g5
160         cmp             %g4, %g5
161         blu,pn          %xcc, kvmap_dtlb_vmalloc_addr
162          mov            0x1, %g5
163         sllx            %g5, 32, %g5
164         cmp             %g4, %g5
165         blu,pn          %xcc, kvmap_dtlb_obp
166          nop
167         ba,pt           %xcc, kvmap_dtlb_vmalloc_addr
168          nop
169
/* kvmap_dtlb_longpath: unresolvable DTLB miss — switch register windows
 * and route to either the C fault handler or the window-fixup trampoline.
 */
170 kvmap_dtlb_longpath:
171
/* Select alternate/MMU globals.  As in kvmap_itlb_longpath, sun4v has no
 * PSTATE_AG/PSTATE_MG, so .gl_2insn_patch nops out these two instructions
 * at boot on such systems (the commit-subject "sanitize %pstate writes").
 */
172 661:    rdpr    %pstate, %g5
173         wrpr    %g5, PSTATE_AG | PSTATE_MG, %pstate
174         .section .gl_2insn_patch, "ax"
175         .word   661b
176         nop
177         nop
178         .previous
179
/* A data fault at TL==1 came from a normal access: report it with the
 * faulting VA (D-MMU Tag Access) and FAULT_CODE_DTLB.  TL>1 means the miss
 * happened inside another trap handler (window spill/fill), which must be
 * repaired via winfix_trampoline instead.  Note the compare result is
 * consumed by be,pt after the unrelated mov/ldxa, which do not touch the
 * condition codes.
 */
180         rdpr    %tl, %g4
181         cmp     %g4, 1
182         mov     TLB_TAG_ACCESS, %g4
183         ldxa    [%g4] ASI_DMMU, %g5
184         be,pt   %xcc, sparc64_realfault_common
185          mov    FAULT_CODE_DTLB, %g4
186         ba,pt   %xcc, winfix_trampoline
187          nop