[MIPS] checkfiles: Fix "need space after that ','" errors.
include/asm-mips/hazards.h
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 04, 07 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H


#ifdef __ASSEMBLY__
#define ASMMACRO(name, code...) .macro name; code; .endm
#else

#define ASMMACRO(name, code...)						\
__asm__(".macro " #name "; " #code "; .endm");				\
									\
static inline void name(void)						\
{									\
	__asm__ __volatile__ (#name);					\
}

/*
 * MIPS R2 instruction hazard barrier.  Needs to be called as a subroutine.
 */
extern void mips_ihb(void);

#endif
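
/*
 * Illustrative sketch, not part of the original header: for the C case
 * above, ASMMACRO(_ssnop, sll $0, $0, 1) expands to roughly
 *
 *	__asm__(".macro _ssnop; sll $0, $0, 1; .endm");
 *
 *	static inline void _ssnop(void)
 *	{
 *		__asm__ __volatile__("_ssnop");
 *	}
 *
 * so every hazard barrier below is usable both as an assembler macro and
 * as a like-named C inline function.
 */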

ASMMACRO(_ssnop,
	 sll	$0, $0, 1
	)

ASMMACRO(_ehb,
	 sll	$0, $0, 3
	)

/*
 * TLB hazards
 */
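
/*
 * Hedged usage sketch, not part of the original header: the macros below
 * are meant to sit between a CP0/TLB operation and the first instruction
 * that depends on its effect.  A TLB update would pair them roughly as
 *
 *	write_c0_entryhi(entryhi);
 *	mtc0_tlbw_hazard();
 *	tlb_write_indexed();
 *	tlbw_use_hazard();
 *
 * (write_c0_entryhi() and tlb_write_indexed() from <asm/mipsregs.h>; the
 * exact sequence depends on the caller.)
 */
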
#if defined(CONFIG_CPU_MIPSR2)

/*
 * MIPSR2 defines ehb for hazard avoidance
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ehb
	)
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as an argument to inline assembler.  Gas, on the other
 * hand, has the annoying difference between la and dla, which are only
 * usable for 32-bit and 64-bit code respectively, so neither can be used
 * without conditional compilation.  The alternative is switching the
 * assembler to 64-bit code, which happens to work right even for 32-bit
 * code ...
 */
#define instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)
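
/*
 * Hedged usage note, not part of the original header: jr.hb jumps to the
 * label right after itself and clears instruction hazards on the way, so
 * instruction_hazard() is the barrier to run once code has been modified
 * and the caches written back, roughly
 *
 *	flush_icache_range(start, end);
 *	instruction_hazard();
 *
 * before executing the new instructions (sketch only; the real call sites
 * are in the MIPS cache handling code).
 */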

#elif defined(CONFIG_CPU_R10000)

/*
 * R10000 rocks - all hazards are handled in hardware, so this becomes a
 * no-brainer.
 */

ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_RM9000)

/*
 * RM9000 hazards.  When the JTLB is updated by tlbwi or tlbwr, a subsequent
 * use of the JTLB for instructions should not occur for 4 cpu cycles and use
 * for data translations should not occur for 3 cpu cycles.
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like R4000 for historic reasons
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#else

/*
 * Finally the catchall case for all other processors including R4000, R4400,
 * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400.  Other processors only have a single cycle
 * hazard so this is a nice trick to get optimal code for a range of
 * processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 nop; nop
	)
ASMMACRO(tlbw_use_hazard,
	 nop; nop; nop
	)
ASMMACRO(tlb_probe_hazard,
	 nop; nop; nop
	)
ASMMACRO(irq_enable_hazard,
	 _ssnop; _ssnop; _ssnop;
	)
ASMMACRO(irq_disable_hazard,
	 nop; nop; nop
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop;
	)
#define instruction_hazard() do { } while (0)

#endif


/* FPU hazards */

#if defined(CONFIG_CPU_SB1)
ASMMACRO(enable_fpu_hazard,
	 .set	push;
	 .set	mips64;
	 .set	noreorder;
	 _ssnop;
	 bnezl	$0, .+4;
	 _ssnop;
	 .set	pop
)
ASMMACRO(disable_fpu_hazard,
)

#elif defined(CONFIG_CPU_MIPSR2)
ASMMACRO(enable_fpu_hazard,
	 _ehb
)
ASMMACRO(disable_fpu_hazard,
	 _ehb
)
#else
ASMMACRO(enable_fpu_hazard,
	 nop; nop; nop; nop
)
ASMMACRO(disable_fpu_hazard,
	 _ehb
)
#endif
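
/*
 * Hedged usage note, not part of the original header: enable_fpu_hazard()
 * and disable_fpu_hazard() are meant to follow the Status register update
 * that flips the CU1 bit, before any instruction relies on the new
 * coprocessor 1 state, roughly
 *
 *	set_c0_status(ST0_CU1);
 *	enable_fpu_hazard();
 *
 * (set_c0_status() and ST0_CU1 from <asm/mipsregs.h>; sketch only, the
 * exact sequence is up to the caller.)
 */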

#endif /* _ASM_HAZARDS_H */