/* arch/sh/include/asm/system_32.h */
#ifndef __ASM_SH_SYSTEM_32_H
#define __ASM_SH_SYSTEM_32_H

#include <linux/types.h>

#ifdef CONFIG_SH_DSP

#define is_dsp_enabled(tsk)                                             \
        (!!(tsk->thread.dsp_status.status & SR_DSP))

#define __restore_dsp(tsk)                                              \
do {                                                                    \
        register u32 *__ts2 __asm__ ("r2") =                            \
                        (u32 *)&tsk->thread.dsp_status;                 \
        __asm__ __volatile__ (                                          \
                ".balign 4\n\t"                                         \
                "movs.l @r2+, a1\n\t"                                   \
                "movs.l @r2+, a0g\n\t"                                  \
                "movs.l @r2+, a1g\n\t"                                  \
                "movs.l @r2+, m0\n\t"                                   \
                "movs.l @r2+, m1\n\t"                                   \
                "movs.l @r2+, a0\n\t"                                   \
                "movs.l @r2+, x0\n\t"                                   \
                "movs.l @r2+, x1\n\t"                                   \
                "movs.l @r2+, y0\n\t"                                   \
                "movs.l @r2+, y1\n\t"                                   \
                "lds.l  @r2+, dsr\n\t"                                  \
                "ldc.l  @r2+, rs\n\t"                                   \
                "ldc.l  @r2+, re\n\t"                                   \
                "ldc.l  @r2+, mod\n\t"                                  \
                : : "r" (__ts2));                                       \
} while (0)


#define __save_dsp(tsk)                                                 \
do {                                                                    \
        register u32 *__ts2 __asm__ ("r2") =                            \
                        (u32 *)&tsk->thread.dsp_status + 14;            \
                                                                        \
        __asm__ __volatile__ (                                          \
                ".balign 4\n\t"                                         \
                "stc.l  mod, @-r2\n\t"                                  \
                "stc.l  re, @-r2\n\t"                                   \
                "stc.l  rs, @-r2\n\t"                                   \
                "sts.l  dsr, @-r2\n\t"                                  \
                "sts.l  y1, @-r2\n\t"                                   \
                "sts.l  y0, @-r2\n\t"                                   \
                "sts.l  x1, @-r2\n\t"                                   \
                "sts.l  x0, @-r2\n\t"                                   \
                "sts.l  a0, @-r2\n\t"                                   \
                ".word  0xf653          ! movs.l        a1, @-r2\n\t"   \
                ".word  0xf6f3          ! movs.l        a0g, @-r2\n\t"  \
                ".word  0xf6d3          ! movs.l        a1g, @-r2\n\t"  \
                ".word  0xf6c3          ! movs.l        m0, @-r2\n\t"   \
                ".word  0xf6e3          ! movs.l        m1, @-r2\n\t"   \
                : : "r" (__ts2));                                       \
} while (0)

#else

#define is_dsp_enabled(tsk)     (0)
#define __save_dsp(tsk)         do { } while (0)
#define __restore_dsp(tsk)      do { } while (0)
#endif
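
/*
 * Layout sketch (illustrative; the struct name below is hypothetical,
 * see the real definition in <asm/processor_32.h>): the macros above
 * assume thread.dsp_status begins with 14 u32 save slots, plus a
 * status word tested by is_dsp_enabled().  __save_dsp() points r2 one
 * past the save area (+ 14) and stores with pre-decrement, while
 * __restore_dsp() reads forward from the start with post-increment.
 *
 *      struct example_dsp_state {
 *              u32 dsp_regs[14];       in the order __restore_dsp()
 *                                      reads them: a1, a0g, a1g, m0,
 *                                      m1, a0, x0, x1, y0, y1, dsr,
 *                                      rs, re, mod
 *              u32 status;             holds the SR_DSP bit
 *      };
 *
 * The ".word" constants in __save_dsp() hand-assemble DSP movs.l
 * stores for toolchains that cannot emit them; each trailing "!"
 * comment gives the intended mnemonic.
 */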

struct task_struct *__switch_to(struct task_struct *prev,
                                struct task_struct *next);

/*
 * switch_to(prev, next, last) switches from task "prev" to task "next",
 * and stores in "last" the task that was running just before control
 * returned to the current task.
 */
#define switch_to(prev, next, last)                             \
do {                                                            \
        register u32 *__ts1 __asm__ ("r1");                     \
        register u32 *__ts2 __asm__ ("r2");                     \
        register u32 *__ts4 __asm__ ("r4");                     \
        register u32 *__ts5 __asm__ ("r5");                     \
        register u32 *__ts6 __asm__ ("r6");                     \
        register u32 __ts7 __asm__ ("r7");                      \
        struct task_struct *__last;                             \
                                                                \
        if (is_dsp_enabled(prev))                               \
                __save_dsp(prev);                               \
                                                                \
        __ts1 = (u32 *)&prev->thread.sp;                        \
        __ts2 = (u32 *)&prev->thread.pc;                        \
        __ts4 = (u32 *)prev;                                    \
        __ts5 = (u32 *)next;                                    \
        __ts6 = (u32 *)&next->thread.sp;                        \
        __ts7 = next->thread.pc;                                \
                                                                \
        __asm__ __volatile__ (                                  \
                ".balign 4\n\t"                                 \
                "stc.l  gbr, @-r15\n\t"                         \
                "sts.l  pr, @-r15\n\t"                          \
                "mov.l  r8, @-r15\n\t"                          \
                "mov.l  r9, @-r15\n\t"                          \
                "mov.l  r10, @-r15\n\t"                         \
                "mov.l  r11, @-r15\n\t"                         \
                "mov.l  r12, @-r15\n\t"                         \
                "mov.l  r13, @-r15\n\t"                         \
                "mov.l  r14, @-r15\n\t"                         \
                "mov.l  r15, @r1\t! save SP\n\t"                \
                "mov.l  @r6, r15\t! change to new stack\n\t"    \
                "mova   1f, %0\n\t"                             \
                "mov.l  %0, @r2\t! save PC\n\t"                 \
                "mov.l  2f, %0\n\t"                             \
                "jmp    @%0\t! call __switch_to\n\t"            \
                " lds   r7, pr\t!  with return to new PC\n\t"   \
                ".balign        4\n"                            \
                "2:\n\t"                                        \
                ".long  __switch_to\n"                          \
                "1:\n\t"                                        \
                "mov.l  @r15+, r14\n\t"                         \
                "mov.l  @r15+, r13\n\t"                         \
                "mov.l  @r15+, r12\n\t"                         \
                "mov.l  @r15+, r11\n\t"                         \
                "mov.l  @r15+, r10\n\t"                         \
                "mov.l  @r15+, r9\n\t"                          \
                "mov.l  @r15+, r8\n\t"                          \
                "lds.l  @r15+, pr\n\t"                          \
                "ldc.l  @r15+, gbr\n\t"                         \
                : "=z" (__last)                                 \
                : "r" (__ts1), "r" (__ts2), "r" (__ts4),        \
                  "r" (__ts5), "r" (__ts6), "r" (__ts7)         \
                : "r3", "t");                                   \
                                                                \
        last = __last;                                          \
} while (0)

#define finish_arch_switch(prev)                                \
do {                                                            \
        if (is_dsp_enabled(prev))                               \
                __restore_dsp(prev);                            \
} while (0)
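
/*
 * Usage sketch (illustrative, not part of this header): the core
 * scheduler's context-switch path expands to roughly the following,
 * where "last" receives the task that was running just before this
 * task was switched back in:
 *
 *      static void example_context_switch(struct task_struct *prev,
 *                                         struct task_struct *next)
 *      {
 *              struct task_struct *last;
 *
 *              switch_to(prev, next, last);
 *              finish_arch_switch(last);
 *      }
 */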

#define __uses_jump_to_uncached \
        noinline __attribute__ ((__section__ (".uncached.text")))

/*
 * Jump to uncached area.
 * When handling TLB or caches, we need to do it from an uncached area.
 */
#define jump_to_uncached()                      \
do {                                            \
        unsigned long __dummy;                  \
                                                \
        __asm__ __volatile__(                   \
                "mova   1f, %0\n\t"             \
                "add    %1, %0\n\t"             \
                "jmp    @%0\n\t"                \
                " nop\n\t"                      \
                ".balign 4\n"                   \
                "1:"                            \
                : "=&z" (__dummy)               \
                : "r" (cached_to_uncached));    \
} while (0)

/*
 * Back to cached area.
 */
#define back_to_cached()                                \
do {                                                    \
        unsigned long __dummy;                          \
        ctrl_barrier();                                 \
        __asm__ __volatile__(                           \
                "mov.l  1f, %0\n\t"                     \
                "jmp    @%0\n\t"                        \
                " nop\n\t"                              \
                ".balign 4\n"                           \
                "1:     .long 2f\n"                     \
                "2:"                                    \
                : "=&r" (__dummy));                     \
} while (0)
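
/*
 * Usage sketch (illustrative; the function names are made up): code
 * that rewrites TLB or cache state runs from the uncached shadow of
 * its own text, bracketed by the two macros above and tagged with
 * __uses_jump_to_uncached so it lands in .uncached.text:
 *
 *      static void __uses_jump_to_uncached example_cache_op(void)
 *      {
 *              jump_to_uncached();
 *              example_write_cache_regs();     hypothetical pokes
 *              back_to_cached();
 *      }
 */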

#ifdef CONFIG_CPU_HAS_SR_RB
#define lookup_exception_vector()       \
({                                      \
        unsigned long _vec;             \
                                        \
        __asm__ __volatile__ (          \
                "stc r2_bank, %0\n\t"   \
                : "=r" (_vec)           \
        );                              \
                                        \
        _vec;                           \
})
#else
#define lookup_exception_vector()       \
({                                      \
        unsigned long _vec;             \
        __asm__ __volatile__ (          \
                "mov r4, %0\n\t"        \
                : "=r" (_vec)           \
        );                              \
                                        \
        _vec;                           \
})
#endif
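
/*
 * Usage sketch (illustrative): exception handlers read the trap vector
 * early, before it can be clobbered:
 *
 *      unsigned long vec = lookup_exception_vector();
 *
 * With CONFIG_CPU_HAS_SR_RB the low-level entry code leaves the vector
 * in the banked r2 (r2_bank); without register banks it is handed over
 * in r4, which is why the two variants read different registers.
 */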

int handle_unaligned_access(insn_size_t instruction, struct pt_regs *regs,
                            struct mem_access *ma);

asmlinkage void do_address_error(struct pt_regs *regs,
                                 unsigned long writeaccess,
                                 unsigned long address);
asmlinkage void do_divide_error(unsigned long r4, unsigned long r5,
                                unsigned long r6, unsigned long r7,
                                struct pt_regs __regs);
asmlinkage void do_reserved_inst(unsigned long r4, unsigned long r5,
                                 unsigned long r6, unsigned long r7,
                                 struct pt_regs __regs);
asmlinkage void do_illegal_slot_inst(unsigned long r4, unsigned long r5,
                                     unsigned long r6, unsigned long r7,
                                     struct pt_regs __regs);
asmlinkage void do_exception_error(unsigned long r4, unsigned long r5,
                                   unsigned long r6, unsigned long r7,
                                   struct pt_regs __regs);

#endif /* __ASM_SH_SYSTEM_32_H */