FRV: Change the timerfd syscalls to be the same as i386
[safe/jmp/linux-2.6] / include / asm-frv / system.h
index d2aea70..b400cea 100644 (file)
 #ifndef _ASM_SYSTEM_H
 #define _ASM_SYSTEM_H
 
-#include <linux/config.h> /* get configuration macros */
+#include <linux/types.h>
 #include <linux/linkage.h>
-#include <asm/atomic.h>
+#include <linux/kernel.h>
 
 struct thread_struct;
 
-#define prepare_to_switch()    do { } while(0)
-
 /*
  * switch_to(prev, next) should switch from task `prev' to `next'
  * `prev' will never be the same as `next'.
@@ -40,8 +38,84 @@ do {                                                                 \
 
 /*
  * interrupt flag manipulation
+ * - use virtual interrupt management since touching the PSR is slow
+ *   - ICC2.Z: T if interrupts virtually disabled
+ *   - ICC2.C: F if interrupts really disabled
+ * - if Z==1 upon interrupt:
+ *   - C is set to 0
+ *   - interrupts are really disabled
+ *   - entry.S returns immediately
+ * - uses TIHI (TRAP if Z==0 && C==0) #2 to really re-enable interrupts
+ *   - if taken, the trap:
+ *     - sets ICC2.C
+ *     - enables interrupts
  */
-#define local_irq_disable()                            \
+#define local_irq_disable()                                    \
+do {                                                           \
+       /* set Z flag, but don't change the C flag */           \
+       asm volatile("  andcc   gr0,gr0,gr0,icc2        \n"     \
+                    :                                          \
+                    :                                          \
+                    : "memory", "icc2"                         \
+                    );                                         \
+} while(0)
+
+#define local_irq_enable()                                     \
+do {                                                           \
+       /* clear Z flag and then test the C flag */             \
+       asm volatile("  oricc   gr0,#1,gr0,icc2         \n"     \
+                    "  tihi    icc2,gr0,#2             \n"     \
+                    :                                          \
+                    :                                          \
+                    : "memory", "icc2"                         \
+                    );                                         \
+} while(0)
+
+#define local_save_flags(flags)                                        \
+do {                                                           \
+       typecheck(unsigned long, flags);                        \
+       asm volatile("movsg ccr,%0"                             \
+                    : "=r"(flags)                              \
+                    :                                          \
+                    : "memory");                               \
+                                                               \
+       /* shift ICC2.Z to bit 0 */                             \
+       flags >>= 26;                                           \
+                                                               \
+       /* make flags 1 if interrupts disabled, 0 otherwise */  \
+       flags &= 1UL;                                           \
+} while(0)
+
+#define irqs_disabled() \
+       ({unsigned long flags; local_save_flags(flags); flags; })
+
+#define        local_irq_save(flags)                   \
+do {                                           \
+       typecheck(unsigned long, flags);        \
+       local_save_flags(flags);                \
+       local_irq_disable();                    \
+} while(0)
+
+#define        local_irq_restore(flags)                                        \
+do {                                                                   \
+       typecheck(unsigned long, flags);                                \
+                                                                       \
+       /* load the Z flag by turning 1 if disabled into 0 if disabled  \
+        * and thus setting the Z flag but not the C flag */            \
+       asm volatile("  xoricc  %0,#1,gr0,icc2          \n"             \
+                    /* then test Z=0 and C=0 */                        \
+                    "  tihi    icc2,gr0,#2             \n"             \
+                    :                                                  \
+                    : "r"(flags)                                       \
+                    : "memory", "icc2"                                 \
+                    );                                                 \
+                                                                       \
+} while(0)
+
+/*
+ * real interrupt flag manipulation
+ */
+#define __local_irq_disable()                          \
 do {                                                   \
        unsigned long psr;                              \
        asm volatile("  movsg   psr,%0          \n"     \
@@ -53,7 +127,7 @@ do {                                                 \
                     : "memory");                       \
 } while(0)
 
-#define local_irq_enable()                             \
+#define __local_irq_enable()                           \
 do {                                                   \
        unsigned long psr;                              \
        asm volatile("  movsg   psr,%0          \n"     \
@@ -64,7 +138,7 @@ do {                                                 \
                     : "memory");                       \
 } while(0)
 
-#define local_save_flags(flags)                        \
+#define __local_save_flags(flags)              \
 do {                                           \
        typecheck(unsigned long, flags);        \
        asm("movsg psr,%0"                      \
@@ -73,7 +147,7 @@ do {                                         \
            : "memory");                        \
 } while(0)
 
-#define        local_irq_save(flags)                           \
+#define        __local_irq_save(flags)                         \
 do {                                                   \
        unsigned long npsr;                             \
        typecheck(unsigned long, flags);                \
@@ -86,7 +160,7 @@ do {                                                 \
                     : "memory");                       \
 } while(0)
 
-#define        local_irq_restore(flags)                        \
+#define        __local_irq_restore(flags)                      \
 do {                                                   \
        typecheck(unsigned long, flags);                \
        asm volatile("  movgs   %0,psr          \n"     \
@@ -95,7 +169,7 @@ do {                                                 \
                     : "memory");                       \
 } while(0)
 
-#define irqs_disabled() \
+#define __irqs_disabled() \
        ((__get_PSR() & PSR_PIL) >= PSR_PIL_14)
 
 /*
@@ -106,7 +180,6 @@ do {                                                        \
 #define rmb()                  asm volatile ("membar" : : :"memory")
 #define wmb()                  asm volatile ("membar" : : :"memory")
 #define set_mb(var, value)     do { var = value; mb(); } while (0)
-#define set_wmb(var, value)    do { var = value; wmb(); } while (0)
 
 #define smp_mb()               mb()
 #define smp_rmb()              rmb()
@@ -125,4 +198,100 @@ extern void free_initmem(void);
 
 #define arch_align_stack(x) (x)
 
+/*****************************************************************************/
+/*
+ * compare and conditionally exchange value with memory
+ * - if (*ptr == test) then orig = *ptr; *ptr = new;
+ * - if (*ptr != test) then orig = *ptr;
+ */
+#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS
+
+#define cmpxchg(ptr, test, new)                                                        \
+({                                                                             \
+       __typeof__(ptr) __xg_ptr = (ptr);                                       \
+       __typeof__(*(ptr)) __xg_orig, __xg_tmp;                                 \
+       __typeof__(*(ptr)) __xg_test = (test);                                  \
+       __typeof__(*(ptr)) __xg_new = (new);                                    \
+                                                                               \
+       switch (sizeof(__xg_orig)) {                                            \
+       case 4:                                                                 \
+               asm volatile(                                                   \
+                       "0:                                             \n"     \
+                       "       orcc            gr0,gr0,gr0,icc3        \n"     \
+                       "       ckeq            icc3,cc7                \n"     \
+                       "       ld.p            %M0,%1                  \n"     \
+                       "       orcr            cc7,cc7,cc3             \n"     \
+                       "       sub%I4cc        %1,%4,%2,icc0           \n"     \
+                       "       bne             icc0,#0,1f              \n"     \
+                       "       cst.p           %3,%M0          ,cc3,#1 \n"     \
+                       "       corcc           gr29,gr29,gr0   ,cc3,#1 \n"     \
+                       "       beq             icc3,#0,0b              \n"     \
+                       "1:                                             \n"     \
+                       : "+U"(*__xg_ptr), "=&r"(__xg_orig), "=&r"(__xg_tmp)    \
+                       : "r"(__xg_new), "NPr"(__xg_test)                       \
+                       : "memory", "cc7", "cc3", "icc3", "icc0"                \
+                       );                                                      \
+               break;                                                          \
+                                                                               \
+       default:                                                                \
+               __xg_orig = 0;                                                  \
+               asm volatile("break");                                          \
+               break;                                                          \
+       }                                                                       \
+                                                                               \
+       __xg_orig;                                                              \
+})
+
+#else
+
+extern uint32_t __cmpxchg_32(uint32_t *v, uint32_t test, uint32_t new);
+
+#define cmpxchg(ptr, test, new)                                                        \
+({                                                                             \
+       __typeof__(ptr) __xg_ptr = (ptr);                                       \
+       __typeof__(*(ptr)) __xg_orig;                                           \
+       __typeof__(*(ptr)) __xg_test = (test);                                  \
+       __typeof__(*(ptr)) __xg_new = (new);                                    \
+                                                                               \
+       switch (sizeof(__xg_orig)) {                                            \
+       case 4: __xg_orig = (__force __typeof__(*ptr))                          \
+                       __cmpxchg_32((__force uint32_t *)__xg_ptr,              \
+                                        (__force uint32_t)__xg_test,           \
+                                        (__force uint32_t)__xg_new); break;    \
+       default:                                                                \
+               __xg_orig = 0;                                                  \
+               asm volatile("break");                                          \
+               break;                                                          \
+       }                                                                       \
+                                                                               \
+       __xg_orig;                                                              \
+})
+
+#endif
+
+#include <asm-generic/cmpxchg-local.h>
+
+static inline unsigned long __cmpxchg_local(volatile void *ptr,
+                                     unsigned long old,
+                                     unsigned long new, int size)
+{
+       switch (size) {
+       case 4:
+               return cmpxchg((unsigned long *)ptr, old, new);
+       default:
+               return __cmpxchg_local_generic(ptr, old, new, size);
+       }
+
+       return old;
+}
+
+/*
+ * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
+ * them available.
+ */
+#define cmpxchg_local(ptr, o, n)                                       \
+       ((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o), \
+                       (unsigned long)(n), sizeof(*(ptr))))
+#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+
 #endif /* _ASM_SYSTEM_H */