all-archs: consolidate .data section definition in asm-generic
[safe/jmp/linux-2.6] / include / asm-generic / unaligned.h
index c856a43..09ec447 100644 (file)
@@ -16,9 +16,9 @@
  * The main single-value unaligned transfer routines.
  */
 #define get_unaligned(ptr) \
-       ((__typeof__(*(ptr)))__get_unaligned((ptr), sizeof(*(ptr))))
+       __get_unaligned((ptr), sizeof(*(ptr)))
 #define put_unaligned(x,ptr) \
-       __put_unaligned((unsigned long)(x), (ptr), sizeof(*(ptr)))
+       __put_unaligned((__u64)(x), (ptr), sizeof(*(ptr)))
 
 /*
  * This function doesn't actually exist.  The idea is that when
@@ -36,19 +36,19 @@ struct __una_u16 { __u16 x __attribute__((packed)); };
  * Elemental unaligned loads 
  */
 
-static inline unsigned long __uldq(const __u64 *addr)
+static inline __u64 __uldq(const __u64 *addr)
 {
        const struct __una_u64 *ptr = (const struct __una_u64 *) addr;
        return ptr->x;
 }
 
-static inline unsigned long __uldl(const __u32 *addr)
+static inline __u32 __uldl(const __u32 *addr)
 {
        const struct __una_u32 *ptr = (const struct __una_u32 *) addr;
        return ptr->x;
 }
 
-static inline unsigned long __uldw(const __u16 *addr)
+static inline __u16 __uldw(const __u16 *addr)
 {
        const struct __una_u16 *ptr = (const struct __una_u16 *) addr;
        return ptr->x;
@@ -76,46 +76,47 @@ static inline void __ustw(__u16 val, __u16 *addr)
        ptr->x = val;
 }
 
-static inline unsigned long __get_unaligned(const void *ptr, size_t size)
-{
-       unsigned long val;
-       switch (size) {
-       case 1:
-               val = *(const __u8 *)ptr;
-               break;
-       case 2:
-               val = __uldw((const __u16 *)ptr);
-               break;
-       case 4:
-               val = __uldl((const __u32 *)ptr);
-               break;
-       case 8:
-               val = __uldq((const __u64 *)ptr);
-               break;
-       default:
-               bad_unaligned_access_length();
-       };
-       return val;
-}
-
-static inline void __put_unaligned(unsigned long val, void *ptr, size_t size)
-{
-       switch (size) {
-       case 1:
-               *(__u8 *)ptr = val;
-               break;
-       case 2:
-               __ustw(val, (__u16 *)ptr);
-               break;
-       case 4:
-               __ustl(val, (__u32 *)ptr);
-               break;
-       case 8:
-               __ustq(val, (__u64 *)ptr);
-               break;
-       default:
-               bad_unaligned_access_length();
-       };
-}
+#define __get_unaligned(ptr, size) ({          \
+       const void *__gu_p = (ptr);             \
+       __u64 __gu_val;                         \
+       switch ((size)) {                       \
+       case 1:                                 \
+               __gu_val = *(const __u8 *)__gu_p; \
+               break;                          \
+       case 2:                                 \
+               __gu_val = __uldw(__gu_p);      \
+               break;                          \
+       case 4:                                 \
+               __gu_val = __uldl(__gu_p);      \
+               break;                          \
+       case 8:                                 \
+               __gu_val = __uldq(__gu_p);      \
+               break;                          \
+       default:                                \
+               bad_unaligned_access_length();  \
+       }                                       \
+       (__typeof__(*(ptr)))__gu_val;           \
+})
+
+#define __put_unaligned(val, ptr, size)                \
+do {                                           \
+       void *__pu_p = (ptr);                   \
+       switch ((size)) {                       \
+       case 1:                                 \
+               *(__u8 *)__pu_p = (val);        \
+               break;                          \
+       case 2:                                 \
+               __ustw((val), __pu_p);          \
+               break;                          \
+       case 4:                                 \
+               __ustl((val), __pu_p);          \
+               break;                          \
+       case 8:                                 \
+               __ustq((val), __pu_p);          \
+               break;                          \
+       default:                                \
+               bad_unaligned_access_length();  \
+       }                                       \
+} while(0)
 
 #endif /* _ASM_GENERIC_UNALIGNED_H */