#ifndef _LINUX_BYTEORDER_SWAB_H
#define _LINUX_BYTEORDER_SWAB_H

/*
 * linux/byteorder/swab.h
 * Byte-swapping, independent of CPU endianness
 *	swabXX[ps]?(foo)
 *
 * Francois-Rene Rideau <fare@tunes.org> 19971205
 *    separated swab functions from cpu_to_XX,
 *    to clean up support for bizarre-endian architectures.
 *
 * See asm-i386/byteorder.h and the like for examples of how to provide
 * architecture-dependent optimized versions.
 *
 */

/* Casts are necessary for constants, because we never know for sure
 * how U/UL/ULL map to __u16, __u32, __u64. At least not in a portable way.
 */
#define ___swab16(x) \
	((__u16)( \
		(((__u16)(x) & (__u16)0x00ffU) << 8) | \
		(((__u16)(x) & (__u16)0xff00U) >> 8) ))
#define ___swab32(x) \
	((__u32)( \
		(((__u32)(x) & (__u32)0x000000ffUL) << 24) | \
		(((__u32)(x) & (__u32)0x0000ff00UL) <<  8) | \
		(((__u32)(x) & (__u32)0x00ff0000UL) >>  8) | \
		(((__u32)(x) & (__u32)0xff000000UL) >> 24) ))
#define ___swab64(x) \
	((__u64)( \
		(__u64)(((__u64)(x) & (__u64)0x00000000000000ffULL) << 56) | \
		(__u64)(((__u64)(x) & (__u64)0x000000000000ff00ULL) << 40) | \
		(__u64)(((__u64)(x) & (__u64)0x0000000000ff0000ULL) << 24) | \
		(__u64)(((__u64)(x) & (__u64)0x00000000ff000000ULL) <<  8) | \
		(__u64)(((__u64)(x) & (__u64)0x000000ff00000000ULL) >>  8) | \
		(__u64)(((__u64)(x) & (__u64)0x0000ff0000000000ULL) >> 24) | \
		(__u64)(((__u64)(x) & (__u64)0x00ff000000000000ULL) >> 40) | \
		(__u64)(((__u64)(x) & (__u64)0xff00000000000000ULL) >> 56) ))

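/*
 * For reference, assuming the usual 16/32/64-bit mappings of
 * __u16/__u32/__u64, the macros above give, e.g.:
 *
 *	___swab16(0x1234)             == 0x3412
 *	___swab32(0x12345678)         == 0x78563412
 *	___swab64(0x123456789abcdef0) == 0xf0debc9a78563412
 */
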
/*
 * provide defaults when no architecture-specific optimization is detected
 */
#ifndef __arch__swab16
#  define __arch__swab16(x) ___swab16(x)
#endif
#ifndef __arch__swab32
#  define __arch__swab32(x) ___swab32(x)
#endif
#ifndef __arch__swab64
#  define __arch__swab64(x) ___swab64(x)
#endif

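/*
 * The p and s variants below mirror the plain swab macros: __swabXXp(p)
 * byte-swaps the value read through pointer p and returns it, while
 * __swabXXs(p) swaps the value in place through p.
 */
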
#ifndef __arch__swab16p
#  define __arch__swab16p(x) __swab16(*(x))
#endif
#ifndef __arch__swab32p
#  define __arch__swab32p(x) __swab32(*(x))
#endif
#ifndef __arch__swab64p
#  define __arch__swab64p(x) __swab64(*(x))
#endif

#ifndef __arch__swab16s
#  define __arch__swab16s(x) do { *(x) = __swab16p((x)); } while (0)
#endif
#ifndef __arch__swab32s
#  define __arch__swab32s(x) do { *(x) = __swab32p((x)); } while (0)
#endif
#ifndef __arch__swab64s
#  define __arch__swab64s(x) do { *(x) = __swab64p((x)); } while (0)
#endif


/*
 * Allow constant folding
 */
#if defined(__GNUC__) && (__GNUC__ >= 2) && defined(__OPTIMIZE__)
#  define __swab16(x) \
(__builtin_constant_p((__u16)(x)) ? \
 ___swab16((x)) : \
 __fswab16((x)))
#  define __swab32(x) \
(__builtin_constant_p((__u32)(x)) ? \
 ___swab32((x)) : \
 __fswab32((x)))
#  define __swab64(x) \
(__builtin_constant_p((__u64)(x)) ? \
 ___swab64((x)) : \
 __fswab64((x)))
#else
#  define __swab16(x) __fswab16(x)
#  define __swab32(x) __fswab32(x)
#  define __swab64(x) __fswab64(x)
#endif /* OPTIMIZE */

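/*
 * Example of the dispatch above: with gcc and optimization enabled,
 * __swab32(0x12345678) routes through ___swab32() and folds to the constant
 * 0x78563412, while __swab32(val) for a non-constant val compiles to a call
 * to (or inlining of) __fswab32() below.
 */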

static __inline__ __attribute__((const)) __u16 __fswab16(__u16 x)
{
	return __arch__swab16(x);
}
static __inline__ __u16 __swab16p(const __u16 *x)
{
	return __arch__swab16p(x);
}
static __inline__ void __swab16s(__u16 *addr)
{
	__arch__swab16s(addr);
}

static __inline__ __attribute__((const)) __u32 __fswab32(__u32 x)
{
	return __arch__swab32(x);
}
static __inline__ __u32 __swab32p(const __u32 *x)
{
	return __arch__swab32p(x);
}
static __inline__ void __swab32s(__u32 *addr)
{
	__arch__swab32s(addr);
}

static __inline__ __attribute__((const)) __u64 __fswab64(__u64 x)
{
#  ifdef __SWAB_64_THRU_32__
	/* No native 64-bit swab: swap each 32-bit half, then exchange the halves */
	__u32 h = x >> 32;
	__u32 l = x & ((1ULL<<32)-1);
	return (((__u64)__swab32(l)) << 32) | ((__u64)(__swab32(h)));
#  else
	return __arch__swab64(x);
#  endif
}
static __inline__ __u64 __swab64p(const __u64 *x)
{
	return __arch__swab64p(x);
}
static __inline__ void __swab64s(__u64 *addr)
{
	__arch__swab64s(addr);
}

#if defined(__KERNEL__)
#define swab16 __swab16
#define swab32 __swab32
#define swab64 __swab64
#define swab16p __swab16p
#define swab32p __swab32p
#define swab64p __swab64p
#define swab16s __swab16s
#define swab32s __swab32s
#define swab64s __swab64s
#endif

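/*
 * Usage sketch (the names below are made up for illustration): swap a value,
 * swap through a pointer, and swap in place.
 */
#if 0	/* illustration only, never compiled */
static __inline__ void __swab_usage_example(void)
{
	__u32 wire = 0x12345678;
	__u32 host;

	host = __swab32(wire);		/* host == 0x78563412, wire unchanged */
	host = __swab32p(&wire);	/* read through the pointer, return swapped */
	__swab32s(&wire);		/* swap in place: wire becomes 0x78563412 */
}
#endif
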
#endif /* _LINUX_BYTEORDER_SWAB_H */