/****************************************************************************
 ****************************************************************************
 ***
 ***   This header was automatically generated from a Linux kernel header
 ***   of the same name, to make information necessary for userspace to
 ***   call into the kernel available to libc. It contains only constants,
 ***   structures, and macros generated from the original header, and thus,
 ***   contains no copyrightable information.
 ***
 ****************************************************************************
 ****************************************************************************/
#ifndef _LINUX_BYTEORDER_SWAB_H
#define _LINUX_BYTEORDER_SWAB_H

#include <linux/compiler.h>

/* Generic byte-swap macros. The statement expressions copy the argument
 * into a temporary first, so it is evaluated only once. */
#define ___swab16(x) ({ __u16 __x = (x); ((__u16)( (((__u16)(__x) & (__u16)0x00ffU) << 8) | (((__u16)(__x) & (__u16)0xff00U) >> 8) )); })

#define ___swab32(x) ({ __u32 __x = (x); ((__u32)( (((__u32)(__x) & (__u32)0x000000ffUL) << 24) | (((__u32)(__x) & (__u32)0x0000ff00UL) << 8) | (((__u32)(__x) & (__u32)0x00ff0000UL) >> 8) | (((__u32)(__x) & (__u32)0xff000000UL) >> 24) )); })

#define ___swab64(x) ({ __u64 __x = (x); ((__u64)( (__u64)(((__u64)(__x) & (__u64)0x00000000000000ffULL) << 56) | (__u64)(((__u64)(__x) & (__u64)0x000000000000ff00ULL) << 40) | (__u64)(((__u64)(__x) & (__u64)0x0000000000ff0000ULL) << 24) | (__u64)(((__u64)(__x) & (__u64)0x00000000ff000000ULL) << 8) | (__u64)(((__u64)(__x) & (__u64)0x000000ff00000000ULL) >> 8) | (__u64)(((__u64)(__x) & (__u64)0x0000ff0000000000ULL) >> 24) | (__u64)(((__u64)(__x) & (__u64)0x00ff000000000000ULL) >> 40) | (__u64)(((__u64)(__x) & (__u64)0xff00000000000000ULL) >> 56) )); })

/* Constant-expression variants: usable where a statement expression is
 * not (e.g. static initializers), but they evaluate the argument more
 * than once. */
#define ___constant_swab16(x) ((__u16)( (((__u16)(x) & (__u16)0x00ffU) << 8) | (((__u16)(x) & (__u16)0xff00U) >> 8) ))
#define ___constant_swab32(x) ((__u32)( (((__u32)(x) & (__u32)0x000000ffUL) << 24) | (((__u32)(x) & (__u32)0x0000ff00UL) << 8) | (((__u32)(x) & (__u32)0x00ff0000UL) >> 8) | (((__u32)(x) & (__u32)0xff000000UL) >> 24) ))
#define ___constant_swab64(x) ((__u64)( (__u64)(((__u64)(x) & (__u64)0x00000000000000ffULL) << 56) | (__u64)(((__u64)(x) & (__u64)0x000000000000ff00ULL) << 40) | (__u64)(((__u64)(x) & (__u64)0x0000000000ff0000ULL) << 24) | (__u64)(((__u64)(x) & (__u64)0x00000000ff000000ULL) << 8) | (__u64)(((__u64)(x) & (__u64)0x000000ff00000000ULL) >> 8) | (__u64)(((__u64)(x) & (__u64)0x0000ff0000000000ULL) >> 24) | (__u64)(((__u64)(x) & (__u64)0x00ff000000000000ULL) >> 40) | (__u64)(((__u64)(x) & (__u64)0xff00000000000000ULL) >> 56) ))

/* Architectures may predefine optimized __arch__swab*() versions;
 * otherwise fall back to the generic macros above. */
#ifndef __arch__swab16
#define __arch__swab16(x) ({ __u16 __tmp = (x) ; ___swab16(__tmp); })
#endif
#ifndef __arch__swab32
#define __arch__swab32(x) ({ __u32 __tmp = (x) ; ___swab32(__tmp); })
#endif
#ifndef __arch__swab64
#define __arch__swab64(x) ({ __u64 __tmp = (x) ; ___swab64(__tmp); })
#endif

/* "p" variants swap a value loaded through a pointer. */
#ifndef __arch__swab16p
#define __arch__swab16p(x) __arch__swab16(*(x))
#endif
#ifndef __arch__swab32p
#define __arch__swab32p(x) __arch__swab32(*(x))
#endif
#ifndef __arch__swab64p
#define __arch__swab64p(x) __arch__swab64(*(x))
#endif

/* "s" variants swap a value in place through a pointer. */
#ifndef __arch__swab16s
#define __arch__swab16s(x) do { *(x) = __arch__swab16p((x)); } while (0)
#endif
#ifndef __arch__swab32s
#define __arch__swab32s(x) do { *(x) = __arch__swab32p((x)); } while (0)
#endif
#ifndef __arch__swab64s
#define __arch__swab64s(x) do { *(x) = __arch__swab64p((x)); } while (0)
#endif

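/*
 * For example (illustrative only): given __u32 v = 0xAABBCCDD,
 *   __arch__swab32(v)   evaluates to 0xDDCCBBAA,
 *   __arch__swab32p(&v) swaps the value read through the pointer, and
 *   __arch__swab32s(&v) stores the swapped value back into v.
 */
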
/* Public __swab*() entry points. Under an optimizing GCC build,
 * __builtin_constant_p() routes compile-time constants to the inline
 * ___swab*() macros, which the optimizer folds; runtime values call the
 * __fswab*() helpers, which this generated header expects to be defined
 * elsewhere (they were inline functions in the original kernel header). */
#if defined(__GNUC__) && defined(__OPTIMIZE__)
#define __swab16(x) (__builtin_constant_p((__u16)(x)) ? ___swab16((x)) : __fswab16((x)))
#define __swab32(x) (__builtin_constant_p((__u32)(x)) ? ___swab32((x)) : __fswab32((x)))
#define __swab64(x) (__builtin_constant_p((__u64)(x)) ? ___swab64((x)) : __fswab64((x)))
#else
#define __swab16(x) __fswab16(x)
#define __swab32(x) __fswab32(x)
#define __swab64(x) __fswab64(x)
#endif

/* The original header provided 64-bit fallbacks here; only the empty
 * conditional skeleton survived header generation. */
#ifdef __BYTEORDER_HAS_U64__
#ifdef __SWAB_64_THRU_32__
#else
#endif
#endif
#endif /* _LINUX_BYTEORDER_SWAB_H */
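
/*
 * Usage sketch, assuming an optimizing GCC build and a __fswab32()
 * definition supplied by the including byteorder header:
 *
 *   __u32 c = __swab32(0x12345678UL);  // constant: folds to 0x78563412
 *   __u32 r = __swab32(read_le32());   // non-constant: calls __fswab32()
 *
 * where read_le32() stands in for any function returning a runtime __u32.
 */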