      1 /* lzo_func.h -- functions
      2 
      3    This file is part of the LZO real-time data compression library.
      4 
      5    Copyright (C) 1996-2014 Markus Franz Xaver Johannes Oberhumer
      6    All Rights Reserved.
      7 
      8    The LZO library is free software; you can redistribute it and/or
      9    modify it under the terms of the GNU General Public License as
     10    published by the Free Software Foundation; either version 2 of
     11    the License, or (at your option) any later version.
     12 
     13    The LZO library is distributed in the hope that it will be useful,
     14    but WITHOUT ANY WARRANTY; without even the implied warranty of
     15    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
     16    GNU General Public License for more details.
     17 
     18    You should have received a copy of the GNU General Public License
     19    along with the LZO library; see the file COPYING.
     20    If not, write to the Free Software Foundation, Inc.,
     21    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
     22 
     23    Markus F.X.J. Oberhumer
     24    <markus (at) oberhumer.com>
     25    http://www.oberhumer.com/opensource/lzo/
     26  */
     27 
     28 
     29 /* WARNING: this file should *not* be used by applications. It is
     30    part of the implementation of the library and is subject
     31    to change.
     32  */
     33 
     34 
     35 #ifndef __LZO_FUNC_H
     36 #define __LZO_FUNC_H 1
     37 
     38 
     39 /***********************************************************************
     40 // bitops
     41 ************************************************************************/
     42 
/* Pick exactly one bitscan backend unless the build configuration already
 * chose one: inline asm only for old GCC (< 4.0) on amd64, GCC-style
 * builtins for clang / gcc >= 3.4 / icc / llvm, and the _BitScan*
 * intrinsics for MSC or Intel-MSC on Win32/Win64. */
#if !defined(LZO_BITOPS_USE_ASM_BITSCAN) && !defined(LZO_BITOPS_USE_GNUC_BITSCAN) && !defined(LZO_BITOPS_USE_MSC_BITSCAN)
#if 1 && (LZO_ARCH_AMD64) && (LZO_CC_GNUC && (LZO_CC_GNUC < 0x040000ul)) && (LZO_ASM_SYNTAX_GNUC)
#define LZO_BITOPS_USE_ASM_BITSCAN 1
#elif (LZO_CC_CLANG || (LZO_CC_GNUC >= 0x030400ul) || (LZO_CC_INTELC_GNUC && (__INTEL_COMPILER >= 1000)) || (LZO_CC_LLVM && (!defined(__llvm_tools_version__) || (__llvm_tools_version__+0 >= 0x010500ul))))
#define LZO_BITOPS_USE_GNUC_BITSCAN 1
#elif (LZO_OS_WIN32 || LZO_OS_WIN64) && ((LZO_CC_INTELC_MSC && (__INTEL_COMPILER >= 1010)) || (LZO_CC_MSC && (_MSC_VER >= 1400)))
#define LZO_BITOPS_USE_MSC_BITSCAN 1
/* declare the MSC intrinsics and force them to compile as intrinsics */
#if (LZO_CC_MSC) && (LZO_ARCH_AMD64 || LZO_ARCH_I386)
#include <intrin.h>
#endif
#if (LZO_CC_MSC) && (LZO_ARCH_AMD64 || LZO_ARCH_I386)
#pragma intrinsic(_BitScanReverse)
#pragma intrinsic(_BitScanForward)
#endif
/* the 64-bit scans exist only on amd64 */
#if (LZO_CC_MSC) && (LZO_ARCH_AMD64)
#pragma intrinsic(_BitScanReverse64)
#pragma intrinsic(_BitScanForward64)
#endif
#endif
#endif
     63 
/* Count leading zeros of a 32-bit value.
 * Each usable backend also #defines lzo_bitops_ctlz32 so callers can test
 * macro availability; with no backend the function returns 0 and the macro
 * stays undefined.  NOTE(review): bsr/_BitScanReverse/__builtin_clz leave
 * the result undefined for v == 0 — callers presumably pass v != 0. */
__lzo_static_forceinline unsigned lzo_bitops_ctlz32_func(lzo_uint32_t v)
{
#if (LZO_BITOPS_USE_MSC_BITSCAN) && (LZO_ARCH_AMD64 || LZO_ARCH_I386)
    /* _BitScanReverse gives the highest set-bit index; xor 31 converts to clz */
    unsigned long r; (void) _BitScanReverse(&r, v); return (unsigned) r ^ 31;
#define lzo_bitops_ctlz32(v)    lzo_bitops_ctlz32_func(v)
#elif (LZO_BITOPS_USE_ASM_BITSCAN) && (LZO_ARCH_AMD64 || LZO_ARCH_I386) && (LZO_ASM_SYNTAX_GNUC)
    lzo_uint32_t r;
    __asm__("bsr %1,%0" : "=r" (r) : "rm" (v) __LZO_ASM_CLOBBER_LIST_CC);
    return (unsigned) r ^ 31;
#define lzo_bitops_ctlz32(v)    lzo_bitops_ctlz32_func(v)
#elif (LZO_BITOPS_USE_GNUC_BITSCAN) && (LZO_SIZEOF_INT == 4)
    unsigned r; r = (unsigned) __builtin_clz(v); return r;
#define lzo_bitops_ctlz32(v)    ((unsigned) __builtin_clz(v))
#else
    /* no fast backend available */
    LZO_UNUSED(v); return 0;
#endif
}
     81 
#if defined(lzo_uint64_t)
/* Count leading zeros of a 64-bit value; mirrors lzo_bitops_ctlz32_func.
 * Compiled only when a 64-bit integer type exists; result undefined for
 * v == 0 in all hardware-backed branches. */
__lzo_static_forceinline unsigned lzo_bitops_ctlz64_func(lzo_uint64_t v)
{
#if (LZO_BITOPS_USE_MSC_BITSCAN) && (LZO_ARCH_AMD64)
    /* highest set-bit index xor 63 == count of leading zeros */
    unsigned long r; (void) _BitScanReverse64(&r, v); return (unsigned) r ^ 63;
#define lzo_bitops_ctlz64(v)    lzo_bitops_ctlz64_func(v)
#elif (LZO_BITOPS_USE_ASM_BITSCAN) && (LZO_ARCH_AMD64) && (LZO_ASM_SYNTAX_GNUC)
    lzo_uint64_t r;
    __asm__("bsr %1,%0" : "=r" (r) : "rm" (v) __LZO_ASM_CLOBBER_LIST_CC);
    return (unsigned) r ^ 63;
#define lzo_bitops_ctlz64(v)    lzo_bitops_ctlz64_func(v)
#elif (LZO_BITOPS_USE_GNUC_BITSCAN) && (LZO_SIZEOF_LONG == 8) && (LZO_WORDSIZE >= 8)
    unsigned r; r = (unsigned) __builtin_clzl(v); return r;
#define lzo_bitops_ctlz64(v)    ((unsigned) __builtin_clzl(v))
#elif (LZO_BITOPS_USE_GNUC_BITSCAN) && (LZO_SIZEOF_LONG_LONG == 8) && (LZO_WORDSIZE >= 8)
    unsigned r; r = (unsigned) __builtin_clzll(v); return r;
#define lzo_bitops_ctlz64(v)    ((unsigned) __builtin_clzll(v))
#else
    /* no fast backend available */
    LZO_UNUSED(v); return 0;
#endif
}
#endif
    104 
/* Count trailing zeros of a 32-bit value (index of the lowest set bit).
 * Same backend/macro pattern as lzo_bitops_ctlz32_func; result undefined
 * for v == 0 in the bsf/_BitScanForward/__builtin_ctz branches. */
__lzo_static_forceinline unsigned lzo_bitops_cttz32_func(lzo_uint32_t v)
{
#if (LZO_BITOPS_USE_MSC_BITSCAN) && (LZO_ARCH_AMD64 || LZO_ARCH_I386)
    unsigned long r; (void) _BitScanForward(&r, v); return (unsigned) r;
#define lzo_bitops_cttz32(v)    lzo_bitops_cttz32_func(v)
#elif (LZO_BITOPS_USE_ASM_BITSCAN) && (LZO_ARCH_AMD64 || LZO_ARCH_I386) && (LZO_ASM_SYNTAX_GNUC)
    lzo_uint32_t r;
    __asm__("bsf %1,%0" : "=r" (r) : "rm" (v) __LZO_ASM_CLOBBER_LIST_CC);
    return (unsigned) r;
#define lzo_bitops_cttz32(v)    lzo_bitops_cttz32_func(v)
#elif (LZO_BITOPS_USE_GNUC_BITSCAN) && (LZO_SIZEOF_INT >= 4)
    unsigned r; r = (unsigned) __builtin_ctz(v); return r;
#define lzo_bitops_cttz32(v)    ((unsigned) __builtin_ctz(v))
#else
    /* no fast backend available */
    LZO_UNUSED(v); return 0;
#endif
}
    122 
#if defined(lzo_uint64_t)
/* Count trailing zeros of a 64-bit value; mirrors lzo_bitops_cttz32_func.
 * Compiled only when a 64-bit integer type exists; result undefined for
 * v == 0 in all hardware-backed branches. */
__lzo_static_forceinline unsigned lzo_bitops_cttz64_func(lzo_uint64_t v)
{
#if (LZO_BITOPS_USE_MSC_BITSCAN) && (LZO_ARCH_AMD64)
    unsigned long r; (void) _BitScanForward64(&r, v); return (unsigned) r;
#define lzo_bitops_cttz64(v)    lzo_bitops_cttz64_func(v)
#elif (LZO_BITOPS_USE_ASM_BITSCAN) && (LZO_ARCH_AMD64) && (LZO_ASM_SYNTAX_GNUC)
    lzo_uint64_t r;
    __asm__("bsf %1,%0" : "=r" (r) : "rm" (v) __LZO_ASM_CLOBBER_LIST_CC);
    return (unsigned) r;
#define lzo_bitops_cttz64(v)    lzo_bitops_cttz64_func(v)
#elif (LZO_BITOPS_USE_GNUC_BITSCAN) && (LZO_SIZEOF_LONG >= 8) && (LZO_WORDSIZE >= 8)
    unsigned r; r = (unsigned) __builtin_ctzl(v); return r;
#define lzo_bitops_cttz64(v)    ((unsigned) __builtin_ctzl(v))
#elif (LZO_BITOPS_USE_GNUC_BITSCAN) && (LZO_SIZEOF_LONG_LONG >= 8) && (LZO_WORDSIZE >= 8)
    unsigned r; r = (unsigned) __builtin_ctzll(v); return r;
#define lzo_bitops_cttz64(v)    ((unsigned) __builtin_ctzll(v))
#else
    /* no fast backend available */
    LZO_UNUSED(v); return 0;
#endif
}
#endif
    145 
/* Reference the bitops helpers so compilers do not emit "defined but not
 * used" warnings for configurations that never call them.  GNU-compatible
 * compilers mark the function itself with __attribute__((__unused__)). */
#if 1 && (LZO_CC_ARMCC_GNUC || LZO_CC_CLANG || (LZO_CC_GNUC >= 0x020700ul) || LZO_CC_INTELC_GNUC || LZO_CC_LLVM || LZO_CC_PATHSCALE || LZO_CC_PGI)
static void __attribute__((__unused__))
#else
__lzo_static_forceinline void
#endif
lzo_bitops_unused_funcs(void)
{
    LZO_UNUSED_FUNC(lzo_bitops_ctlz32_func);
    LZO_UNUSED_FUNC(lzo_bitops_cttz32_func);
#if defined(lzo_uint64_t)
    LZO_UNUSED_FUNC(lzo_bitops_ctlz64_func);
    LZO_UNUSED_FUNC(lzo_bitops_cttz64_func);
#endif
    LZO_UNUSED_FUNC(lzo_bitops_unused_funcs);
}
    161 
    162 
    163 /***********************************************************************
    164 // memops
    165 ************************************************************************/
    166 
/* Transfer-unit types for the memops below.  TU2/TU4/TU8 are 2/4/8-byte
 * units: on targets where unaligned access is allowed (LZO_OPT_UNALIGNEDxx)
 * they alias the plain integer types so a single load/store copies the
 * unit; otherwise a packed/byte struct is used so that plain struct
 * assignment still copies exactly that many bytes. */
#if defined(__lzo_alignof) && !(LZO_CFG_NO_UNALIGNED)
#ifndef __lzo_memops_tcheck
/* compile-time predicate: type t has size a and alignment b */
#define __lzo_memops_tcheck(t,a,b) ((void)0, sizeof(t) == (a) && __lzo_alignof(t) == (b))
#endif
#endif
#ifndef lzo_memops_TU0p
#define lzo_memops_TU0p void __LZO_MMODEL *
#endif
#ifndef lzo_memops_TU1p
#define lzo_memops_TU1p unsigned char __LZO_MMODEL *
#endif
#ifndef lzo_memops_TU2p
#if (LZO_OPT_UNALIGNED16)
typedef lzo_uint16_t __lzo_may_alias lzo_memops_TU2;
/* volatile keeps the compiler from merging/reordering these accesses */
#define lzo_memops_TU2p volatile lzo_memops_TU2 *
#elif defined(__lzo_byte_struct)
__lzo_byte_struct(lzo_memops_TU2_struct,2)
typedef struct lzo_memops_TU2_struct lzo_memops_TU2;
#else
struct lzo_memops_TU2_struct { unsigned char a[2]; } __lzo_may_alias;
typedef struct lzo_memops_TU2_struct lzo_memops_TU2;
#endif
#ifndef lzo_memops_TU2p
#define lzo_memops_TU2p lzo_memops_TU2 *
#endif
#endif
#ifndef lzo_memops_TU4p
#if (LZO_OPT_UNALIGNED32)
typedef lzo_uint32_t __lzo_may_alias lzo_memops_TU4;
#define lzo_memops_TU4p volatile lzo_memops_TU4 __LZO_MMODEL *
#elif defined(__lzo_byte_struct)
__lzo_byte_struct(lzo_memops_TU4_struct,4)
typedef struct lzo_memops_TU4_struct lzo_memops_TU4;
#else
struct lzo_memops_TU4_struct { unsigned char a[4]; } __lzo_may_alias;
typedef struct lzo_memops_TU4_struct lzo_memops_TU4;
#endif
#ifndef lzo_memops_TU4p
#define lzo_memops_TU4p lzo_memops_TU4 __LZO_MMODEL *
#endif
#endif
#ifndef lzo_memops_TU8p
#if (LZO_OPT_UNALIGNED64)
typedef lzo_uint64_t __lzo_may_alias lzo_memops_TU8;
#define lzo_memops_TU8p volatile lzo_memops_TU8 __LZO_MMODEL *
#elif defined(__lzo_byte_struct)
__lzo_byte_struct(lzo_memops_TU8_struct,8)
typedef struct lzo_memops_TU8_struct lzo_memops_TU8;
#else
struct lzo_memops_TU8_struct { unsigned char a[8]; } __lzo_may_alias;
typedef struct lzo_memops_TU8_struct lzo_memops_TU8;
#endif
#ifndef lzo_memops_TU8p
#define lzo_memops_TU8p lzo_memops_TU8 __LZO_MMODEL *
#endif
#endif
    229 #define LZO_MEMOPS_SET1(dd,cc) \
    230     LZO_BLOCK_BEGIN \
    231     lzo_memops_set_TU1p d__1 = (lzo_memops_set_TU1p) (lzo_memops_TU0p) (dd); \
    232     d__1[0] = LZO_BYTE(cc); \
    233     LZO_BLOCK_END
    234 #define LZO_MEMOPS_SET2(dd,cc) \
    235     LZO_BLOCK_BEGIN \
    236     lzo_memops_set_TU1p d__2 = (lzo_memops_set_TU1p) (lzo_memops_TU0p) (dd); \
    237     d__2[0] = LZO_BYTE(cc); d__2[1] = LZO_BYTE(cc); \
    238     LZO_BLOCK_END
    239 #define LZO_MEMOPS_SET3(dd,cc) \
    240     LZO_BLOCK_BEGIN \
    241     lzo_memops_set_TU1p d__3 = (lzo_memops_set_TU1p) (lzo_memops_TU0p) (dd); \
    242     d__3[0] = LZO_BYTE(cc); d__3[1] = LZO_BYTE(cc); d__3[2] = LZO_BYTE(cc); \
    243     LZO_BLOCK_END
    244 #define LZO_MEMOPS_SET4(dd,cc) \
    245     LZO_BLOCK_BEGIN \
    246     lzo_memops_set_TU1p d__4 = (lzo_memops_set_TU1p) (lzo_memops_TU0p) (dd); \
    247     d__4[0] = LZO_BYTE(cc); d__4[1] = LZO_BYTE(cc); d__4[2] = LZO_BYTE(cc); d__4[3] = LZO_BYTE(cc); \
    248     LZO_BLOCK_END
    249 #define LZO_MEMOPS_MOVE1(dd,ss) \
    250     LZO_BLOCK_BEGIN \
    251     lzo_memops_move_TU1p d__1 = (lzo_memops_move_TU1p) (lzo_memops_TU0p) (dd); \
    252     const lzo_memops_move_TU1p s__1 = (const lzo_memops_move_TU1p) (const lzo_memops_TU0p) (ss); \
    253     d__1[0] = s__1[0]; \
    254     LZO_BLOCK_END
    255 #define LZO_MEMOPS_MOVE2(dd,ss) \
    256     LZO_BLOCK_BEGIN \
    257     lzo_memops_move_TU1p d__2 = (lzo_memops_move_TU1p) (lzo_memops_TU0p) (dd); \
    258     const lzo_memops_move_TU1p s__2 = (const lzo_memops_move_TU1p) (const lzo_memops_TU0p) (ss); \
    259     d__2[0] = s__2[0]; d__2[1] = s__2[1]; \
    260     LZO_BLOCK_END
    261 #define LZO_MEMOPS_MOVE3(dd,ss) \
    262     LZO_BLOCK_BEGIN \
    263     lzo_memops_move_TU1p d__3 = (lzo_memops_move_TU1p) (lzo_memops_TU0p) (dd); \
    264     const lzo_memops_move_TU1p s__3 = (const lzo_memops_move_TU1p) (const lzo_memops_TU0p) (ss); \
    265     d__3[0] = s__3[0]; d__3[1] = s__3[1]; d__3[2] = s__3[2]; \
    266     LZO_BLOCK_END
    267 #define LZO_MEMOPS_MOVE4(dd,ss) \
    268     LZO_BLOCK_BEGIN \
    269     lzo_memops_move_TU1p d__4 = (lzo_memops_move_TU1p) (lzo_memops_TU0p) (dd); \
    270     const lzo_memops_move_TU1p s__4 = (const lzo_memops_move_TU1p) (const lzo_memops_TU0p) (ss); \
    271     d__4[0] = s__4[0]; d__4[1] = s__4[1]; d__4[2] = s__4[2]; d__4[3] = s__4[3]; \
    272     LZO_BLOCK_END
    273 #define LZO_MEMOPS_MOVE8(dd,ss) \
    274     LZO_BLOCK_BEGIN \
    275     lzo_memops_move_TU1p d__8 = (lzo_memops_move_TU1p) (lzo_memops_TU0p) (dd); \
    276     const lzo_memops_move_TU1p s__8 = (const lzo_memops_move_TU1p) (const lzo_memops_TU0p) (ss); \
    277     d__8[0] = s__8[0]; d__8[1] = s__8[1]; d__8[2] = s__8[2]; d__8[3] = s__8[3]; \
    278     d__8[4] = s__8[4]; d__8[5] = s__8[5]; d__8[6] = s__8[6]; d__8[7] = s__8[7]; \
    279     LZO_BLOCK_END
/* COPYn: copy n bytes, using a single (possibly unaligned) load/store
 * where the target allows it, a runtime __lzo_memops_tcheck fallback
 * where only the type layout can decide, and byte-wise MOVEn otherwise.
 * Unlike MOVEn these may copy in one machine word, so they must only be
 * used on non-overlapping regions. */
LZO_COMPILE_TIME_ASSERT_HEADER(sizeof(*(lzo_memops_TU1p)0)==1)
#define LZO_MEMOPS_COPY1(dd,ss) LZO_MEMOPS_MOVE1(dd,ss)
#if (LZO_OPT_UNALIGNED16)
LZO_COMPILE_TIME_ASSERT_HEADER(sizeof(*(lzo_memops_TU2p)0)==2)
#define LZO_MEMOPS_COPY2(dd,ss) \
    * (lzo_memops_TU2p) (lzo_memops_TU0p) (dd) = * (const lzo_memops_TU2p) (const lzo_memops_TU0p) (ss)
#elif defined(__lzo_memops_tcheck)
#define LZO_MEMOPS_COPY2(dd,ss) \
    LZO_BLOCK_BEGIN if (__lzo_memops_tcheck(lzo_memops_TU2,2,1)) { \
        * (lzo_memops_TU2p) (lzo_memops_TU0p) (dd) = * (const lzo_memops_TU2p) (const lzo_memops_TU0p) (ss); \
    } else { LZO_MEMOPS_MOVE2(dd,ss); } LZO_BLOCK_END
#else
#define LZO_MEMOPS_COPY2(dd,ss) LZO_MEMOPS_MOVE2(dd,ss)
#endif
#if (LZO_OPT_UNALIGNED32)
LZO_COMPILE_TIME_ASSERT_HEADER(sizeof(*(lzo_memops_TU4p)0)==4)
#define LZO_MEMOPS_COPY4(dd,ss) \
    * (lzo_memops_TU4p) (lzo_memops_TU0p) (dd) = * (const lzo_memops_TU4p) (const lzo_memops_TU0p) (ss)
#elif defined(__lzo_memops_tcheck)
#define LZO_MEMOPS_COPY4(dd,ss) \
    LZO_BLOCK_BEGIN if (__lzo_memops_tcheck(lzo_memops_TU4,4,1)) { \
        * (lzo_memops_TU4p) (lzo_memops_TU0p) (dd) = * (const lzo_memops_TU4p) (const lzo_memops_TU0p) (ss); \
    } else { LZO_MEMOPS_MOVE4(dd,ss); } LZO_BLOCK_END
#else
#define LZO_MEMOPS_COPY4(dd,ss) LZO_MEMOPS_MOVE4(dd,ss)
#endif
/* on narrow-word targets an 8-byte copy is simply two 4-byte copies */
#if (LZO_WORDSIZE != 8)
#define LZO_MEMOPS_COPY8(dd,ss) \
    LZO_BLOCK_BEGIN LZO_MEMOPS_COPY4(dd,ss); LZO_MEMOPS_COPY4((lzo_memops_TU1p)(lzo_memops_TU0p)(dd)+4,(const lzo_memops_TU1p)(const lzo_memops_TU0p)(ss)+4); LZO_BLOCK_END
#else
#if (LZO_OPT_UNALIGNED64)
LZO_COMPILE_TIME_ASSERT_HEADER(sizeof(*(lzo_memops_TU8p)0)==8)
#define LZO_MEMOPS_COPY8(dd,ss) \
    * (lzo_memops_TU8p) (lzo_memops_TU0p) (dd) = * (const lzo_memops_TU8p) (const lzo_memops_TU0p) (ss)
#elif (LZO_OPT_UNALIGNED32)
#define LZO_MEMOPS_COPY8(dd,ss) \
    LZO_BLOCK_BEGIN LZO_MEMOPS_COPY4(dd,ss); LZO_MEMOPS_COPY4((lzo_memops_TU1p)(lzo_memops_TU0p)(dd)+4,(const lzo_memops_TU1p)(const lzo_memops_TU0p)(ss)+4); LZO_BLOCK_END
#elif defined(__lzo_memops_tcheck)
#define LZO_MEMOPS_COPY8(dd,ss) \
    LZO_BLOCK_BEGIN if (__lzo_memops_tcheck(lzo_memops_TU8,8,1)) { \
        * (lzo_memops_TU8p) (lzo_memops_TU0p) (dd) = * (const lzo_memops_TU8p) (const lzo_memops_TU0p) (ss); \
    } else { LZO_MEMOPS_MOVE8(dd,ss); } LZO_BLOCK_END
#else
#define LZO_MEMOPS_COPY8(dd,ss) LZO_MEMOPS_MOVE8(dd,ss)
#endif
#endif
/* copy nn bytes: 8-byte chunks, then one 4-byte chunk, then a byte tail */
#define LZO_MEMOPS_COPYN(dd,ss,nn) \
    LZO_BLOCK_BEGIN \
    lzo_memops_TU1p d__n = (lzo_memops_TU1p) (lzo_memops_TU0p) (dd); \
    const lzo_memops_TU1p s__n = (const lzo_memops_TU1p) (const lzo_memops_TU0p) (ss); \
    lzo_uint n__n = (nn); \
    while ((void)0, n__n >= 8) { LZO_MEMOPS_COPY8(d__n, s__n); d__n += 8; s__n += 8; n__n -= 8; } \
    if ((void)0, n__n >= 4) { LZO_MEMOPS_COPY4(d__n, s__n); d__n += 4; s__n += 4; n__n -= 4; } \
    if ((void)0, n__n > 0) do { *d__n++ = *s__n++; } while (--n__n > 0); \
    LZO_BLOCK_END
    335 
/* Read a 16-bit little-endian value from (possibly unaligned) ss.
 * Little-endian targets just copy two bytes; big-endian PowerPC uses the
 * byte-reversed load lhbrx; everything else assembles the value from
 * individual bytes. */
__lzo_static_forceinline lzo_uint16_t lzo_memops_get_le16(const lzo_voidp ss)
{
    lzo_uint16_t v;
#if (LZO_ABI_LITTLE_ENDIAN)
    LZO_MEMOPS_COPY2(&v, ss);
#elif (LZO_OPT_UNALIGNED16 && LZO_ARCH_POWERPC && LZO_ABI_BIG_ENDIAN) && (LZO_ASM_SYNTAX_GNUC)
    const lzo_memops_TU2p s = (const lzo_memops_TU2p) ss;
    unsigned long vv;
    /* lhbrx: load halfword byte-reversed */
    __asm__("lhbrx %0,0,%1" : "=r" (vv) : "r" (s), "m" (*s));
    v = (lzo_uint16_t) vv;
#else
    const lzo_memops_TU1p s = (const lzo_memops_TU1p) ss;
    v = (lzo_uint16_t) (((lzo_uint16_t)s[0]) | ((lzo_uint16_t)s[1] << 8));
#endif
    return v;
}
/* fast path: a plain unaligned load is already little-endian */
#if (LZO_OPT_UNALIGNED16) && (LZO_ABI_LITTLE_ENDIAN)
#define LZO_MEMOPS_GET_LE16(ss)    * (const lzo_memops_TU2p) (const lzo_memops_TU0p) (ss)
#else
#define LZO_MEMOPS_GET_LE16(ss)    lzo_memops_get_le16(ss)
#endif
    357 
/* Read a 32-bit little-endian value from (possibly unaligned) ss.
 * Same strategy as lzo_memops_get_le16, with lwbrx on big-endian PowerPC.
 * NOTE(review): in the generic branch s[0] is shifted left by 24, not 0 —
 * i.e. it decodes big-endian byte order; this branch is only reached on
 * non-little-endian targets where that yields the LE value after the
 * ABI's byte addressing — confirm against upstream before changing. */
__lzo_static_forceinline lzo_uint32_t lzo_memops_get_le32(const lzo_voidp ss)
{
    lzo_uint32_t v;
#if (LZO_ABI_LITTLE_ENDIAN)
    LZO_MEMOPS_COPY4(&v, ss);
#elif (LZO_OPT_UNALIGNED32 && LZO_ARCH_POWERPC && LZO_ABI_BIG_ENDIAN) && (LZO_ASM_SYNTAX_GNUC)
    const lzo_memops_TU4p s = (const lzo_memops_TU4p) ss;
    unsigned long vv;
    /* lwbrx: load word byte-reversed */
    __asm__("lwbrx %0,0,%1" : "=r" (vv) : "r" (s), "m" (*s));
    v = (lzo_uint32_t) vv;
#else
    const lzo_memops_TU1p s = (const lzo_memops_TU1p) ss;
    v = (lzo_uint32_t) (((lzo_uint32_t)s[0] << 24) | ((lzo_uint32_t)s[1] << 16) | ((lzo_uint32_t)s[2] << 8) | ((lzo_uint32_t)s[3]));
#endif
    return v;
}
/* fast path: a plain unaligned load is already little-endian */
#if (LZO_OPT_UNALIGNED32) && (LZO_ABI_LITTLE_ENDIAN)
#define LZO_MEMOPS_GET_LE32(ss)    * (const lzo_memops_TU4p) (const lzo_memops_TU0p) (ss)
#else
#define LZO_MEMOPS_GET_LE32(ss)    lzo_memops_get_le32(ss)
#endif

/* 64-bit LE read is only provided where a direct unaligned load works */
#if (LZO_OPT_UNALIGNED64) && (LZO_ABI_LITTLE_ENDIAN)
#define LZO_MEMOPS_GET_LE64(ss)    * (const lzo_memops_TU8p) (const lzo_memops_TU0p) (ss)
#endif
    383 
    384 __lzo_static_forceinline lzo_uint16_t lzo_memops_get_ne16(const lzo_voidp ss)
    385 {
    386     lzo_uint16_t v;
    387     LZO_MEMOPS_COPY2(&v, ss);
    388     return v;
    389 }
/* fast path: direct unaligned load where the target supports it */
#if (LZO_OPT_UNALIGNED16)
#define LZO_MEMOPS_GET_NE16(ss)    * (const lzo_memops_TU2p) (const lzo_memops_TU0p) (ss)
#else
#define LZO_MEMOPS_GET_NE16(ss)    lzo_memops_get_ne16(ss)
#endif
    395 
    396 __lzo_static_forceinline lzo_uint32_t lzo_memops_get_ne32(const lzo_voidp ss)
    397 {
    398     lzo_uint32_t v;
    399     LZO_MEMOPS_COPY4(&v, ss);
    400     return v;
    401 }
/* fast path: direct unaligned load where the target supports it */
#if (LZO_OPT_UNALIGNED32)
#define LZO_MEMOPS_GET_NE32(ss)    * (const lzo_memops_TU4p) (const lzo_memops_TU0p) (ss)
#else
#define LZO_MEMOPS_GET_NE32(ss)    lzo_memops_get_ne32(ss)
#endif

/* 64-bit native read is only provided where a direct unaligned load works */
#if (LZO_OPT_UNALIGNED64)
#define LZO_MEMOPS_GET_NE64(ss)    * (const lzo_memops_TU8p) (const lzo_memops_TU0p) (ss)
#endif
    411 
/* Store vv at (possibly unaligned) dd in little-endian byte order.
 * Little-endian targets copy two bytes; big-endian PowerPC uses the
 * byte-reversed store sthbrx; everything else writes byte by byte. */
__lzo_static_forceinline void lzo_memops_put_le16(lzo_voidp dd, lzo_uint16_t vv)
{
#if (LZO_ABI_LITTLE_ENDIAN)
    LZO_MEMOPS_COPY2(dd, &vv);
#elif (LZO_OPT_UNALIGNED16 && LZO_ARCH_POWERPC && LZO_ABI_BIG_ENDIAN) && (LZO_ASM_SYNTAX_GNUC)
    lzo_memops_TU2p d = (lzo_memops_TU2p) dd;
    unsigned long v = vv;
    /* sthbrx: store halfword byte-reversed */
    __asm__("sthbrx %2,0,%1" : "=m" (*d) : "r" (d), "r" (v));
#else
    lzo_memops_TU1p d = (lzo_memops_TU1p) dd;
    d[0] = LZO_BYTE((vv      ) & 0xff);
    d[1] = LZO_BYTE((vv >>  8) & 0xff);
#endif
}
/* fast path: a plain unaligned store is already little-endian */
#if (LZO_OPT_UNALIGNED16) && (LZO_ABI_LITTLE_ENDIAN)
#define LZO_MEMOPS_PUT_LE16(dd,vv) (* (lzo_memops_TU2p) (lzo_memops_TU0p) (dd) = (vv))
#else
#define LZO_MEMOPS_PUT_LE16(dd,vv) lzo_memops_put_le16(dd,vv)
#endif
    431 
/* Store vv at (possibly unaligned) dd in little-endian byte order.
 * Same strategy as lzo_memops_put_le16, with stwbrx on big-endian PowerPC. */
__lzo_static_forceinline void lzo_memops_put_le32(lzo_voidp dd, lzo_uint32_t vv)
{
#if (LZO_ABI_LITTLE_ENDIAN)
    LZO_MEMOPS_COPY4(dd, &vv);
#elif (LZO_OPT_UNALIGNED32 && LZO_ARCH_POWERPC && LZO_ABI_BIG_ENDIAN) && (LZO_ASM_SYNTAX_GNUC)
    lzo_memops_TU4p d = (lzo_memops_TU4p) dd;
    unsigned long v = vv;
    /* stwbrx: store word byte-reversed */
    __asm__("stwbrx %2,0,%1" : "=m" (*d) : "r" (d), "r" (v));
#else
    lzo_memops_TU1p d = (lzo_memops_TU1p) dd;
    d[0] = LZO_BYTE((vv      ) & 0xff);
    d[1] = LZO_BYTE((vv >>  8) & 0xff);
    d[2] = LZO_BYTE((vv >> 16) & 0xff);
    d[3] = LZO_BYTE((vv >> 24) & 0xff);
#endif
}
/* fast path: a plain unaligned store is already little-endian */
#if (LZO_OPT_UNALIGNED32) && (LZO_ABI_LITTLE_ENDIAN)
#define LZO_MEMOPS_PUT_LE32(dd,vv) (* (lzo_memops_TU4p) (lzo_memops_TU0p) (dd) = (vv))
#else
#define LZO_MEMOPS_PUT_LE32(dd,vv) lzo_memops_put_le32(dd,vv)
#endif
    453 
/* Store vv at dd in native byte order; dd may be unaligned. */
__lzo_static_forceinline void lzo_memops_put_ne16(lzo_voidp dd, lzo_uint16_t vv)
{
    LZO_MEMOPS_COPY2(dd, &vv);
}
/* fast path: direct unaligned store where the target supports it */
#if (LZO_OPT_UNALIGNED16)
#define LZO_MEMOPS_PUT_NE16(dd,vv) (* (lzo_memops_TU2p) (lzo_memops_TU0p) (dd) = (vv))
#else
#define LZO_MEMOPS_PUT_NE16(dd,vv) lzo_memops_put_ne16(dd,vv)
#endif
    463 
/* Store vv at dd in native byte order; dd may be unaligned. */
__lzo_static_forceinline void lzo_memops_put_ne32(lzo_voidp dd, lzo_uint32_t vv)
{
    LZO_MEMOPS_COPY4(dd, &vv);
}
/* fast path: direct unaligned store where the target supports it */
#if (LZO_OPT_UNALIGNED32)
#define LZO_MEMOPS_PUT_NE32(dd,vv) (* (lzo_memops_TU4p) (lzo_memops_TU0p) (dd) = (vv))
#else
#define LZO_MEMOPS_PUT_NE32(dd,vv) lzo_memops_put_ne32(dd,vv)
#endif
    473 
/* Reference the memops helpers so compilers do not emit "defined but not
 * used" warnings for configurations that never call them; same pattern as
 * lzo_bitops_unused_funcs above. */
#if 1 && (LZO_CC_ARMCC_GNUC || LZO_CC_CLANG || (LZO_CC_GNUC >= 0x020700ul) || LZO_CC_INTELC_GNUC || LZO_CC_LLVM || LZO_CC_PATHSCALE || LZO_CC_PGI)
static void __attribute__((__unused__))
#else
__lzo_static_forceinline void
#endif
lzo_memops_unused_funcs(void)
{
    LZO_UNUSED_FUNC(lzo_memops_get_le16);
    LZO_UNUSED_FUNC(lzo_memops_get_le32);
    LZO_UNUSED_FUNC(lzo_memops_get_ne16);
    LZO_UNUSED_FUNC(lzo_memops_get_ne32);
    LZO_UNUSED_FUNC(lzo_memops_put_le16);
    LZO_UNUSED_FUNC(lzo_memops_put_le32);
    LZO_UNUSED_FUNC(lzo_memops_put_ne16);
    LZO_UNUSED_FUNC(lzo_memops_put_ne32);
    LZO_UNUSED_FUNC(lzo_memops_unused_funcs);
}
    491 
    492 #endif /* already included */
    493 
    494 /* vim:set ts=4 sw=4 et: */
    495