/* ===-------- Intrin.h ---------------------------------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */

/* Only include this if we're compiling for the windows platform. */
#ifndef _MSC_VER
#include_next <Intrin.h>
#else

#ifndef __INTRIN_H
#define __INTRIN_H

/* First include the standard intrinsics. */
#if defined(__i386__) || defined(__x86_64__)
#include <x86intrin.h>
#endif

/* For the definition of jmp_buf. */
#if __STDC_HOSTED__
#include <setjmp.h>
#endif

#ifdef __cplusplus
extern "C" {
#endif

#if defined(__MMX__)
/* And the random ones that aren't in those files. */
__m64 _m_from_float(float);
__m64 _m_from_int(int _l);
void _m_prefetch(void *);
float _m_to_float(__m64);
int _m_to_int(__m64 _M);
#endif

/* Other assorted instruction intrinsics. */
void __addfsbyte(unsigned long, unsigned char);
void __addfsdword(unsigned long, unsigned long);
void __addfsword(unsigned long, unsigned short);
void __code_seg(const char *);
static __inline__
void __cpuid(int[4], int);
static __inline__
void __cpuidex(int[4], int, int);
void __debugbreak(void);
__int64 __emul(int, int);
unsigned __int64 __emulu(unsigned int, unsigned int);
void __cdecl __fastfail(unsigned int);
unsigned int __getcallerseflags(void);
static __inline__
void __halt(void);
unsigned char __inbyte(unsigned short);
void __inbytestring(unsigned short, unsigned char *, unsigned long);
void __incfsbyte(unsigned long);
void __incfsdword(unsigned long);
void __incfsword(unsigned long);
unsigned long __indword(unsigned short);
void __indwordstring(unsigned short, unsigned long *, unsigned long);
void __int2c(void);
void __invlpg(void *);
unsigned short __inword(unsigned short);
void __inwordstring(unsigned short, unsigned short *, unsigned long);
void __lidt(void *);
unsigned __int64 __ll_lshift(unsigned __int64, int);
__int64 __ll_rshift(__int64, int);
void __llwpcb(void *);
unsigned char __lwpins32(unsigned int, unsigned int, unsigned int);
void __lwpval32(unsigned int, unsigned int, unsigned int);
unsigned int __lzcnt(unsigned int);
unsigned short __lzcnt16(unsigned short);
static __inline__
void __movsb(unsigned char *, unsigned char const *, size_t);
static __inline__
void __movsd(unsigned long *, unsigned long const *, size_t);
static __inline__
void __movsw(unsigned short *, unsigned short const *, size_t);
void __nop(void);
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
void __outbyte(unsigned short, unsigned char);
void __outbytestring(unsigned short, unsigned char *, unsigned long);
void __outdword(unsigned short, unsigned long);
void __outdwordstring(unsigned short, unsigned long *, unsigned long);
void __outword(unsigned short, unsigned short);
void __outwordstring(unsigned short, unsigned short *, unsigned long);
static __inline__
unsigned int __popcnt(unsigned int);
static __inline__
unsigned short __popcnt16(unsigned short);
unsigned long __readcr0(void);
unsigned long __readcr2(void);
static __inline__
unsigned long __readcr3(void);
unsigned long __readcr4(void);
unsigned long __readcr8(void);
unsigned int __readdr(unsigned int);
#ifdef __i386__
static __inline__
unsigned char __readfsbyte(unsigned long);
static __inline__
unsigned long __readfsdword(unsigned long);
static __inline__
unsigned __int64 __readfsqword(unsigned long);
static __inline__
unsigned short __readfsword(unsigned long);
#endif
static __inline__
unsigned __int64 __readmsr(unsigned long);
unsigned __int64 __readpmc(unsigned long);
unsigned long __segmentlimit(unsigned long);
void __sidt(void *);
void *__slwpcb(void);
static __inline__
void __stosb(unsigned char *, unsigned char, size_t);
static __inline__
void __stosd(unsigned long *, unsigned long, size_t);
static __inline__
void __stosw(unsigned short *, unsigned short, size_t);
void __svm_clgi(void);
void __svm_invlpga(void *, int);
void __svm_skinit(int);
void __svm_stgi(void);
void __svm_vmload(size_t);
void __svm_vmrun(size_t);
void __svm_vmsave(size_t);
void __ud2(void);
unsigned __int64 __ull_rshift(unsigned __int64, int);
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 *);
void __wbinvd(void);
void __writecr0(unsigned int);
static __inline__
void __writecr3(unsigned int);
void __writecr4(unsigned int);
void __writecr8(unsigned int);
void __writedr(unsigned int, unsigned int);
void __writefsbyte(unsigned long, unsigned char);
void __writefsdword(unsigned long, unsigned long);
void __writefsqword(unsigned long, unsigned __int64);
void __writefsword(unsigned long, unsigned short);
void __writemsr(unsigned long, unsigned __int64);
static __inline__
void *_AddressOfReturnAddress(void);
unsigned int _andn_u32(unsigned int, unsigned int);
unsigned int _bextr_u32(unsigned int, unsigned int, unsigned int);
unsigned int _bextri_u32(unsigned int, unsigned int);
static __inline__
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _bittest(long const *, long);
static __inline__
unsigned char _bittestandcomplement(long *, long);
static __inline__
unsigned char _bittestandreset(long *, long);
static __inline__
unsigned char _bittestandset(long *, long);
unsigned int _blcfill_u32(unsigned int);
unsigned int _blci_u32(unsigned int);
unsigned int _blcic_u32(unsigned int);
unsigned int _blcmsk_u32(unsigned int);
unsigned int _blcs_u32(unsigned int);
unsigned int _blsfill_u32(unsigned int);
unsigned int _blsi_u32(unsigned int);
unsigned int _blsic_u32(unsigned int);
unsigned int _blsmsk_u32(unsigned int);
unsigned int _blsr_u32(unsigned int);
unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
unsigned long __cdecl _byteswap_ulong(unsigned long);
unsigned short __cdecl _byteswap_ushort(unsigned short);
unsigned int _bzhi_u32(unsigned int, unsigned int);
void __cdecl _disable(void);
void __cdecl _enable(void);
void __cdecl _fxrstor(void const *);
void __cdecl _fxsave(void *);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
static __inline__
long _InterlockedAnd(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedAnd16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedAnd8(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset(long volatile *, long);
static __inline__
unsigned char _interlockedbittestandset(long volatile *, long);
static __inline__
long __cdecl _InterlockedCompareExchange(long volatile *_Destination,
                                         long _Exchange, long _Comparand);
long _InterlockedCompareExchange_HLEAcquire(long volatile *, long, long);
long _InterlockedCompareExchange_HLERelease(long volatile *, long, long);
static __inline__
short _InterlockedCompareExchange16(short volatile *_Destination,
                                    short _Exchange, short _Comparand);
static __inline__
__int64 _InterlockedCompareExchange64(__int64 volatile *_Destination,
                                      __int64 _Exchange, __int64 _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
static __inline__
char _InterlockedCompareExchange8(char volatile *_Destination, char _Exchange,
                                  char _Comparand);
void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
                                                    void *);
void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
                                                    void *);
static __inline__
long __cdecl _InterlockedDecrement(long volatile *_Addend);
static __inline__
short _InterlockedDecrement16(short volatile *_Addend);
long _InterlockedExchange(long volatile *_Target, long _Value);
static __inline__
short _InterlockedExchange16(short volatile *_Target, short _Value);
static __inline__
char _InterlockedExchange8(char volatile *_Target, char _Value);
static __inline__
long __cdecl _InterlockedExchangeAdd(long volatile *_Addend, long _Value);
long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
static __inline__
short _InterlockedExchangeAdd16(short volatile *_Addend, short _Value);
__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
static __inline__
char _InterlockedExchangeAdd8(char volatile *_Addend, char _Value);
static __inline__
long __cdecl _InterlockedIncrement(long volatile *_Addend);
static __inline__
short _InterlockedIncrement16(short volatile *_Addend);
static __inline__
long _InterlockedOr(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedOr16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedOr8(char volatile *_Value, char _Mask);
static __inline__
long _InterlockedXor(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedXor16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedXor8(char volatile *_Value, char _Mask);
void __cdecl _invpcid(unsigned int, void *);
static __inline__
unsigned long __cdecl _lrotl(unsigned long, int);
static __inline__
unsigned long __cdecl _lrotr(unsigned long, int);
static __inline__
unsigned int _lzcnt_u32(unsigned int);
static __inline__
void _ReadBarrier(void);
static __inline__
void _ReadWriteBarrier(void);
static __inline__
void *_ReturnAddress(void);
unsigned int _rorx_u32(unsigned int, const unsigned int);
int __cdecl _rdrand16_step(unsigned short *);
int __cdecl _rdrand32_step(unsigned int *);
static __inline__
unsigned int __cdecl _rotl(unsigned int _Value, int _Shift);
static __inline__
unsigned short _rotl16(unsigned short _Value, unsigned char _Shift);
static __inline__
unsigned __int64 __cdecl _rotl64(unsigned __int64 _Value, int _Shift);
static __inline__
unsigned char _rotl8(unsigned char _Value, unsigned char _Shift);
static __inline__
unsigned int __cdecl _rotr(unsigned int _Value, int _Shift);
static __inline__
unsigned short _rotr16(unsigned short _Value, unsigned char _Shift);
static __inline__
unsigned __int64 __cdecl _rotr64(unsigned __int64 _Value, int _Shift);
static __inline__
unsigned char _rotr8(unsigned char _Value, unsigned char _Shift);
int _sarx_i32(int, unsigned int);
#if __STDC_HOSTED__
int __cdecl _setjmp(jmp_buf);
#endif
unsigned int _shlx_u32(unsigned int, unsigned int);
unsigned int _shrx_u32(unsigned int, unsigned int);
void _Store_HLERelease(long volatile *, long);
void _Store64_HLERelease(__int64 volatile *, __int64);
void _StorePointer_HLERelease(void *volatile *, void *);
unsigned int _t1mskc_u32(unsigned int);
unsigned int _tzcnt_u32(unsigned int);
unsigned int _tzmsk_u32(unsigned int);
static __inline__
void _WriteBarrier(void);
void _xabort(const unsigned int imm);
unsigned __int32 _xbegin(void);
void _xend(void);
static __inline__
unsigned __int64 __cdecl _xgetbv(unsigned int);
void __cdecl _xrstor(void const *, unsigned __int64);
void __cdecl _xsave(void *, unsigned __int64);
void __cdecl _xsaveopt(void *, unsigned __int64);
void __cdecl _xsetbv(unsigned int, unsigned __int64);
unsigned char _xtest(void);

/* These additional intrinsics are turned on in x64/amd64/x86_64 mode. */
#ifdef __x86_64__
void __addgsbyte(unsigned long, unsigned char);
void __addgsdword(unsigned long, unsigned long);
void __addgsqword(unsigned long, unsigned __int64);
void __addgsword(unsigned long, unsigned short);
static __inline__
void __faststorefence(void);
void __incgsbyte(unsigned long);
void __incgsdword(unsigned long);
void __incgsqword(unsigned long);
void __incgsword(unsigned long);
unsigned char __lwpins64(unsigned __int64, unsigned int, unsigned int);
void __lwpval64(unsigned __int64, unsigned int, unsigned int);
unsigned __int64 __lzcnt64(unsigned __int64);
static __inline__
void __movsq(unsigned long long *, unsigned long long const *, size_t);
__int64 __mulh(__int64, __int64);
static __inline__
unsigned __int64 __popcnt64(unsigned __int64);
static __inline__
unsigned char __readgsbyte(unsigned long);
static __inline__
unsigned long __readgsdword(unsigned long);
static __inline__
unsigned __int64 __readgsqword(unsigned long);
static __inline__
unsigned short __readgsword(unsigned long);
unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
                                unsigned __int64 _HighPart,
                                unsigned char _Shift);
unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
                                 unsigned __int64 _HighPart,
                                 unsigned char _Shift);
static __inline__
void __stosq(unsigned __int64 *, unsigned __int64, size_t);
unsigned __int64 __umulh(unsigned __int64, unsigned __int64);
unsigned char __vmx_on(unsigned __int64 *);
unsigned char __vmx_vmclear(unsigned __int64 *);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 *);
unsigned char __vmx_vmread(size_t, size_t *);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t, size_t);
void __writegsbyte(unsigned long, unsigned char);
void __writegsdword(unsigned long, unsigned long);
void __writegsqword(unsigned long, unsigned __int64);
void __writegsword(unsigned long, unsigned short);
unsigned __int64 _andn_u64(unsigned __int64, unsigned __int64);
unsigned __int64 _bextr_u64(unsigned __int64, unsigned int, unsigned int);
unsigned __int64 _bextri_u64(unsigned __int64, unsigned int);
static __inline__
unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _bittest64(__int64 const *, __int64);
static __inline__
unsigned char _bittestandcomplement64(__int64 *, __int64);
static __inline__
unsigned char _bittestandreset64(__int64 *, __int64);
static __inline__
unsigned char _bittestandset64(__int64 *, __int64);
unsigned __int64 _blcfill_u64(unsigned __int64);
unsigned __int64 _blci_u64(unsigned __int64);
unsigned __int64 _blcic_u64(unsigned __int64);
unsigned __int64 _blcmsk_u64(unsigned __int64);
unsigned __int64 _blcs_u64(unsigned __int64);
unsigned __int64 _blsfill_u64(unsigned __int64);
unsigned __int64 _blsi_u64(unsigned __int64);
unsigned __int64 _blsic_u64(unsigned __int64);
unsigned __int64 _blsmsk_u64(unsigned __int64);
unsigned __int64 _blsr_u64(unsigned __int64);
unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
unsigned __int64 _bzhi_u64(unsigned __int64, unsigned int);
void __cdecl _fxrstor64(void const *);
void __cdecl _fxsave64(void *);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
static __inline__
unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
                                    long _Comparand);
unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
                                             __int64 _ExchangeHigh,
                                             __int64 _ExchangeLow,
                                             __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
short _InterlockedCompareExchange16_np(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
void *_InterlockedCompareExchangePointer(void *volatile *_Destination,
                                         void *_Exchange, void *_Comparand);
void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
                                            void *_Exchange, void *_Comparand);
static __inline__
__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
static __inline__
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
static __inline__
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
void *_InterlockedExchangePointer(void *volatile *_Target, void *_Value);
static __inline__
__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
static __inline__
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
static __inline__
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
static __inline__
unsigned __int64 _lzcnt_u64(unsigned __int64);
__int64 _mul128(__int64 _Multiplier, __int64 _Multiplicand,
                __int64 *_HighProduct);
unsigned int __cdecl _readfsbase_u32(void);
unsigned __int64 __cdecl _readfsbase_u64(void);
unsigned int __cdecl _readgsbase_u32(void);
unsigned __int64 __cdecl _readgsbase_u64(void);
unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
__int64 _sarx_i64(__int64, unsigned int);
#if __STDC_HOSTED__
int __cdecl _setjmpex(jmp_buf);
#endif
unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
unsigned __int64 _tzcnt_u64(unsigned __int64);
unsigned __int64 _tzmsk_u64(unsigned __int64);
unsigned __int64 _umul128(unsigned __int64 _Multiplier,
                          unsigned __int64 _Multiplicand,
                          unsigned __int64 *_HighProduct);
void __cdecl _writefsbase_u32(unsigned int);
void __cdecl _writefsbase_u64(unsigned __int64);
void __cdecl _writegsbase_u32(unsigned int);
void __cdecl _writegsbase_u64(unsigned __int64);
void __cdecl _xrstor64(void const *, unsigned __int64);
void __cdecl _xsave64(void *, unsigned __int64);
void __cdecl _xsaveopt64(void *, unsigned __int64);

#endif /* __x86_64__ */

/*----------------------------------------------------------------------------*\
|* Bit Twiddling
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_rotl8(unsigned char _Value, unsigned char _Shift) {
  _Shift &= 0x7;
  return _Shift ? (_Value << _Shift) | (_Value >> (8 - _Shift)) : _Value;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_rotr8(unsigned char _Value, unsigned char _Shift) {
  _Shift &= 0x7;
  return _Shift ? (_Value >> _Shift) | (_Value << (8 - _Shift)) : _Value;
}
static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
_rotl16(unsigned short _Value, unsigned char _Shift) {
  _Shift &= 0xf;
  return _Shift ? (_Value << _Shift) | (_Value >> (16 - _Shift)) : _Value;
}
static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
_rotr16(unsigned short _Value, unsigned char _Shift) {
  _Shift &= 0xf;
  return _Shift ? (_Value >> _Shift) | (_Value << (16 - _Shift)) : _Value;
}
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
_rotl(unsigned int _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
}
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
_rotr(unsigned int _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
}
static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
_lrotl(unsigned long _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
}
static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
_lrotr(unsigned long _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
}
static
__inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
_rotl64(unsigned __int64 _Value, int _Shift) {
  _Shift &= 0x3f;
  return _Shift ? (_Value << _Shift) | (_Value >> (64 - _Shift)) : _Value;
}
static
__inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
_rotr64(unsigned __int64 _Value, int _Shift) {
  _Shift &= 0x3f;
  return _Shift ? (_Value >> _Shift) | (_Value << (64 - _Shift)) : _Value;
}
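/* Usage sketch (illustrative): the rotate helpers above mask the shift count
 * to the operand width, so out-of-range counts wrap instead of producing
 * undefined behavior the way a plain C shift would.
 *
 *   unsigned char  b = _rotl8(0x96, 3);    // 0x96 -> 0xb4
 *   unsigned short s = _rotr16(0x0001, 1); // 0x0001 -> 0x8000
 *   unsigned int   i = _rotl(1u, 33);      // count masked to 1 -> 2
 */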
/*----------------------------------------------------------------------------*\
|* Bit Counting and Testing
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_BitScanForward(unsigned long *_Index, unsigned long _Mask) {
  if (!_Mask)
    return 0;
  *_Index = __builtin_ctzl(_Mask);
  return 1;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_BitScanReverse(unsigned long *_Index, unsigned long _Mask) {
  if (!_Mask)
    return 0;
  *_Index = 31 - __builtin_clzl(_Mask);
  return 1;
}
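/* Usage sketch (illustrative): unlike the raw __builtin_ctzl/__builtin_clzl
 * builtins, the _BitScan* wrappers report an all-zero mask through the return
 * value, so callers must check it before trusting the index.
 *
 *   unsigned long idx;
 *   if (_BitScanForward(&idx, 0x48ul))  // 0x48 == 0b1001000
 *     ;                                 // idx == 3, lowest set bit
 *   if (!_BitScanReverse(&idx, 0))      // zero mask: returns 0,
 *     ;                                 // idx is left untouched
 */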
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
_lzcnt_u32(unsigned int a) {
  if (!a)
    return 32;
  return __builtin_clz(a);
}
static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
__popcnt16(unsigned short value) {
  return __builtin_popcount((int)value);
}
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
__popcnt(unsigned int value) {
  return __builtin_popcount(value);
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittest(long const *a, long b) {
  return (*a >> b) & 1;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandcomplement(long *a, long b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a ^ (1 << b);
  return x;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandreset(long *a, long b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a & ~(1 << b);
  return x;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandset(long *a, long b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a | (1 << b);
  return x;
}
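/* Usage sketch (illustrative): each _bittest* variant returns the bit's value
 * prior to any modification, which makes a test-and-modify a single call.
 *
 *   long flags = 0x5;                              // 0b101
 *   unsigned char was = _bittestandset(&flags, 1); // was == 0
 *   // flags == 0x7 afterwards
 */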
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_interlockedbittestandset(long volatile *__BitBase, long __BitPos) {
  unsigned char __Res;
  __asm__ ("xor %0, %0\n"
           "lock bts %2, %1\n"
           "setc %0\n"
           : "=r" (__Res), "+m"(*__BitBase)
           : "Ir"(__BitPos));
  return __Res;
}
#endif
#ifdef __x86_64__
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask) {
  if (!_Mask)
    return 0;
  *_Index = __builtin_ctzll(_Mask);
  return 1;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask) {
  if (!_Mask)
    return 0;
  *_Index = 63 - __builtin_clzll(_Mask);
  return 1;
}
static
__inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
_lzcnt_u64(unsigned __int64 a) {
  if (!a)
    return 64;
  return __builtin_clzll(a);
}
static __inline__
unsigned __int64 __attribute__((__always_inline__, __nodebug__))
__popcnt64(unsigned __int64 value) {
  return __builtin_popcountll(value);
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittest64(__int64 const *a, __int64 b) {
  return (*a >> b) & 1;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandcomplement64(__int64 *a, __int64 b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a ^ (1ll << b);
  return x;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandreset64(__int64 *a, __int64 b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a & ~(1ll << b);
  return x;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandset64(__int64 *a, __int64 b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a | (1ll << b);
  return x;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_interlockedbittestandset64(__int64 volatile *__BitBase, __int64 __BitPos) {
  unsigned char __Res;
  __asm__ ("xor %0, %0\n"
           "lock bts %2, %1\n"
           "setc %0\n"
           : "=r" (__Res), "+m"(*__BitBase)
           : "Ir"(__BitPos));
  return __Res;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Add
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeAdd8(char volatile *_Addend, char _Value) {
  return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeAdd16(short volatile *_Addend, short _Value) {
  return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
}
#endif
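/* Semantics sketch (illustrative): _InterlockedExchangeAdd* returns the value
 * the addend held *before* the addition, which is why the implementations
 * above subtract _Value from the __atomic_add_fetch result.
 *
 *   short counter = 10;
 *   short old = _InterlockedExchangeAdd16(&counter, 5);
 *   // old == 10, counter == 15
 */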
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Sub
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeSub8(char volatile *_Subend, char _Value) {
  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeSub16(short volatile *_Subend, short _Value) {
  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
}
static __inline__ long __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeSub(long volatile *_Subend, long _Value) {
  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value) {
  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Increment
\*----------------------------------------------------------------------------*/
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedIncrement16(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, 0);
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedIncrement64(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, 0);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Decrement
\*----------------------------------------------------------------------------*/
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedDecrement16(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, 0);
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedDecrement64(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, 0);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked And
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedAnd8(char volatile *_Value, char _Mask) {
  return __atomic_and_fetch(_Value, _Mask, 0);
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedAnd16(short volatile *_Value, short _Mask) {
  return __atomic_and_fetch(_Value, _Mask, 0);
}
static __inline__ long __attribute__((__always_inline__, __nodebug__))
_InterlockedAnd(long volatile *_Value, long _Mask) {
  return __atomic_and_fetch(_Value, _Mask, 0);
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_and_fetch(_Value, _Mask, 0);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Or
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedOr8(char volatile *_Value, char _Mask) {
  return __atomic_or_fetch(_Value, _Mask, 0);
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedOr16(short volatile *_Value, short _Mask) {
  return __atomic_or_fetch(_Value, _Mask, 0);
}
static __inline__ long __attribute__((__always_inline__, __nodebug__))
_InterlockedOr(long volatile *_Value, long _Mask) {
  return __atomic_or_fetch(_Value, _Mask, 0);
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_or_fetch(_Value, _Mask, 0);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Xor
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedXor8(char volatile *_Value, char _Mask) {
  return __atomic_xor_fetch(_Value, _Mask, 0);
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedXor16(short volatile *_Value, short _Mask) {
  return __atomic_xor_fetch(_Value, _Mask, 0);
}
static __inline__ long __attribute__((__always_inline__, __nodebug__))
_InterlockedXor(long volatile *_Value, long _Mask) {
  return __atomic_xor_fetch(_Value, _Mask, 0);
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_xor_fetch(_Value, _Mask, 0);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedExchange8(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, 0);
  return _Value;
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedExchange16(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, 0);
  return _Value;
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedExchange64(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, 0);
  return _Value;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Compare Exchange
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedCompareExchange8(char volatile *_Destination,
                             char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0, 0, 0);
  return _Comparand;
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedCompareExchange16(short volatile *_Destination,
                              short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0, 0, 0);
  return _Comparand;
}
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedCompareExchange64(__int64 volatile *_Destination,
                              __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0, 0, 0);
  return _Comparand;
}
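/* Usage sketch (illustrative; saturating_inc16 is a hypothetical helper, not
 * part of this header): _InterlockedCompareExchange* returns the destination's
 * prior contents, so the customary CAS loop retries until the value read back
 * matches the comparand that was passed in.
 *
 *   static short saturating_inc16(short volatile *p, short limit) {
 *     short old, desired;
 *     do {
 *       old = *p;
 *       desired = old < limit ? old + 1 : old;
 *     } while (_InterlockedCompareExchange16(p, desired, old) != old);
 *     return desired;
 *   }
 */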
/*----------------------------------------------------------------------------*\
|* Barriers
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__attribute__((deprecated("use other intrinsics or C++11 atomics instead")))
_ReadWriteBarrier(void) {
  __asm__ volatile ("" : : : "memory");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__attribute__((deprecated("use other intrinsics or C++11 atomics instead")))
_ReadBarrier(void) {
  __asm__ volatile ("" : : : "memory");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__attribute__((deprecated("use other intrinsics or C++11 atomics instead")))
_WriteBarrier(void) {
  __asm__ volatile ("" : : : "memory");
}
#endif
#ifdef __x86_64__
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__faststorefence(void) {
  __asm__ volatile("lock orq $0, (%%rsp)" : : : "memory");
}
#endif
/*----------------------------------------------------------------------------*\
|* readfs, readgs
|* (Pointers in address space #256 and #257 are relative to the GS and FS
|* segment registers, respectively.)
\*----------------------------------------------------------------------------*/
#define __ptr_to_addr_space(__addr_space_nbr, __type, __offset)              \
    ((volatile __type __attribute__((__address_space__(__addr_space_nbr)))*) \
    (__offset))

#ifdef __i386__
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
__readfsbyte(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned char, __offset);
}
static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
__readfsdword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned long, __offset);
}
static __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
__readfsqword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned __int64, __offset);
}
static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
__readfsword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned short, __offset);
}
#endif
#ifdef __x86_64__
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
__readgsbyte(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned char, __offset);
}
static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
__readgsdword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned long, __offset);
}
static __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
__readgsqword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned __int64, __offset);
}
static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
__readgsword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned short, __offset);
}
#endif
#undef __ptr_to_addr_space
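/* Usage sketch (illustrative): on 32-bit Windows the FS segment points at the
 * current thread's TEB, so __readfsdword can fetch TEB fields directly;
 * offset 0x18 conventionally holds the TEB's own linear address.
 *
 *   #ifdef __i386__
 *   unsigned long teb = __readfsdword(0x18);
 *   #endif
 */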
/*----------------------------------------------------------------------------*\
|* movs, stos
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__movsb(unsigned char *__dst, unsigned char const *__src, size_t __n) {
  __asm__("rep movsb" : : "D"(__dst), "S"(__src), "c"(__n)
                        : "%edi", "%esi", "%ecx");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__movsd(unsigned long *__dst, unsigned long const *__src, size_t __n) {
  __asm__("rep movsl" : : "D"(__dst), "S"(__src), "c"(__n)
                        : "%edi", "%esi", "%ecx");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__movsw(unsigned short *__dst, unsigned short const *__src, size_t __n) {
  __asm__("rep movsw" : : "D"(__dst), "S"(__src), "c"(__n)
                        : "%edi", "%esi", "%ecx");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__stosb(unsigned char *__dst, unsigned char __x, size_t __n) {
  __asm__("rep stosb" : : "D"(__dst), "a"(__x), "c"(__n)
                        : "%edi", "%ecx");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__stosd(unsigned long *__dst, unsigned long __x, size_t __n) {
  __asm__("rep stosl" : : "D"(__dst), "a"(__x), "c"(__n)
                        : "%edi", "%ecx");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__stosw(unsigned short *__dst, unsigned short __x, size_t __n) {
  __asm__("rep stosw" : : "D"(__dst), "a"(__x), "c"(__n)
                        : "%edi", "%ecx");
}
#endif
#ifdef __x86_64__
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__movsq(unsigned long long *__dst, unsigned long long const *__src, size_t __n) {
  __asm__("rep movsq" : : "D"(__dst), "S"(__src), "c"(__n)
                        : "%edi", "%esi", "%ecx");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__stosq(unsigned __int64 *__dst, unsigned __int64 __x, size_t __n) {
  __asm__("rep stosq" : : "D"(__dst), "a"(__x), "c"(__n)
                        : "%edi", "%ecx");
}
#endif
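/* Usage sketch (illustrative): the __movs*/__stos* helpers count in elements
 * of the pointed-to type, not bytes, mirroring the underlying rep-prefixed
 * string instructions.
 *
 *   unsigned long buf[16];
 *   unsigned long copy[16];
 *   __stosd(buf, 0xdeadbeeful, 16);  // fill 16 dwords, not 16 bytes
 *   __movsd(copy, buf, 16);          // copy 16 dwords
 */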

/*----------------------------------------------------------------------------*\
|* Misc
\*----------------------------------------------------------------------------*/
static __inline__ void * __attribute__((__always_inline__, __nodebug__))
_AddressOfReturnAddress(void) {
  return (void*)((char*)__builtin_frame_address(0) + sizeof(void*));
}
static __inline__ void * __attribute__((__always_inline__, __nodebug__))
_ReturnAddress(void) {
  return __builtin_return_address(0);
}
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__cpuid(int __info[4], int __level) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b" (__info[1]), "=c"(__info[2]), "=d"(__info[3])
                   : "a"(__level));
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__cpuidex(int __info[4], int __level, int __ecx) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b" (__info[1]), "=c"(__info[2]), "=d"(__info[3])
                   : "a"(__level), "c"(__ecx));
}
static __inline__ unsigned __int64 __cdecl __attribute__((__always_inline__, __nodebug__))
_xgetbv(unsigned int __xcr_no) {
  unsigned int __eax, __edx;
  __asm__ ("xgetbv" : "=a" (__eax), "=d" (__edx) : "c" (__xcr_no));
  return ((unsigned __int64)__edx << 32) | __eax;
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__halt(void) {
  __asm__ volatile ("hlt");
}
#endif
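/* Usage sketch (illustrative): cpuid leaf 1 reports feature bits in ECX/EDX;
 * checking OSXSAVE (ECX bit 27) before calling _xgetbv avoids a #UD fault on
 * processors or operating systems without XSAVE enabled.
 *
 *   int info[4];
 *   __cpuid(info, 1);
 *   if (info[2] & (1 << 27)) {
 *     unsigned __int64 xcr0 = _xgetbv(0);  // XCR0: enabled state components
 *   }
 */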

/*----------------------------------------------------------------------------*\
|* Privileged intrinsics
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
__readmsr(unsigned long __register) {
  // Loads the contents of a 64-bit model specific register (MSR) specified in
  // the ECX register into registers EDX:EAX. The EDX register is loaded with
  // the high-order 32 bits of the MSR and the EAX register is loaded with the
  // low-order 32 bits. If less than 64 bits are implemented in the MSR being
  // read, the values returned to EDX:EAX in unimplemented bit locations are
  // undefined.
  unsigned long __edx;
  unsigned long __eax;
  __asm__ ("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}

static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
__readcr3(void) {
  unsigned long __cr3_val;
  __asm__ __volatile__ ("mov %%cr3, %0" : "=q"(__cr3_val) : : "memory");
  return __cr3_val;
}

static __inline__ void __attribute__((__always_inline__, __nodebug__))
__writecr3(unsigned int __cr3_val) {
  __asm__ ("mov %0, %%cr3" : : "q"(__cr3_val) : "memory");
}
#endif

#ifdef __cplusplus
}
#endif

#endif /* __INTRIN_H */
#endif /* _MSC_VER */