
Lines Matching refs:_Mask

164 unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
166 unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
182 long _InterlockedAnd(long volatile *_Value, long _Mask);
184 short _InterlockedAnd16(short volatile *_Value, short _Mask);
186 char _InterlockedAnd8(char volatile *_Value, char _Mask);
236 long _InterlockedOr(long volatile *_Value, long _Mask);
238 short _InterlockedOr16(short volatile *_Value, short _Mask);
240 char _InterlockedOr8(char volatile *_Value, char _Mask);
242 long _InterlockedXor(long volatile *_Value, long _Mask);
244 short _InterlockedXor16(short volatile *_Value, short _Mask);
246 char _InterlockedXor8(char volatile *_Value, char _Mask);
340 unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
342 unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
352 long _InterlockedAnd_np(long volatile *_Value, long _Mask);
353 short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
354 __int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
355 char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
390 long _InterlockedOr_np(long volatile *_Value, long _Mask);
391 short _InterlockedOr16_np(short volatile *_Value, short _Mask);
393 __int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
394 __int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
395 char _InterlockedOr8_np(char volatile *_Value, char _Mask);
396 long _InterlockedXor_np(long volatile *_Value, long _Mask);
397 short _InterlockedXor16_np(short volatile *_Value, short _Mask);
399 __int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
400 __int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
401 char _InterlockedXor8_np(char volatile *_Value, char _Mask);
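
These declarations are the MSVC-compatible bit-scan and interlocked intrinsics that the header exposes. A minimal usage sketch of the bit-scan pair follows, assuming a Windows or clang-cl toolchain where <intrin.h> provides them; the variable names and the mask value are illustrative only. The return value must be checked, because *_Index is left unset when the mask is zero.

  #include <intrin.h>
  #include <stdio.h>

  int main(void) {
    unsigned long index;
    unsigned long mask = 0x00F0u;

    /* Lowest set bit: index becomes 4 when the call reports success. */
    if (_BitScanForward(&index, mask))
      printf("lowest set bit: %lu\n", index);

    /* Highest set bit: index becomes 7. */
    if (_BitScanReverse(&index, mask))
      printf("highest set bit: %lu\n", index);

    /* A zero mask returns 0 and leaves index unspecified. */
    if (!_BitScanForward(&index, 0))
      printf("no bits set\n");

    return 0;
  }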
503 _BitScanForward(unsigned long *_Index, unsigned long _Mask) {
504 if (!_Mask)
506 *_Index = __builtin_ctzl(_Mask);
510 _BitScanReverse(unsigned long *_Index, unsigned long _Mask) {
511 if (!_Mask)
513 *_Index = 31 - __builtin_clzl(_Mask);
553 _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask) {
554 if (!_Mask)
556 *_Index = __builtin_ctzll(_Mask);
560 _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask) {
561 if (!_Mask)
563 *_Index = 63 - __builtin_clzll(_Mask);
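
Only the lines that mention _Mask appear above, so the early-return and success-return statements of these inline definitions are elided by the listing. Read together, each scan maps onto a GCC-style builtin: __builtin_ctzl/__builtin_ctzll count trailing zeros (the index of the lowest set bit), and __builtin_clzl/__builtin_clzll count leading zeros, which 31 - clz and 63 - clz turn into the index of the highest set bit. A sketch of the 32-bit pair with the elided control flow reconstructed, assuming the usual succeed-on-nonzero contract; the attributes and return statements here are an assumption, not copied from the header:

  /* Reconstructed sketch, not the verbatim header text. */
  static inline unsigned char
  _BitScanForward(unsigned long *_Index, unsigned long _Mask) {
    if (!_Mask)
      return 0;                       /* no bit set: report failure */
    *_Index = __builtin_ctzl(_Mask);  /* index of lowest set bit */
    return 1;
  }

  static inline unsigned char
  _BitScanReverse(unsigned long *_Index, unsigned long _Mask) {
    if (!_Mask)
      return 0;
    *_Index = 31 - __builtin_clzl(_Mask);  /* index of highest set bit */
    return 1;
  }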
668 _InterlockedAnd8(char volatile *_Value, char _Mask) {
669 return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
672 _InterlockedAnd16(short volatile *_Value, short _Mask) {
673 return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
676 _InterlockedAnd(long volatile *_Value, long _Mask) {
677 return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
681 _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
682 return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
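
Each interlocked form is implemented as a sequentially consistent fetch-and-op: the clang builtin atomically applies the mask to *_Value and returns the value the object held before the update, which is the documented return convention for _InterlockedAnd/Or/Xor. A small sketch of that contract, with names and values chosen only for illustration and assuming the same toolchain as above:

  #include <intrin.h>
  #include <assert.h>

  int main(void) {
    volatile long flags = 0xFF;

    /* Atomically clear the low nibble; the previous value is returned. */
    long previous = _InterlockedAnd(&flags, ~0x0FL);

    assert(previous == 0xFF);   /* value before the AND */
    assert(flags == 0xF0);      /* value after the AND  */
    return 0;
  }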
689 _InterlockedOr8(char volatile *_Value, char _Mask) {
690 return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
693 _InterlockedOr16(short volatile *_Value, short _Mask) {
694 return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
697 _InterlockedOr(long volatile *_Value, long _Mask) {
698 return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
702 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
703 return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
710 _InterlockedXor8(char volatile *_Value, char _Mask) {
711 return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
714 _InterlockedXor16(short volatile *_Value, short _Mask) {
715 return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
718 _InterlockedXor(long volatile *_Value, long _Mask) {
719 return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
723 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
724 return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
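
The Or and Xor families follow the same pattern as the And group above, differing only in the underlying __atomic_fetch_or/__atomic_fetch_xor builtin. Because the previous value comes back from the call, a common use is setting a flag bit and testing whether it was already set. This fragment is illustrative; the flag value and helper names are hypothetical:

  #include <intrin.h>

  #define DIRTY_FLAG 0x4L   /* hypothetical flag bit for the example */

  /* Returns nonzero if the flag was already set before this call. */
  static int mark_dirty(long volatile *state) {
    long previous = _InterlockedOr(state, DIRTY_FLAG);
    return (previous & DIRTY_FLAG) != 0;
  }

  /* Toggling the same bit atomically with the Xor form works the same way. */
  static int toggle_dirty(long volatile *state) {
    long previous = _InterlockedXor(state, DIRTY_FLAG);
    return (previous & DIRTY_FLAG) != 0;
  }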