Lines Matching defs:_Value

long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
long _InterlockedAnd(long volatile *_Value, long _Mask);
short _InterlockedAnd16(short volatile *_Value, short _Mask);
char _InterlockedAnd8(char volatile *_Value, char _Mask);
long __cdecl _InterlockedExchange(long volatile *_Target, long _Value);
short _InterlockedExchange16(short volatile *_Target, short _Value);
char _InterlockedExchange8(char volatile *_Target, char _Value);
long __cdecl _InterlockedExchangeAdd(long volatile *_Addend, long _Value);
char _InterlockedExchangeAdd8(char volatile *_Addend, char _Value);
long _InterlockedOr(long volatile *_Value, long _Mask);
short _InterlockedOr16(short volatile *_Value, short _Mask);
char _InterlockedOr8(char volatile *_Value, char _Mask);
long _InterlockedXor(long volatile *_Value, long _Mask);
short _InterlockedXor16(short volatile *_Value, short _Mask);
char _InterlockedXor8(char volatile *_Value, char _Mask);
unsigned int __cdecl _rotl(unsigned int _Value, int _Shift);
unsigned short _rotl16(unsigned short _Value, unsigned char _Shift);
unsigned __int64 __cdecl _rotl64(unsigned __int64 _Value, int _Shift);
unsigned char _rotl8(unsigned char _Value, unsigned char _Shift);
unsigned int __cdecl _rotr(unsigned int _Value, int _Shift);
unsigned short _rotr16(unsigned short _Value, unsigned char _Shift);
unsigned __int64 __cdecl _rotr64(unsigned __int64 _Value, int _Shift);
unsigned char _rotr8(unsigned char _Value, unsigned char _Shift);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
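
For orientation, a minimal usage sketch of the declared API, assuming <intrin.h> on an MSVC- or clang-compatible Windows toolchain; the commented values follow the documented return conventions:

#include <intrin.h>
#include <stdio.h>

int main(void) {
  long v = 5;
  long prev = _InterlockedExchangeAdd(&v, 3); /* returns the old value, 5; v becomes 8 */
  long cur  = _InterlockedIncrement(&v);      /* returns the new value, 9 */
  unsigned int r = _rotl(0x80000001u, 1);     /* 0x00000003 */
  printf("%ld %ld %#x\n", prev, cur, r);
  return 0;
}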
_rotl8(unsigned char _Value, unsigned char _Shift) {
  return _Shift ? (_Value << _Shift) | (_Value >> (8 - _Shift)) : _Value;
}
_rotr8(unsigned char _Value, unsigned char _Shift) {
  return _Shift ? (_Value >> _Shift) | (_Value << (8 - _Shift)) : _Value;
}
_rotl16(unsigned short _Value, unsigned char _Shift) {
  return _Shift ? (_Value << _Shift) | (_Value >> (16 - _Shift)) : _Value;
}
_rotr16(unsigned short _Value, unsigned char _Shift) {
  return _Shift ? (_Value >> _Shift) | (_Value << (16 - _Shift)) : _Value;
}
_rotl(unsigned int _Value, int _Shift) {
  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
}
_rotr(unsigned int _Value, int _Shift) {
  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
}
_lrotl(unsigned long _Value, int _Shift) {
  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
}
_lrotr(unsigned long _Value, int _Shift) {
  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
}
_rotl64(unsigned __int64 _Value, int _Shift) {
  return _Shift ? (_Value << _Shift) | (_Value >> (64 - _Shift)) : _Value;
}
_rotr64(unsigned __int64 _Value, int _Shift) {
  return _Shift ? (_Value >> _Shift) | (_Value << (64 - _Shift)) : _Value;
}
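
The rotation bodies above all follow the same shift-and-or pattern; a self-contained sketch of the 32-bit case (portable C, with uint32_t standing in for unsigned int) that can be sanity-checked directly:

#include <assert.h>
#include <stdint.h>

/* Mirrors the header's pattern: the ternary avoids the undefined
   full-width shift that (32 - _Shift) would produce when _Shift == 0. */
static uint32_t rotl32(uint32_t _Value, unsigned _Shift) {
  _Shift &= 31;
  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
}

int main(void) {
  assert(rotl32(0x80000001u, 1) == 0x00000003u);
  assert(rotl32(0xDEADBEEFu, 0) == 0xDEADBEEFu);
  return 0;
}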
/* MSVC's InterlockedExchangeAdd/Sub intrinsics return the value held
   *before* the operation and imply a full barrier, so the fetch-op
   builtins with __ATOMIC_SEQ_CST express this directly (and avoid the
   signed-overflow hazard of recomputing the old value by subtraction). */
_InterlockedExchangeAdd8(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
}
_InterlockedExchangeAdd16(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
}
_InterlockedExchangeAdd(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
}
_InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
}
_InterlockedExchangeSub8(char volatile *_Subend, char _Value) {
  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
_InterlockedExchangeSub16(short volatile *_Subend, short _Value) {
  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
_InterlockedExchangeSub(long volatile *_Subend, long _Value) {
  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
_InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value) {
  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
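
The fetch-op/op-fetch distinction is easy to check with the builtins alone; a small GCC/clang-only sketch:

#include <stdio.h>

int main(void) {
  long counter = 10;
  long old = __atomic_fetch_add(&counter, 5, __ATOMIC_SEQ_CST); /* returns 10, the prior value */
  long now = __atomic_add_fetch(&counter, 5, __ATOMIC_SEQ_CST); /* returns 20, the new value */
  printf("old=%ld now=%ld counter=%ld\n", old, now, counter);   /* old=10 now=20 counter=20 */
  return 0;
}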
/* Increment/Decrement return the *new* value, so the op-fetch forms are
   the right builtins here; the 16-bit variants operate on short, not char. */
_InterlockedIncrement16(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
_InterlockedIncrement(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
_InterlockedIncrement64(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
_InterlockedDecrement16(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
_InterlockedDecrement(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
_InterlockedDecrement64(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
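
A minimal concurrency sketch (POSIX threads, compile with -pthread) showing why the atomic increment matters; worker and NTHREADS are illustrative names, not anything from the header:

#include <pthread.h>
#include <stdio.h>

#define NTHREADS 4
static long counter = 0;

static void *worker(void *arg) {
  (void)arg;
  for (int i = 0; i < 100000; ++i)
    __atomic_add_fetch(&counter, 1, __ATOMIC_SEQ_CST); /* same operation _InterlockedIncrement performs */
  return NULL;
}

int main(void) {
  pthread_t t[NTHREADS];
  for (int i = 0; i < NTHREADS; ++i) pthread_create(&t[i], NULL, worker, NULL);
  for (int i = 0; i < NTHREADS; ++i) pthread_join(t[i], NULL);
  printf("%ld\n", counter); /* always 400000; a plain ++ would race */
  return 0;
}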
/* _InterlockedAnd/Or/Xor likewise return the original value, so these
   need fetch-op, not op-fetch (which would return the operation's result). */
_InterlockedAnd8(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
}
_InterlockedAnd16(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
}
_InterlockedAnd(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
}
_InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
}
_InterlockedOr8(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
}
_InterlockedOr16(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
}
_InterlockedOr(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
}
_InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
}
_InterlockedXor8(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
}
_InterlockedXor16(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
}
_InterlockedXor(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
}
_InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
}
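
Because these return the pre-operation value, they double as test-and-set primitives; a hypothetical claim_bit helper (not part of the header) sketches the idiom:

/* Atomically set bit `bit` in *flags; the returned old value tells us
   whether another thread had already set it. claim_bit is illustrative. */
static int claim_bit(long volatile *flags, int bit) {
  long old = __atomic_fetch_or(flags, 1L << bit, __ATOMIC_SEQ_CST);
  return (int)((old >> bit) & 1); /* 0 = we claimed it, 1 = already taken */
}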
_InterlockedExchange8(char volatile *_Target, char _Value) {
  return __atomic_exchange_n(_Target, _Value, __ATOMIC_SEQ_CST);
}
_InterlockedExchange16(short volatile *_Target, short _Value) {
  return __atomic_exchange_n(_Target, _Value, __ATOMIC_SEQ_CST);
}
_InterlockedExchange(long volatile *_Target, long _Value) {
  return __atomic_exchange_n(_Target, _Value, __ATOMIC_SEQ_CST);
}
_InterlockedExchange64(__int64 volatile *_Target, __int64 _Value) {
  return __atomic_exchange_n(_Target, _Value, __ATOMIC_SEQ_CST);
}
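
The exchange family is enough to build a spinlock; a minimal sketch on the same builtins (spin_lock/spin_unlock are illustrative, not header functions):

/* Lock word: 0 = free, 1 = held. Exchanging in 1 returns the previous
   state; looping until it returns 0 acquires the lock. */
static void spin_lock(long volatile *_Lock) {
  while (__atomic_exchange_n(_Lock, 1, __ATOMIC_SEQ_CST) != 0)
    ; /* spin until the old value is 0 */
}

static void spin_unlock(long volatile *_Lock) {
  __atomic_exchange_n(_Lock, 0, __ATOMIC_SEQ_CST);
}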