STLdoc
STLdocumentation
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros
Macros | Typedefs | Functions | Variables
xatomic.h File Reference
#include <xatomic0.h>
#include <stddef.h>
#include <stdlib.h>
#include <string.h>
#include <intrin.h>
#include <xutility>

Go to the source code of this file.

Macros

#define _XATOMIC_H
 
#define _Compiler_barrier()   _ReadWriteBarrier()
 
#define _CONCATX(x, y)   x ## y
 
#define _CONCAT(x, y)   _CONCATX(x, y)
 
#define ATOMIC_BOOL_LOCK_FREE   (1 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _ATOMIC_CHAR_LOCK_FREE   (1 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _ATOMIC_CHAR16_T_LOCK_FREE   (2 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _ATOMIC_CHAR32_T_LOCK_FREE   (2 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _ATOMIC_WCHAR_T_LOCK_FREE   (_WCHAR_T_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _ATOMIC_SHORT_LOCK_FREE   (_SHORT_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _ATOMIC_INT_LOCK_FREE   (_INT_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _ATOMIC_LONG_LOCK_FREE   (_LONG_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _ATOMIC_LLONG_LOCK_FREE   (_LONGLONG_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define ATOMIC_POINTER_LOCK_FREE   (_ADDR_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _INTRIN_RELAXED(x)   x
 
#define _INTRIN_ACQUIRE(x)   x
 
#define _INTRIN_RELEASE(x)   x
 
#define _INTRIN_SEQ_CST(x)   x
 
#define _ATOMIC_FLAG_TEST_AND_SET   _Atomic_flag_test_and_set
 
#define _ATOMIC_FLAG_CLEAR   _Atomic_flag_clear
 
#define _ATOMIC_THREAD_FENCE   _Atomic_thread_fence
 
#define _ATOMIC_SIGNAL_FENCE   _Atomic_signal_fence
 
#define _INVALID_MEMORY_ORDER
 
#define _YIELD_PROCESSOR
 

Typedefs

typedef unsigned short _Uint2_t
 
typedef unsigned _LONGLONG _Uint8_t
 

Functions

memory_order _Memory_order_upper_bound (memory_order _Order1, memory_order _Order2)
 
void _Validate_compare_exchange_memory_order (memory_order _Success, memory_order _Failure)
 
void _Store_relaxed_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
void _Store_release_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
void _Store_seq_cst_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
void _Atomic_store_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
 
_Uint1_t _Load_seq_cst_1 (volatile _Uint1_t *_Tgt)
 
_Uint1_t _Load_relaxed_1 (volatile _Uint1_t *_Tgt)
 
_Uint1_t _Load_acquire_1 (volatile _Uint1_t *_Tgt)
 
_Uint1_t _Atomic_load_1 (volatile _Uint1_t *_Tgt, memory_order _Order)
 
_Uint1_t _Exchange_seq_cst_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Exchange_relaxed_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Exchange_acquire_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Exchange_release_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Atomic_exchange_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
 
int _Compare_exchange_seq_cst_1 (volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value)
 
int _Compare_exchange_relaxed_1 (volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value)
 
int _Compare_exchange_acquire_1 (volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value)
 
int _Compare_exchange_release_1 (volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value)
 
int _Atomic_compare_exchange_strong_1 (volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value, memory_order _Order1, memory_order _Order2)
 
int _Atomic_compare_exchange_weak_1 (volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value, memory_order _Order1, memory_order _Order2)
 
_Uint1_t _Fetch_add_seq_cst_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_add_relaxed_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_add_acquire_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_add_release_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Atomic_fetch_add_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
 
_Uint1_t _Atomic_fetch_sub_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
 
_Uint1_t _Fetch_and_seq_cst_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_and_relaxed_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_and_acquire_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_and_release_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Atomic_fetch_and_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
 
_Uint1_t _Fetch_or_seq_cst_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_or_relaxed_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_or_acquire_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_or_release_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Atomic_fetch_or_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
 
_Uint1_t _Fetch_xor_seq_cst_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_xor_relaxed_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_xor_acquire_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_xor_release_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Atomic_fetch_xor_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
 
void _Store_relaxed_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
void _Store_release_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
void _Store_seq_cst_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
void _Atomic_store_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
 
_Uint2_t _Load_seq_cst_2 (volatile _Uint2_t *_Tgt)
 
_Uint2_t _Load_relaxed_2 (volatile _Uint2_t *_Tgt)
 
_Uint2_t _Load_acquire_2 (volatile _Uint2_t *_Tgt)
 
_Uint2_t _Atomic_load_2 (volatile _Uint2_t *_Tgt, memory_order _Order)
 
_Uint2_t _Exchange_seq_cst_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Exchange_relaxed_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Exchange_acquire_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Exchange_release_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Atomic_exchange_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
 
int _Compare_exchange_seq_cst_2 (volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value)
 
int _Compare_exchange_relaxed_2 (volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value)
 
int _Compare_exchange_acquire_2 (volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value)
 
int _Compare_exchange_release_2 (volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value)
 
int _Atomic_compare_exchange_strong_2 (volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value, memory_order _Order1, memory_order _Order2)
 
int _Atomic_compare_exchange_weak_2 (volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value, memory_order _Order1, memory_order _Order2)
 
_Uint2_t _Fetch_add_seq_cst_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_add_relaxed_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_add_acquire_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_add_release_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Atomic_fetch_add_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
 
_Uint2_t _Atomic_fetch_sub_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
 
_Uint2_t _Fetch_and_seq_cst_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_and_relaxed_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_and_acquire_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_and_release_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Atomic_fetch_and_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
 
_Uint2_t _Fetch_or_seq_cst_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_or_relaxed_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_or_acquire_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_or_release_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Atomic_fetch_or_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
 
_Uint2_t _Fetch_xor_seq_cst_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_xor_relaxed_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_xor_acquire_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_xor_release_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Atomic_fetch_xor_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
 
void _Store_relaxed_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
void _Store_release_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
void _Store_seq_cst_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
void _Atomic_store_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
 
_Uint4_t _Load_seq_cst_4 (volatile _Uint4_t *_Tgt)
 
_Uint4_t _Load_relaxed_4 (volatile _Uint4_t *_Tgt)
 
_Uint4_t _Load_acquire_4 (volatile _Uint4_t *_Tgt)
 
_Uint4_t _Atomic_load_4 (volatile _Uint4_t *_Tgt, memory_order _Order)
 
_Uint4_t _Exchange_seq_cst_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Exchange_relaxed_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Exchange_acquire_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Exchange_release_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Atomic_exchange_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
 
int _Compare_exchange_seq_cst_4 (volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value)
 
int _Compare_exchange_relaxed_4 (volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value)
 
int _Compare_exchange_acquire_4 (volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value)
 
int _Compare_exchange_release_4 (volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value)
 
int _Atomic_compare_exchange_strong_4 (volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value, memory_order _Order1, memory_order _Order2)
 
int _Atomic_compare_exchange_weak_4 (volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value, memory_order _Order1, memory_order _Order2)
 
_Uint4_t _Fetch_add_seq_cst_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_add_relaxed_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_add_acquire_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_add_release_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Atomic_fetch_add_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
 
_Uint4_t _Atomic_fetch_sub_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
 
_Uint4_t _Fetch_and_seq_cst_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_and_relaxed_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_and_acquire_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_and_release_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Atomic_fetch_and_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
 
_Uint4_t _Fetch_or_seq_cst_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_or_relaxed_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_or_acquire_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_or_release_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Atomic_fetch_or_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
 
_Uint4_t _Fetch_xor_seq_cst_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_xor_relaxed_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_xor_acquire_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_xor_release_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Atomic_fetch_xor_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
 
void _Store_relaxed_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
void _Store_release_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
void _Store_seq_cst_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
void _Atomic_store_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
 
_Uint8_t _Load_seq_cst_8 (volatile _Uint8_t *_Tgt)
 
_Uint8_t _Load_relaxed_8 (volatile _Uint8_t *_Tgt)
 
_Uint8_t _Load_acquire_8 (volatile _Uint8_t *_Tgt)
 
_Uint8_t _Atomic_load_8 (volatile _Uint8_t *_Tgt, memory_order _Order)
 
_Uint8_t _Exchange_seq_cst_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Exchange_relaxed_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Exchange_acquire_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Exchange_release_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Atomic_exchange_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
 
int _Compare_exchange_seq_cst_8 (volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value)
 
int _Compare_exchange_relaxed_8 (volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value)
 
int _Compare_exchange_acquire_8 (volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value)
 
int _Compare_exchange_release_8 (volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value)
 
int _Atomic_compare_exchange_strong_8 (volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value, memory_order _Order1, memory_order _Order2)
 
int _Atomic_compare_exchange_weak_8 (volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value, memory_order _Order1, memory_order _Order2)
 
_Uint8_t _Fetch_add_seq_cst_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_add_relaxed_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_add_acquire_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_add_release_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Atomic_fetch_add_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
 
_Uint8_t _Atomic_fetch_sub_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
 
_Uint8_t _Fetch_and_seq_cst_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_and_relaxed_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_and_acquire_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_and_release_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Atomic_fetch_and_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
 
_Uint8_t _Fetch_or_seq_cst_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_or_relaxed_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_or_acquire_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_or_release_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Atomic_fetch_or_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
 
_Uint8_t _Fetch_xor_seq_cst_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_xor_relaxed_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_xor_acquire_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_xor_release_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Atomic_fetch_xor_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
 
int _Atomic_flag_test_and_set (volatile _Atomic_flag_t *_Flag, memory_order _Order)
 
void _Atomic_flag_clear (volatile _Atomic_flag_t *_Flag, memory_order _Order)
 
void _Atomic_thread_fence (memory_order _Order)
 
void _Atomic_signal_fence (memory_order _Order)
 
void _Lock_spin_lock (volatile _Atomic_flag_t *_Flag)
 
void _Unlock_spin_lock (volatile _Atomic_flag_t *_Flag)
 
void _Atomic_copy (volatile _Atomic_flag_t *_Flag, size_t _Size, volatile void *_Tgt, volatile const void *_Src, memory_order _Order)
 
void _Atomic_exchange (volatile _Atomic_flag_t *_Flag, size_t _Size, volatile void *_Tgt, volatile void *_Src, memory_order _Order)
 
int _Atomic_compare_exchange_weak (volatile _Atomic_flag_t *_Flag, size_t _Size, volatile void *_Tgt, volatile void *_Exp, const volatile void *_Src, memory_order _Order1, memory_order _Order2)
 
int _Atomic_compare_exchange_strong (volatile _Atomic_flag_t *_Flag, size_t _Size, volatile void *_Tgt, volatile void *_Exp, const volatile void *_Src, memory_order _Order1, memory_order _Order2)
 
int _Atomic_is_lock_free_1 (void)
 
int _Atomic_is_lock_free_2 (void)
 
int _Atomic_is_lock_free_4 (void)
 
int _Atomic_is_lock_free_8 (void)
 
_Atomic_integral_t _Inc_atomic_counter_explicit (_Atomic_counter_t &_Counter, memory_order _Order)
 
_Atomic_integral_t _Inc_atomic_counter (_Atomic_counter_t &_Counter)
 
_Atomic_integral_t _Dec_atomic_counter_explicit (_Atomic_counter_t &_Counter, memory_order _Order)
 
_Atomic_integral_t _Dec_atomic_counter (_Atomic_counter_t &_Counter)
 
_Atomic_integral_t _Load_atomic_counter_explicit (_Atomic_counter_t &_Counter, memory_order _Order)
 
_Atomic_integral_t _Load_atomic_counter (_Atomic_counter_t &_Counter)
 
_Atomic_integral_t _Compare_increment_atomic_counter_explicit (_Atomic_counter_t &_Counter, _Atomic_integral_t _Expected, memory_order _Order)
 
_Atomic_integral_t _Compare_increment_atomic_counter (_Atomic_counter_t &_Counter, _Atomic_integral_t _Expected)
 

Variables

_STD_BEGIN typedef unsigned char _Uint1_t
 

Macro Definition Documentation

#define _ATOMIC_CHAR16_T_LOCK_FREE   (2 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define _ATOMIC_CHAR32_T_LOCK_FREE   (2 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define _ATOMIC_CHAR_LOCK_FREE   (1 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define _ATOMIC_FLAG_CLEAR   _Atomic_flag_clear
#define _ATOMIC_FLAG_TEST_AND_SET   _Atomic_flag_test_and_set
#define _ATOMIC_INT_LOCK_FREE   (_INT_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define _ATOMIC_LLONG_LOCK_FREE   (_LONGLONG_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define _ATOMIC_LONG_LOCK_FREE   (_LONG_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define _ATOMIC_SHORT_LOCK_FREE   (_SHORT_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define _ATOMIC_SIGNAL_FENCE   _Atomic_signal_fence
#define _ATOMIC_THREAD_FENCE   _Atomic_thread_fence
#define _ATOMIC_WCHAR_T_LOCK_FREE   (_WCHAR_T_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define _Compiler_barrier ( )    _ReadWriteBarrier()
#define _CONCAT(x, y)   _CONCATX(x, y)
#define _CONCATX(x, y)   x ## y
#define _INTRIN_ACQUIRE (   x)    x
#define _INTRIN_RELAXED (   x)    x
#define _INTRIN_RELEASE (   x)    x
#define _INTRIN_SEQ_CST (   x)    x
#define _INVALID_MEMORY_ORDER
#define _XATOMIC_H
#define _YIELD_PROCESSOR
#define ATOMIC_BOOL_LOCK_FREE   (1 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define ATOMIC_POINTER_LOCK_FREE   (_ADDR_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)

Typedef Documentation

typedef unsigned short _Uint2_t
typedef unsigned _LONGLONG _Uint8_t

Function Documentation

int _Atomic_compare_exchange_strong ( volatile _Atomic_flag_t *  _Flag,
size_t  _Size,
volatile void *  _Tgt,
volatile void *  _Exp,
const volatile void *  _Src,
memory_order  _Order1,
memory_order  _Order2 
)
inline
2467  { /* atomically compare and exchange with memory ordering */
2468  return (_Atomic_compare_exchange_weak(_Flag, _Size, _Tgt, _Exp, _Src,
2469  _Order1, _Order2));
2470  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
int _Atomic_compare_exchange_weak(volatile _Atomic_flag_t *_Flag, size_t _Size, volatile void *_Tgt, volatile void *_Exp, const volatile void *_Src, memory_order _Order1, memory_order _Order2)
Definition: xatomic.h:2446
_Size
Definition: vcruntime_string.h:36
int _Atomic_compare_exchange_strong_1 ( volatile _Uint1_t *  _Tgt,
_Uint1_t *  _Exp,
_Uint1_t  _Value,
memory_order  _Order1,
memory_order  _Order2 
)
inline
516  { /* compare and exchange values atomically */
517  _Validate_compare_exchange_memory_order(_Order1, _Order2);
518 
519  switch (_Memory_order_upper_bound(_Order1, _Order2))
520  {
521  case memory_order_relaxed:
522  return (_Compare_exchange_relaxed_1(_Tgt, _Exp, _Value));
523 
524  case memory_order_consume:
525  case memory_order_acquire:
526  return (_Compare_exchange_acquire_1(_Tgt, _Exp, _Value));
527 
528  case memory_order_release:
529  return (_Compare_exchange_release_1(_Tgt, _Exp, _Value));
530 
531  case memory_order_acq_rel:
532  case memory_order_seq_cst:
533  return (_Compare_exchange_seq_cst_1(_Tgt, _Exp, _Value));
534 
535  default:
536  _INVALID_MEMORY_ORDER;
537  return (0);
538  }
539  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
int _Compare_exchange_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value)
Definition: xatomic.h:441
memory_order _Memory_order_upper_bound(memory_order _Order1, memory_order _Order2)
Definition: xatomic.h:188
int _Compare_exchange_release_1(volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value)
Definition: xatomic.h:495
Definition: xatomic0.h:20
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
int _Compare_exchange_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value)
Definition: xatomic.h:459
_In_ int _Value
Definition: setjmp.h:173
int _Compare_exchange_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value)
Definition: xatomic.h:477
void _Validate_compare_exchange_memory_order(memory_order _Success, memory_order _Failure)
Definition: xatomic.h:230
int _Atomic_compare_exchange_strong_2 ( volatile _Uint2_t *  _Tgt,
_Uint2_t *  _Exp,
_Uint2_t  _Value,
memory_order  _Order1,
memory_order  _Order2 
)
inline
1026  { /* compare and exchange values atomically */
1027  _Validate_compare_exchange_memory_order(_Order1, _Order2);
1028 
1029  switch (_Memory_order_upper_bound(_Order1, _Order2))
1030  {
1031  case memory_order_relaxed:
1032  return (_Compare_exchange_relaxed_2(_Tgt, _Exp, _Value));
1033 
1034  case memory_order_consume:
1035  case memory_order_acquire:
1036  return (_Compare_exchange_acquire_2(_Tgt, _Exp, _Value));
1037 
1038  case memory_order_release:
1039  return (_Compare_exchange_release_2(_Tgt, _Exp, _Value));
1040 
1041  case memory_order_acq_rel:
1042  case memory_order_seq_cst:
1043  return (_Compare_exchange_seq_cst_2(_Tgt, _Exp, _Value));
1044 
1045  default:
1046  _INVALID_MEMORY_ORDER;
1047  return (0);
1048  }
1049  }
int _Compare_exchange_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value)
Definition: xatomic.h:951
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
int _Compare_exchange_release_2(volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value)
Definition: xatomic.h:1005
int _Compare_exchange_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value)
Definition: xatomic.h:969
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
memory_order _Memory_order_upper_bound(memory_order _Order1, memory_order _Order2)
Definition: xatomic.h:188
int _Compare_exchange_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value)
Definition: xatomic.h:987
Definition: xatomic0.h:20
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_In_ int _Value
Definition: setjmp.h:173
void _Validate_compare_exchange_memory_order(memory_order _Success, memory_order _Failure)
Definition: xatomic.h:230
int _Atomic_compare_exchange_strong_4 ( volatile _Uint4_t *  _Tgt,
_Uint4_t *  _Exp,
_Uint4_t  _Value,
memory_order  _Order1,
memory_order  _Order2 
)
inline
1536  { /* compare and exchange values atomically */
1537  _Validate_compare_exchange_memory_order(_Order1, _Order2);
1538 
1539  switch (_Memory_order_upper_bound(_Order1, _Order2))
1540  {
1541  case memory_order_relaxed:
1542  return (_Compare_exchange_relaxed_4(_Tgt, _Exp, _Value));
1543 
1544  case memory_order_consume:
1545  case memory_order_acquire:
1546  return (_Compare_exchange_acquire_4(_Tgt, _Exp, _Value));
1547 
1548  case memory_order_release:
1549  return (_Compare_exchange_release_4(_Tgt, _Exp, _Value));
1550 
1551  case memory_order_acq_rel:
1552  case memory_order_seq_cst:
1553  return (_Compare_exchange_seq_cst_4(_Tgt, _Exp, _Value));
1554 
1555  default:
1556  _INVALID_MEMORY_ORDER;
1557  return (0);
1558  }
1559  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
int _Compare_exchange_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value)
Definition: xatomic.h:1497
memory_order _Memory_order_upper_bound(memory_order _Order1, memory_order _Order2)
Definition: xatomic.h:188
int _Compare_exchange_release_4(volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value)
Definition: xatomic.h:1515
int _Compare_exchange_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value)
Definition: xatomic.h:1461
int _Compare_exchange_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value)
Definition: xatomic.h:1479
Definition: xatomic0.h:20
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_In_ int _Value
Definition: setjmp.h:173
void _Validate_compare_exchange_memory_order(memory_order _Success, memory_order _Failure)
Definition: xatomic.h:230
int _Atomic_compare_exchange_strong_8 ( volatile _Uint8_t *  _Tgt,
_Uint8_t *  _Exp,
_Uint8_t  _Value,
memory_order  _Order1,
memory_order  _Order2 
)
inline
2065  { /* compare and exchange values atomically */
2066  _Validate_compare_exchange_memory_order(_Order1, _Order2);
2067 
2068  switch (_Memory_order_upper_bound(_Order1, _Order2))
2069  {
2070  case memory_order_relaxed:
2071  return (_Compare_exchange_relaxed_8(_Tgt, _Exp, _Value));
2072 
2073  case memory_order_consume:
2074  case memory_order_acquire:
2075  return (_Compare_exchange_acquire_8(_Tgt, _Exp, _Value));
2076 
2077  case memory_order_release:
2078  return (_Compare_exchange_release_8(_Tgt, _Exp, _Value));
2079 
2080  case memory_order_acq_rel:
2081  case memory_order_seq_cst:
2082  return (_Compare_exchange_seq_cst_8(_Tgt, _Exp, _Value));
2083 
2084  default:
2085  _INVALID_MEMORY_ORDER;
2086  return (0);
2087  }
2088  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
memory_order _Memory_order_upper_bound(memory_order _Order1, memory_order _Order2)
Definition: xatomic.h:188
int _Compare_exchange_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value)
Definition: xatomic.h:1990
int _Compare_exchange_release_8(volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value)
Definition: xatomic.h:2044
Definition: xatomic0.h:20
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
int _Compare_exchange_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value)
Definition: xatomic.h:2008
_In_ int _Value
Definition: setjmp.h:173
int _Compare_exchange_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value)
Definition: xatomic.h:2026
void _Validate_compare_exchange_memory_order(memory_order _Success, memory_order _Failure)
Definition: xatomic.h:230
int _Atomic_compare_exchange_weak ( volatile _Atomic_flag_t *  _Flag,
size_t  _Size,
volatile void *  _Tgt,
volatile void *  _Exp,
const volatile void *  _Src,
memory_order  _Order1,
memory_order  _Order2 
)
inline
2450  { /* atomically compare and exchange with memory ordering */
2451  int _Result;
2452 
2453  _Lock_spin_lock(_Flag);
2454  _Result = _CSTD memcmp((const void *)_Tgt, (const void *)_Exp, _Size) == 0;
2455  if (_Result != 0)
2456  _CSTD memcpy((void *)_Tgt, (void *)_Src, _Size);
2457  else
2458  _CSTD memcpy((void *)_Exp, (void *)_Tgt, _Size);
2459  _Unlock_spin_lock(_Flag);
2460  return (_Result);
2461  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
_Check_return_ int __cdecl memcmp(_In_reads_bytes_(_Size) void const *_Buf1, _In_reads_bytes_(_Size) void const *_Buf2, _In_ size_t _Size)
void _Lock_spin_lock(volatile _Atomic_flag_t *_Flag)
Definition: xatomic.h:2403
#define _CSTD
Definition: yvals.h:570
void _Unlock_spin_lock(volatile _Atomic_flag_t *_Flag)
Definition: xatomic.h:2410
_Size
Definition: vcruntime_string.h:36
int _Atomic_compare_exchange_weak_1 ( volatile _Uint1_t *  _Tgt,
_Uint1_t *  _Exp,
_Uint1_t  _Value,
memory_order  _Order1,
memory_order  _Order2 
)
inline
544  { /* compare and exchange values atomically */
545  /* No weak compare-exchange is currently available,
546  even for ARM, so fall back to strong */
547  return (_Atomic_compare_exchange_strong_1(_Tgt, _Exp, _Value,
548  _Order1, _Order2));
549  }
int _Atomic_compare_exchange_strong_1(volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value, memory_order _Order1, memory_order _Order2)
Definition: xatomic.h:513
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
_In_ int _Value
Definition: setjmp.h:173
int _Atomic_compare_exchange_weak_2 ( volatile _Uint2_t *  _Tgt,
_Uint2_t *  _Exp,
_Uint2_t  _Value,
memory_order  _Order1,
memory_order  _Order2 
)
inline
1054  { /* compare and exchange values atomically */
1055  /* No weak compare-exchange is currently available,
1056  even for ARM, so fall back to strong */
1057  return (_Atomic_compare_exchange_strong_2(_Tgt, _Exp, _Value,
1058  _Order1, _Order2));
1059  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
int _Atomic_compare_exchange_strong_2(volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value, memory_order _Order1, memory_order _Order2)
Definition: xatomic.h:1023
_In_ int _Value
Definition: setjmp.h:173
int _Atomic_compare_exchange_weak_4 ( volatile _Uint4_t *  _Tgt,
_Uint4_t *  _Exp,
_Uint4_t  _Value,
memory_order  _Order1,
memory_order  _Order2 
)
inline
1564  { /* compare and exchange values atomically */
1565  /* No weak compare-exchange is currently available,
1566  even for ARM, so fall back to strong */
1567  return (_Atomic_compare_exchange_strong_4(_Tgt, _Exp, _Value,
1568  _Order1, _Order2));
1569  }
int _Atomic_compare_exchange_strong_4(volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value, memory_order _Order1, memory_order _Order2)
Definition: xatomic.h:1533
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
_In_ int _Value
Definition: setjmp.h:173
int _Atomic_compare_exchange_weak_8 ( volatile _Uint8_t *  _Tgt,
_Uint8_t *  _Exp,
_Uint8_t  _Value,
memory_order  _Order1,
memory_order  _Order2 
)
inline
2093  { /* compare and exchange values atomically */
2094  /* No weak compare-exchange is currently available,
2095  even for ARM, so fall back to strong */
2096  return (_Atomic_compare_exchange_strong_8(_Tgt, _Exp, _Value,
2097  _Order1, _Order2));
2098  }
int _Atomic_compare_exchange_strong_8(volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value, memory_order _Order1, memory_order _Order2)
Definition: xatomic.h:2062
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
_In_ int _Value
Definition: setjmp.h:173
void _Atomic_copy ( volatile _Atomic_flag_t *  _Flag,
size_t  _Size,
volatile void *  _Tgt,
volatile const void *  _Src,
memory_order  _Order 
)
inline
2422  { /* atomically copy *_Src to *_Tgt with memory ordering */
2423  _Lock_spin_lock(_Flag);
2424  _CSTD memcpy((void *)_Tgt, (void *)_Src, _Size);
2425  _Unlock_spin_lock(_Flag);
2426  }
void _Lock_spin_lock(volatile _Atomic_flag_t *_Flag)
Definition: xatomic.h:2403
#define _CSTD
Definition: yvals.h:570
void _Unlock_spin_lock(volatile _Atomic_flag_t *_Flag)
Definition: xatomic.h:2410
_Size
Definition: vcruntime_string.h:36
void _Atomic_exchange ( volatile _Atomic_flag_t *  _Flag,
size_t  _Size,
volatile void *  _Tgt,
volatile void *  _Src,
memory_order  _Order 
)
inline
2432  { /* atomically swap *_Src and *_Tgt with memory ordering */
2433  unsigned char *_Left = (unsigned char *)_Tgt;
2434  unsigned char *_Right = (unsigned char *)_Src;
2435 
2436  _Lock_spin_lock(_Flag);
2437  for (; 0 < _Size; --_Size)
2438  { /* copy bytes */
2439  unsigned char _Tmp = *_Left;
2440  *_Left++ = *_Right;
2441  *_Right++ = _Tmp;
2442  }
2443  _Unlock_spin_lock(_Flag);
2444  }
constexpr const _Ty &() _Left
Definition: algorithm:3590
void _Lock_spin_lock(volatile _Atomic_flag_t *_Flag)
Definition: xatomic.h:2403
void _Unlock_spin_lock(volatile _Atomic_flag_t *_Flag)
Definition: xatomic.h:2410
_Size
Definition: vcruntime_string.h:36
constexpr const _Ty &() _Right
Definition: algorithm:3591
_Uint1_t _Atomic_exchange_1 ( volatile _Uint1_t *  _Tgt,
_Uint1_t  _Value,
memory_order  _Order 
)
inline
417  { /* exchange _Value and *_Tgt atomically */
418  switch (_Order)
419  {
420  case memory_order_relaxed:
421  return (_Exchange_relaxed_1(_Tgt, _Value));
422 
423  case memory_order_consume:
424  case memory_order_acquire:
425  return (_Exchange_acquire_1(_Tgt, _Value));
426 
427  case memory_order_release:
428  return (_Exchange_release_1(_Tgt, _Value));
429 
430  case memory_order_acq_rel:
431  case memory_order_seq_cst:
432  return (_Exchange_seq_cst_1(_Tgt, _Value));
433 
434  default:
435  _INVALID_MEMORY_ORDER;
436  return (0);
437  }
438  }
_Uint1_t _Exchange_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:401
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint1_t _Exchange_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:394
Definition: xatomic0.h:20
_Uint1_t _Exchange_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:408
Definition: xatomic0.h:17
Definition: xatomic0.h:16
_Uint1_t _Exchange_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:387
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Atomic_exchange_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value,
memory_order  _Order 
)
inline
927  { /* exchange _Value and *_Tgt atomically */
928  switch (_Order)
929  {
931  return (_Exchange_relaxed_2(_Tgt, _Value));
932 
935  return (_Exchange_acquire_2(_Tgt, _Value));
936 
938  return (_Exchange_release_2(_Tgt, _Value));
939 
942  return (_Exchange_seq_cst_2(_Tgt, _Value));
943 
944  default:
946  return (0);
947  }
948  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint2_t _Exchange_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:918
_Uint2_t _Exchange_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:911
Definition: xatomic0.h:20
Definition: xatomic0.h:17
_Uint2_t _Exchange_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:897
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
_Uint2_t _Exchange_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:904
Definition: xatomic0.h:18
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Atomic_exchange_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value,
memory_order  _Order 
)
inline
1437  { /* exchange _Value and *_Tgt atomically */
1438  switch (_Order)
1439  {
1440  case memory_order_relaxed:
1441  return (_Exchange_relaxed_4(_Tgt, _Value));
1442 
1443  case memory_order_consume:
1444  case memory_order_acquire:
1445  return (_Exchange_acquire_4(_Tgt, _Value));
1446 
1447  case memory_order_release:
1448  return (_Exchange_release_4(_Tgt, _Value));
1449 
1450  case memory_order_acq_rel:
1451  case memory_order_seq_cst:
1452  return (_Exchange_seq_cst_4(_Tgt, _Value));
1453 
1454  default:
1456  return (0);
1457  }
1458  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint4_t _Exchange_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1407
_Uint4_t _Exchange_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1428
_Uint4_t _Exchange_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1414
Definition: xatomic0.h:20
Definition: xatomic0.h:17
_Uint4_t _Exchange_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1421
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Atomic_exchange_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value,
memory_order  _Order 
)
inline
1966  { /* exchange _Value and *_Tgt atomically */
1967  switch (_Order)
1968  {
1969  case memory_order_relaxed:
1970  return (_Exchange_relaxed_8(_Tgt, _Value));
1971 
1972  case memory_order_consume:
1973  case memory_order_acquire:
1974  return (_Exchange_acquire_8(_Tgt, _Value));
1975 
1976  case memory_order_release:
1977  return (_Exchange_release_8(_Tgt, _Value));
1978 
1979  case memory_order_acq_rel:
1980  case memory_order_seq_cst:
1981  return (_Exchange_seq_cst_8(_Tgt, _Value));
1982 
1983  default:
1985  return (0);
1986  }
1987  }
_Uint8_t _Exchange_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:1950
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
Definition: xatomic0.h:20
_Uint8_t _Exchange_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:1957
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_Uint8_t _Exchange_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:1943
_Uint8_t _Exchange_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:1936
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Atomic_fetch_add_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value,
memory_order  _Order 
)
inline
582  { /* add _Value to *_Tgt atomically */
583  switch (_Order)
584  {
586  return (_Fetch_add_relaxed_1(_Tgt, _Value));
587 
590  return (_Fetch_add_acquire_1(_Tgt, _Value));
591 
593  return (_Fetch_add_release_1(_Tgt, _Value));
594 
597  return (_Fetch_add_seq_cst_1(_Tgt, _Value));
598 
599  default:
601  return (0);
602  }
603  }
_Uint1_t _Fetch_add_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:573
_Uint1_t _Fetch_add_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:559
_Uint1_t _Fetch_add_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:566
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
Definition: xatomic0.h:20
Definition: xatomic0.h:17
_Uint1_t _Fetch_add_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:552
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Atomic_fetch_add_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value,
memory_order  _Order 
)
inline
1092  { /* add _Value to *_Tgt atomically */
1093  switch (_Order)
1094  {
1095  case memory_order_relaxed:
1096  return (_Fetch_add_relaxed_2(_Tgt, _Value));
1097 
1098  case memory_order_consume:
1099  case memory_order_acquire:
1100  return (_Fetch_add_acquire_2(_Tgt, _Value));
1101 
1102  case memory_order_release:
1103  return (_Fetch_add_release_2(_Tgt, _Value));
1104 
1105  case memory_order_acq_rel:
1106  case memory_order_seq_cst:
1107  return (_Fetch_add_seq_cst_2(_Tgt, _Value));
1108 
1109  default:
1111  return (0);
1112  }
1113  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint2_t _Fetch_add_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:1076
Definition: xatomic0.h:20
_Uint2_t _Fetch_add_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:1069
Definition: xatomic0.h:17
_Uint2_t _Fetch_add_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:1062
Definition: xatomic0.h:16
_Uint2_t _Fetch_add_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:1083
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Atomic_fetch_add_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value,
memory_order  _Order 
)
inline
1602  { /* add _Value to *_Tgt atomically */
1603  switch (_Order)
1604  {
1605  case memory_order_relaxed:
1606  return (_Fetch_add_relaxed_4(_Tgt, _Value));
1607 
1608  case memory_order_consume:
1609  case memory_order_acquire:
1610  return (_Fetch_add_acquire_4(_Tgt, _Value));
1611 
1612  case memory_order_release:
1613  return (_Fetch_add_release_4(_Tgt, _Value));
1614 
1615  case memory_order_acq_rel:
1616  case memory_order_seq_cst:
1617  return (_Fetch_add_seq_cst_4(_Tgt, _Value));
1618 
1619  default:
1621  return (0);
1622  }
1623  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint4_t _Fetch_add_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1593
Definition: xatomic0.h:20
_Uint4_t _Fetch_add_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1572
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_Uint4_t _Fetch_add_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1586
_Uint4_t _Fetch_add_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1579
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Atomic_fetch_add_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value,
memory_order  _Order 
)
inline
2131  { /* add _Value to *_Tgt atomically */
2132  switch (_Order)
2133  {
2134  case memory_order_relaxed:
2135  return (_Fetch_add_relaxed_8(_Tgt, _Value));
2136 
2137  case memory_order_consume:
2138  case memory_order_acquire:
2139  return (_Fetch_add_acquire_8(_Tgt, _Value));
2140 
2141  case memory_order_release:
2142  return (_Fetch_add_release_8(_Tgt, _Value));
2143 
2144  case memory_order_acq_rel:
2145  case memory_order_seq_cst:
2146  return (_Fetch_add_seq_cst_8(_Tgt, _Value));
2147 
2148  default:
2150  return (0);
2151  }
2152  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint8_t _Fetch_add_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2115
_Uint8_t _Fetch_add_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2122
_Uint8_t _Fetch_add_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2108
Definition: xatomic0.h:20
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Fetch_add_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2101
_Uint1_t _Atomic_fetch_and_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value,
memory_order  _Order 
)
inline
642  { /* and _Value with *_Tgt atomically */
643  switch (_Order)
644  {
646  return (_Fetch_and_relaxed_1(_Tgt, _Value));
647 
650  return (_Fetch_and_acquire_1(_Tgt, _Value));
651 
653  return (_Fetch_and_release_1(_Tgt, _Value));
654 
657  return (_Fetch_and_seq_cst_1(_Tgt, _Value));
658 
659  default:
661  return (0);
662  }
663  }
_Uint1_t _Fetch_and_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:619
_Uint1_t _Fetch_and_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:626
_Uint1_t _Fetch_and_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:633
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
Definition: xatomic0.h:20
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Fetch_and_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:612
_Uint2_t _Atomic_fetch_and_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value,
memory_order  _Order 
)
inline
1152  { /* and _Value with *_Tgt atomically */
1153  switch (_Order)
1154  {
1155  case memory_order_relaxed:
1156  return (_Fetch_and_relaxed_2(_Tgt, _Value));
1157 
1158  case memory_order_consume:
1159  case memory_order_acquire:
1160  return (_Fetch_and_acquire_2(_Tgt, _Value));
1161 
1162  case memory_order_release:
1163  return (_Fetch_and_release_2(_Tgt, _Value));
1164 
1165  case memory_order_acq_rel:
1166  case memory_order_seq_cst:
1167  return (_Fetch_and_seq_cst_2(_Tgt, _Value));
1168 
1169  default:
1171  return (0);
1172  }
1173  }
_Uint2_t _Fetch_and_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:1129
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint2_t _Fetch_and_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:1122
_Uint2_t _Fetch_and_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:1143
Definition: xatomic0.h:20
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_Uint2_t _Fetch_and_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:1136
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Atomic_fetch_and_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value,
memory_order  _Order 
)
inline
1662  { /* and _Value with *_Tgt atomically */
1663  switch (_Order)
1664  {
1665  case memory_order_relaxed:
1666  return (_Fetch_and_relaxed_4(_Tgt, _Value));
1667 
1668  case memory_order_consume:
1669  case memory_order_acquire:
1670  return (_Fetch_and_acquire_4(_Tgt, _Value));
1671 
1672  case memory_order_release:
1673  return (_Fetch_and_release_4(_Tgt, _Value));
1674 
1675  case memory_order_acq_rel:
1676  case memory_order_seq_cst:
1677  return (_Fetch_and_seq_cst_4(_Tgt, _Value));
1678 
1679  default:
1681  return (0);
1682  }
1683  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint4_t _Fetch_and_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1632
Definition: xatomic0.h:20
Definition: xatomic0.h:17
Definition: xatomic0.h:16
_Uint4_t _Fetch_and_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1653
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_Uint4_t _Fetch_and_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1646
_Uint4_t _Fetch_and_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1639
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Atomic_fetch_and_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value,
memory_order  _Order 
)
inline
2191  { /* and _Value with *_Tgt atomically */
2192  switch (_Order)
2193  {
2194  case memory_order_relaxed:
2195  return (_Fetch_and_relaxed_8(_Tgt, _Value));
2196 
2197  case memory_order_consume:
2198  case memory_order_acquire:
2199  return (_Fetch_and_acquire_8(_Tgt, _Value));
2200 
2201  case memory_order_release:
2202  return (_Fetch_and_release_8(_Tgt, _Value));
2203 
2204  case memory_order_acq_rel:
2205  case memory_order_seq_cst:
2206  return (_Fetch_and_seq_cst_8(_Tgt, _Value));
2207 
2208  default:
2210  return (0);
2211  }
2212  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint8_t _Fetch_and_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2175
_Uint8_t _Fetch_and_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2168
Definition: xatomic0.h:20
_Uint8_t _Fetch_and_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2182
Definition: xatomic0.h:17
_Uint8_t _Fetch_and_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2161
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Atomic_fetch_or_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value,
memory_order  _Order 
)
inline
696  { /* or _Value with *_Tgt atomically */
697  switch (_Order)
698  {
700  return (_Fetch_or_relaxed_1(_Tgt, _Value));
701 
704  return (_Fetch_or_acquire_1(_Tgt, _Value));
705 
707  return (_Fetch_or_release_1(_Tgt, _Value));
708 
711  return (_Fetch_or_seq_cst_1(_Tgt, _Value));
712 
713  default:
715  return (0);
716  }
717  }
_Uint1_t _Fetch_or_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:687
_Uint1_t _Fetch_or_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:680
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint1_t _Fetch_or_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:673
Definition: xatomic0.h:20
_Uint1_t _Fetch_or_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:666
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Atomic_fetch_or_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value,
memory_order  _Order 
)
inline
1206  { /* or _Value with *_Tgt atomically */
1207  switch (_Order)
1208  {
1209  case memory_order_relaxed:
1210  return (_Fetch_or_relaxed_2(_Tgt, _Value));
1211 
1212  case memory_order_consume:
1213  case memory_order_acquire:
1214  return (_Fetch_or_acquire_2(_Tgt, _Value));
1215 
1216  case memory_order_release:
1217  return (_Fetch_or_release_2(_Tgt, _Value));
1218 
1219  case memory_order_acq_rel:
1220  case memory_order_seq_cst:
1221  return (_Fetch_or_seq_cst_2(_Tgt, _Value));
1222 
1223  default:
1225  return (0);
1226  }
1227  }
_Uint2_t _Fetch_or_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:1190
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint2_t _Fetch_or_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:1183
Definition: xatomic0.h:20
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_Uint2_t _Fetch_or_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:1197
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_or_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:1176
_Uint4_t _Atomic_fetch_or_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value,
memory_order  _Order 
)
inline
1716  { /* or _Value with *_Tgt atomically */
1717  switch (_Order)
1718  {
1719  case memory_order_relaxed:
1720  return (_Fetch_or_relaxed_4(_Tgt, _Value));
1721 
1722  case memory_order_consume:
1723  case memory_order_acquire:
1724  return (_Fetch_or_acquire_4(_Tgt, _Value));
1725 
1726  case memory_order_release:
1727  return (_Fetch_or_release_4(_Tgt, _Value));
1728 
1729  case memory_order_acq_rel:
1730  case memory_order_seq_cst:
1731  return (_Fetch_or_seq_cst_4(_Tgt, _Value));
1732 
1733  default:
1735  return (0);
1736  }
1737  }
_Uint4_t _Fetch_or_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1693
_Uint4_t _Fetch_or_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1707
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint4_t _Fetch_or_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1686
Definition: xatomic0.h:20
_Uint4_t _Fetch_or_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1700
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Atomic_fetch_or_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value,
memory_order  _Order 
)
inline
2245  { /* or _Value with *_Tgt atomically */
2246  switch (_Order)
2247  {
2248  case memory_order_relaxed:
2249  return (_Fetch_or_relaxed_8(_Tgt, _Value));
2250 
2251  case memory_order_consume:
2252  case memory_order_acquire:
2253  return (_Fetch_or_acquire_8(_Tgt, _Value));
2254 
2255  case memory_order_release:
2256  return (_Fetch_or_release_8(_Tgt, _Value));
2257 
2258  case memory_order_acq_rel:
2259  case memory_order_seq_cst:
2260  return (_Fetch_or_seq_cst_8(_Tgt, _Value));
2261 
2262  default:
2264  return (0);
2265  }
2266  }
_Uint8_t _Fetch_or_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2215
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint8_t _Fetch_or_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2222
_Uint8_t _Fetch_or_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2229
Definition: xatomic0.h:20
Definition: xatomic0.h:17
_Uint8_t _Fetch_or_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2236
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Atomic_fetch_sub_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value,
memory_order  _Order 
)
inline
607  { /* subtract _Value from *_Tgt atomically */
608  return (_Atomic_fetch_add_1(_Tgt, 0 - _Value, _Order));
609  }
_Uint1_t _Atomic_fetch_add_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
Definition: xatomic.h:580
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Atomic_fetch_sub_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value,
memory_order  _Order 
)
inline
1117  { /* subtract _Value from *_Tgt atomically */
1118  return (_Atomic_fetch_add_2(_Tgt, 0 - _Value, _Order));
1119  }
_Uint2_t _Atomic_fetch_add_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
Definition: xatomic.h:1090
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Atomic_fetch_sub_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value,
memory_order  _Order 
)
inline
1627  { /* subtract _Value from *_Tgt atomically */
1628  return (_Atomic_fetch_add_4(_Tgt, 0 - _Value, _Order));
1629  }
_Uint4_t _Atomic_fetch_add_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
Definition: xatomic.h:1600
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Atomic_fetch_sub_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value,
memory_order  _Order 
)
inline
2156  { /* subtract _Value from *_Tgt atomically */
2157  return (_Atomic_fetch_add_8(_Tgt, 0 - _Value, _Order));
2158  }
_Uint8_t _Atomic_fetch_add_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
Definition: xatomic.h:2129
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Atomic_fetch_xor_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value,
memory_order  _Order 
)
inline
750  { /* xor _Value with *_Tgt atomically */
751  switch (_Order)
752  {
754  return (_Fetch_xor_relaxed_1(_Tgt, _Value));
755 
758  return (_Fetch_xor_acquire_1(_Tgt, _Value));
759 
761  return (_Fetch_xor_release_1(_Tgt, _Value));
762 
765  return (_Fetch_xor_seq_cst_1(_Tgt, _Value));
766 
767  default:
769  return (0);
770  }
771  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint1_t _Fetch_xor_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:727
_Uint1_t _Fetch_xor_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:720
_Uint1_t _Fetch_xor_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:741
Definition: xatomic0.h:20
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_Uint1_t _Fetch_xor_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:734
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Atomic_fetch_xor_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value,
memory_order  _Order 
)
inline
1260  { /* xor _Value with *_Tgt atomically */
1261  switch (_Order)
1262  {
1263  case memory_order_relaxed:
1264  return (_Fetch_xor_relaxed_2(_Tgt, _Value));
1265 
1266  case memory_order_consume:
1267  case memory_order_acquire:
1268  return (_Fetch_xor_acquire_2(_Tgt, _Value));
1269 
1270  case memory_order_release:
1271  return (_Fetch_xor_release_2(_Tgt, _Value));
1272 
1273  case memory_order_acq_rel:
1274  case memory_order_seq_cst:
1275  return (_Fetch_xor_seq_cst_2(_Tgt, _Value));
1276 
1277  default:
1279  return (0);
1280  }
1281  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint2_t _Fetch_xor_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:1230
Definition: xatomic0.h:20
Definition: xatomic0.h:17
_Uint2_t _Fetch_xor_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:1244
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_Uint2_t _Fetch_xor_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:1237
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_xor_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:1251
_Uint4_t _Atomic_fetch_xor_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value,
memory_order  _Order 
)
inline
1770  { /* xor _Value with *_Tgt atomically */
1771  switch (_Order)
1772  {
1773  case memory_order_relaxed:
1774  return (_Fetch_xor_relaxed_4(_Tgt, _Value));
1775 
1776  case memory_order_consume:
1777  case memory_order_acquire:
1778  return (_Fetch_xor_acquire_4(_Tgt, _Value));
1779 
1780  case memory_order_release:
1781  return (_Fetch_xor_release_4(_Tgt, _Value));
1782 
1783  case memory_order_acq_rel:
1784  case memory_order_seq_cst:
1785  return (_Fetch_xor_seq_cst_4(_Tgt, _Value));
1786 
1787  default:
1789  return (0);
1790  }
1791  }
_Uint4_t _Fetch_xor_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1761
_Uint4_t _Fetch_xor_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1740
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint4_t _Fetch_xor_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1754
_Uint4_t _Fetch_xor_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1747
Definition: xatomic0.h:20
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Atomic_fetch_xor_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value,
memory_order  _Order 
)
inline
2299  { /* xor _Value with *_Tgt atomically */
2300  switch (_Order)
2301  {
2302  case memory_order_relaxed:
2303  return (_Fetch_xor_relaxed_8(_Tgt, _Value));
2304 
2305  case memory_order_consume:
2306  case memory_order_acquire:
2307  return (_Fetch_xor_acquire_8(_Tgt, _Value));
2308 
2309  case memory_order_release:
2310  return (_Fetch_xor_release_8(_Tgt, _Value));
2311 
2312  case memory_order_acq_rel:
2313  case memory_order_seq_cst:
2314  return (_Fetch_xor_seq_cst_8(_Tgt, _Value));
2315 
2316  default:
2318  return (0);
2319  }
2320  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint8_t _Fetch_xor_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2276
_Uint8_t _Fetch_xor_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2269
Definition: xatomic0.h:20
Definition: xatomic0.h:17
_Uint8_t _Fetch_xor_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2283
_Uint8_t _Fetch_xor_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2290
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
_In_ int _Value
Definition: setjmp.h:173
void _Atomic_flag_clear ( volatile _Atomic_flag_t _Flag,
memory_order  _Order 
)
inline
2349  { /* atomically clear flag */
2350  static_assert(sizeof(_Atomic_flag_t) == sizeof(_Uint4_t),
2351  "Unexpected _Atomic_flag_t size");
2352 
2353  switch (_Order)
2354  {
2355  case memory_order_relaxed:
2356  case memory_order_release:
2357  case memory_order_seq_cst:
2358  _Atomic_store_4((volatile _Uint4_t *)_Flag, 0, _Order);
2359  break;
2360 
2361  default:
2363  break;
2364  }
2365  }
void _Atomic_store_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
Definition: xatomic.h:1322
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
long _Atomic_flag_t
Definition: xatomic0.h:47
_Uint32t _Uint4_t
Definition: xatomic0.h:24
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
int _Atomic_flag_test_and_set ( volatile _Atomic_flag_t _Flag,
memory_order  _Order 
)
inline
2324  { /* atomically test flag and set to true */
2325  switch (_Order)
2326  {
2327  case memory_order_relaxed:
2328  return (_INTRIN_RELAXED(_interlockedbittestandset)(_Flag, 0));
2329 
2330  case memory_order_consume:
2331  case memory_order_acquire:
2332  return (_INTRIN_ACQUIRE(_interlockedbittestandset)(_Flag, 0));
2333 
2334  case memory_order_release:
2335  return (_INTRIN_RELEASE(_interlockedbittestandset)(_Flag, 0));
2336 
2337  case memory_order_acq_rel:
2338  case memory_order_seq_cst:
2339  return (_INTRIN_SEQ_CST(_interlockedbittestandset)(_Flag, 0));
2340 
2341  default:
2343  return (0);
2344  }
2345  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
Definition: xatomic0.h:20
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
int _Atomic_is_lock_free_1 ( void  )
inline
2474  { /* return true if 1-byte atomic values are lock-free */
2475  return (1 <= _ATOMIC_MAXBYTES_LOCK_FREE);
2476  }
#define _ATOMIC_MAXBYTES_LOCK_FREE
Definition: xatomic0.h:49
int _Atomic_is_lock_free_2 ( void  )
inline
2479  { /* return true if 2-byte atomic values are lock-free */
2480  return (2 <= _ATOMIC_MAXBYTES_LOCK_FREE);
2481  }
#define _ATOMIC_MAXBYTES_LOCK_FREE
Definition: xatomic0.h:49
int _Atomic_is_lock_free_4 ( void  )
inline
2484  { /* return true if 4-byte atomic values are lock-free */
2485  return (4 <= _ATOMIC_MAXBYTES_LOCK_FREE);
2486  }
#define _ATOMIC_MAXBYTES_LOCK_FREE
Definition: xatomic0.h:49
int _Atomic_is_lock_free_8 ( void  )
inline
2489  { /* return true if 8-byte atomic values are lock-free */
2490  return (8 <= _ATOMIC_MAXBYTES_LOCK_FREE);
2491  }
#define _ATOMIC_MAXBYTES_LOCK_FREE
Definition: xatomic0.h:49
_Uint1_t _Atomic_load_1 ( volatile _Uint1_t _Tgt,
memory_order  _Order 
)
inline
367  { /* load from *_Tgt atomically */
368  switch (_Order)
369  {
371  return (_Load_relaxed_1(_Tgt));
372 
375  return (_Load_acquire_1(_Tgt));
376 
378  return (_Load_seq_cst_1(_Tgt));
379 
380  default:
382  return (0);
383  }
384  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint1_t _Load_acquire_1(volatile _Uint1_t *_Tgt)
Definition: xatomic.h:358
_Uint1_t _Load_seq_cst_1(volatile _Uint1_t *_Tgt)
Definition: xatomic.h:326
_Uint1_t _Load_relaxed_1(volatile _Uint1_t *_Tgt)
Definition: xatomic.h:343
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:18
_Uint2_t _Atomic_load_2 ( volatile _Uint2_t _Tgt,
memory_order  _Order 
)
inline
877  { /* load from *_Tgt atomically */
878  switch (_Order)
879  {
881  return (_Load_relaxed_2(_Tgt));
882 
885  return (_Load_acquire_2(_Tgt));
886 
888  return (_Load_seq_cst_2(_Tgt));
889 
890  default:
892  return (0);
893  }
894  }
_Uint2_t _Load_relaxed_2(volatile _Uint2_t *_Tgt)
Definition: xatomic.h:853
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:18
_Uint2_t _Load_acquire_2(volatile _Uint2_t *_Tgt)
Definition: xatomic.h:868
_Uint2_t _Load_seq_cst_2(volatile _Uint2_t *_Tgt)
Definition: xatomic.h:836
_Uint4_t _Atomic_load_4 ( volatile _Uint4_t _Tgt,
memory_order  _Order 
)
inline
1387  { /* load from *_Tgt atomically */
1388  switch (_Order)
1389  {
1390  case memory_order_relaxed:
1391  return (_Load_relaxed_4(_Tgt));
1392 
1393  case memory_order_consume:
1394  case memory_order_acquire:
1395  return (_Load_acquire_4(_Tgt));
1396 
1397  case memory_order_seq_cst:
1398  return (_Load_seq_cst_4(_Tgt));
1399 
1400  default:
1402  return (0);
1403  }
1404  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint4_t _Load_relaxed_4(volatile _Uint4_t *_Tgt)
Definition: xatomic.h:1363
_Uint4_t _Load_seq_cst_4(volatile _Uint4_t *_Tgt)
Definition: xatomic.h:1346
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:18
_Uint4_t _Load_acquire_4(volatile _Uint4_t *_Tgt)
Definition: xatomic.h:1378
_Uint8_t _Atomic_load_8 ( volatile _Uint8_t _Tgt,
memory_order  _Order 
)
inline
1916  { /* load from *_Tgt atomically */
1917  switch (_Order)
1918  {
1919  case memory_order_relaxed:
1920  return (_Load_relaxed_8(_Tgt));
1921 
1922  case memory_order_consume:
1923  case memory_order_acquire:
1924  return (_Load_acquire_8(_Tgt));
1925 
1926  case memory_order_seq_cst:
1927  return (_Load_seq_cst_8(_Tgt));
1928 
1929  default:
1931  return (0);
1932  }
1933  }
_Uint8_t _Load_relaxed_8(volatile _Uint8_t *_Tgt)
Definition: xatomic.h:1886
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
_Uint8_t _Load_seq_cst_8(volatile _Uint8_t *_Tgt)
Definition: xatomic.h:1862
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
_Uint8_t _Load_acquire_8(volatile _Uint8_t *_Tgt)
Definition: xatomic.h:1907
Definition: xatomic0.h:18
void _Atomic_signal_fence ( memory_order  _Order)
inline
2387  { /* inhibit compiler reordering */
2389  }
#define _Compiler_barrier()
Definition: xatomic.h:21
void _Atomic_store_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value,
memory_order  _Order 
)
inline
304  { /* store _Value atomically */
305  switch (_Order)
306  {
307  case memory_order_relaxed:
308  _Store_relaxed_1(_Tgt, _Value);
309  break;
310 
311  case memory_order_release:
312  _Store_release_1(_Tgt, _Value);
313  break;
314 
315  case memory_order_seq_cst:
316  _Store_seq_cst_1(_Tgt, _Value);
317  break;
318 
319  default:
320  _INVALID_MEMORY_ORDER;
321  break;
322  }
323  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
void _Store_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:275
void _Store_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:264
void _Store_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:288
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
_In_ int _Value
Definition: setjmp.h:173
void _Atomic_store_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value,
memory_order  _Order 
)
inline
814  { /* store _Value atomically */
815  switch (_Order)
816  {
817  case memory_order_relaxed:
818  _Store_relaxed_2(_Tgt, _Value);
819  break;
820 
821  case memory_order_release:
822  _Store_release_2(_Tgt, _Value);
823  break;
824 
825  case memory_order_seq_cst:
826  _Store_seq_cst_2(_Tgt, _Value);
827  break;
828 
829  default:
830  _INVALID_MEMORY_ORDER;
831  break;
832  }
833  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
void _Store_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:785
void _Store_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:798
void _Store_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:774
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
_In_ int _Value
Definition: setjmp.h:173
void _Atomic_store_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value,
memory_order  _Order 
)
inline
1324  { /* store _Value atomically */
1325  switch (_Order)
1326  {
1327  case memory_order_relaxed:
1328  _Store_relaxed_4(_Tgt, _Value);
1329  break;
1330 
1331  case memory_order_release:
1332  _Store_release_4(_Tgt, _Value);
1333  break;
1334 
1335  case memory_order_seq_cst:
1336  _Store_seq_cst_4(_Tgt, _Value);
1337  break;
1338 
1339  default:
1340  _INVALID_MEMORY_ORDER;
1341  break;
1342  }
1343  }
void _Store_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1284
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
void _Store_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1295
void _Store_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1308
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
_In_ int _Value
Definition: setjmp.h:173
void _Atomic_store_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value,
memory_order  _Order 
)
inline
1840  { /* store _Value atomically */
1841  switch (_Order)
1842  {
1843  case memory_order_relaxed:
1844  _Store_relaxed_8(_Tgt, _Value);
1845  break;
1846 
1847  case memory_order_release:
1848  _Store_release_8(_Tgt, _Value);
1849  break;
1850 
1851  case memory_order_seq_cst:
1852  _Store_seq_cst_8(_Tgt, _Value);
1853  break;
1854 
1855  default:
1856  _INVALID_MEMORY_ORDER;
1857  break;
1858  }
1859  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
void _Store_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:1808
void _Store_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:1794
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
void _Store_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:1824
_In_ int _Value
Definition: setjmp.h:173
void _Atomic_thread_fence ( memory_order  _Order)
inline
2368  { /* force memory visibility and inhibit compiler reordering */
2369  #if defined(_M_ARM) || defined(_M_ARM64)
2370  if (_Order != memory_order_relaxed)
2371  {
2372  _Memory_barrier();
2373  }
2374 
2375  #else
2376  _Compiler_barrier();
2377  if (_Order == memory_order_seq_cst)
2378  { /* force visibility */
2379  static _Uint4_t _Guard;
2380  _Atomic_exchange_4(&_Guard, 0, memory_order_seq_cst);
2381  _Compiler_barrier();
2382  }
2383  #endif
2384  }
_Uint4_t _Atomic_exchange_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
Definition: xatomic.h:1435
_Uint32t _Uint4_t
Definition: xatomic0.h:24
Definition: xatomic0.h:16
Definition: xatomic0.h:21
#define _Compiler_barrier()
Definition: xatomic.h:21
int _Compare_exchange_acquire_1 ( volatile _Uint1_t _Tgt,
_Uint1_t _Exp,
_Uint1_t  _Value 
)
inline
479  { /* compare and exchange values atomically with
480  acquire memory order */
481  _Uint1_t _Old_exp = *_Exp; /* read before atomic operation */
482 
483  _Uint1_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange8)((volatile char *)_Tgt,
484  _Value, _Old_exp);
485 
486  if (_Prev == _Old_exp)
487  return (1);
488  else
489  { /* copy old value */
490  *_Exp = _Prev;
491  return (0);
492  }
493  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
_STD_BEGIN typedef unsigned char _Uint1_t
Definition: xatomic.h:161
_In_ int _Value
Definition: setjmp.h:173
int _Compare_exchange_acquire_2 ( volatile _Uint2_t _Tgt,
_Uint2_t _Exp,
_Uint2_t  _Value 
)
inline
989  { /* compare and exchange values atomically with
990  acquire memory order */
991  _Uint2_t _Old_exp = *_Exp; /* read before atomic operation */
992 
993  _Uint2_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange16)((volatile short *)_Tgt,
994  _Value, _Old_exp);
995 
996  if (_Prev == _Old_exp)
997  return (1);
998  else
999  { /* copy old value */
1000  *_Exp = _Prev;
1001  return (0);
1002  }
1003  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
unsigned short _Uint2_t
Definition: xatomic.h:162
_In_ int _Value
Definition: setjmp.h:173
int _Compare_exchange_acquire_4 ( volatile _Uint4_t _Tgt,
_Uint4_t _Exp,
_Uint4_t  _Value 
)
inline
1499  { /* compare and exchange values atomically with
1500  acquire memory order */
1501  _Uint4_t _Old_exp = *_Exp; /* read before atomic operation */
1502 
1503  _Uint4_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange)((volatile long *)_Tgt,
1504  _Value, _Old_exp);
1505 
1506  if (_Prev == _Old_exp)
1507  return (1);
1508  else
1509  { /* copy old value */
1510  *_Exp = _Prev;
1511  return (0);
1512  }
1513  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
_Uint32t _Uint4_t
Definition: xatomic0.h:24
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
_In_ int _Value
Definition: setjmp.h:173
long __cdecl _InterlockedCompareExchange(long volatile *, long, long)
int _Compare_exchange_acquire_8 ( volatile _Uint8_t _Tgt,
_Uint8_t _Exp,
_Uint8_t  _Value 
)
inline
2028  { /* compare and exchange values atomically with
2029  acquire memory order */
2030  _Uint8_t _Old_exp = *_Exp; /* read before atomic operation */
2031 
2032  _Uint8_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange64)((volatile _LONGLONG *)_Tgt,
2033  _Value, _Old_exp);
2034 
2035  if (_Prev == _Old_exp)
2036  return (1);
2037  else
2038  { /* copy old value */
2039  *_Exp = _Prev;
2040  return (0);
2041  }
2042  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
#define _LONGLONG
Definition: yvals.h:579
unsigned _LONGLONG _Uint8_t
Definition: xatomic.h:164
_In_ int _Value
Definition: setjmp.h:173
int _Compare_exchange_relaxed_1 ( volatile _Uint1_t _Tgt,
_Uint1_t _Exp,
_Uint1_t  _Value 
)
inline
461  { /* compare and exchange values atomically with
462  relaxed memory order */
463  _Uint1_t _Old_exp = *_Exp; /* read before atomic operation */
464 
465  _Uint1_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange8)((volatile char *)_Tgt,
466  _Value, _Old_exp);
467 
468  if (_Prev == _Old_exp)
469  return (1);
470  else
471  { /* copy old value */
472  *_Exp = _Prev;
473  return (0);
474  }
475  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
_STD_BEGIN typedef unsigned char _Uint1_t
Definition: xatomic.h:161
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
int _Compare_exchange_relaxed_2 ( volatile _Uint2_t _Tgt,
_Uint2_t _Exp,
_Uint2_t  _Value 
)
inline
971  { /* compare and exchange values atomically with
972  relaxed memory order */
973  _Uint2_t _Old_exp = *_Exp; /* read before atomic operation */
974 
975  _Uint2_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange16)((volatile short *)_Tgt,
976  _Value, _Old_exp);
977 
978  if (_Prev == _Old_exp)
979  return (1);
980  else
981  { /* copy old value */
982  *_Exp = _Prev;
983  return (0);
984  }
985  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
unsigned short _Uint2_t
Definition: xatomic.h:162
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
int _Compare_exchange_relaxed_4 ( volatile _Uint4_t _Tgt,
_Uint4_t _Exp,
_Uint4_t  _Value 
)
inline
1481  { /* compare and exchange values atomically with
1482  relaxed memory order */
1483  _Uint4_t _Old_exp = *_Exp; /* read before atomic operation */
1484 
1485  _Uint4_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange)((volatile long *)_Tgt,
1486  _Value, _Old_exp);
1487 
1488  if (_Prev == _Old_exp)
1489  return (1);
1490  else
1491  { /* copy old value */
1492  *_Exp = _Prev;
1493  return (0);
1494  }
1495  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
_Uint32t _Uint4_t
Definition: xatomic0.h:24
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
long __cdecl _InterlockedCompareExchange(long volatile *, long, long)
int _Compare_exchange_relaxed_8 ( volatile _Uint8_t _Tgt,
_Uint8_t _Exp,
_Uint8_t  _Value 
)
inline
2010  { /* compare and exchange values atomically with
2011  relaxed memory order */
2012  _Uint8_t _Old_exp = *_Exp; /* read before atomic operation */
2013 
2014  _Uint8_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange64)((volatile _LONGLONG *)_Tgt,
2015  _Value, _Old_exp);
2016 
2017  if (_Prev == _Old_exp)
2018  return (1);
2019  else
2020  { /* copy old value */
2021  *_Exp = _Prev;
2022  return (0);
2023  }
2024  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _LONGLONG
Definition: yvals.h:579
unsigned _LONGLONG _Uint8_t
Definition: xatomic.h:164
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
int _Compare_exchange_release_1 ( volatile _Uint1_t _Tgt,
_Uint1_t _Exp,
_Uint1_t  _Value 
)
inline
497  { /* compare and exchange values atomically with
498  release memory order */
499  _Uint1_t _Old_exp = *_Exp; /* read before atomic operation */
500 
501  _Uint1_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange8)((volatile char *)_Tgt,
502  _Value, _Old_exp);
503 
504  if (_Prev == _Old_exp)
505  return (1);
506  else
507  { /* copy old value */
508  *_Exp = _Prev;
509  return (0);
510  }
511  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
_STD_BEGIN typedef unsigned char _Uint1_t
Definition: xatomic.h:161
_In_ int _Value
Definition: setjmp.h:173
int _Compare_exchange_release_2 ( volatile _Uint2_t _Tgt,
_Uint2_t _Exp,
_Uint2_t  _Value 
)
inline
1007  { /* compare and exchange values atomically with
1008  release memory order */
1009  _Uint2_t _Old_exp = *_Exp; /* read before atomic operation */
1010 
1011  _Uint2_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange16)((volatile short *)_Tgt,
1012  _Value, _Old_exp);
1013 
1014  if (_Prev == _Old_exp)
1015  return (1);
1016  else
1017  { /* copy old value */
1018  *_Exp = _Prev;
1019  return (0);
1020  }
1021  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
unsigned short _Uint2_t
Definition: xatomic.h:162
_In_ int _Value
Definition: setjmp.h:173
int _Compare_exchange_release_4 ( volatile _Uint4_t _Tgt,
_Uint4_t _Exp,
_Uint4_t  _Value 
)
inline
1517  { /* compare and exchange values atomically with
1518  release memory order */
1519  _Uint4_t _Old_exp = *_Exp; /* read before atomic operation */
1520 
1521  _Uint4_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange)((volatile long *)_Tgt,
1522  _Value, _Old_exp);
1523 
1524  if (_Prev == _Old_exp)
1525  return (1);
1526  else
1527  { /* copy old value */
1528  *_Exp = _Prev;
1529  return (0);
1530  }
1531  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
_Uint32t _Uint4_t
Definition: xatomic0.h:24
_In_ int _Value
Definition: setjmp.h:173
long __cdecl _InterlockedCompareExchange(long volatile *, long, long)
int _Compare_exchange_release_8 ( volatile _Uint8_t _Tgt,
_Uint8_t _Exp,
_Uint8_t  _Value 
)
inline
2046  { /* compare and exchange values atomically with
2047  release memory order */
2048  _Uint8_t _Old_exp = *_Exp; /* read before atomic operation */
2049 
2050  _Uint8_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange64)((volatile _LONGLONG *)_Tgt,
2051  _Value, _Old_exp);
2052 
2053  if (_Prev == _Old_exp)
2054  return (1);
2055  else
2056  { /* copy old value */
2057  *_Exp = _Prev;
2058  return (0);
2059  }
2060  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
#define _LONGLONG
Definition: yvals.h:579
unsigned _LONGLONG _Uint8_t
Definition: xatomic.h:164
_In_ int _Value
Definition: setjmp.h:173
int _Compare_exchange_seq_cst_1 ( volatile _Uint1_t _Tgt,
_Uint1_t _Exp,
_Uint1_t  _Value 
)
inline
443  { /* compare and exchange values atomically with
444  sequentially consistent memory order */
445  _Uint1_t _Old_exp = *_Exp; /* read before atomic operation */
446 
447  _Uint1_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange8)((volatile char *)_Tgt,
448  _Value, _Old_exp);
449 
450  if (_Prev == _Old_exp)
451  return (1);
452  else
453  { /* copy old value */
454  *_Exp = _Prev;
455  return (0);
456  }
457  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_STD_BEGIN typedef unsigned char _Uint1_t
Definition: xatomic.h:161
_In_ int _Value
Definition: setjmp.h:173
int _Compare_exchange_seq_cst_2 ( volatile _Uint2_t _Tgt,
_Uint2_t _Exp,
_Uint2_t  _Value 
)
inline
953  { /* compare and exchange values atomically with
954  sequentially consistent memory order */
955  _Uint2_t _Old_exp = *_Exp; /* read before atomic operation */
956 
957  _Uint2_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange16)((volatile short *)_Tgt,
958  _Value, _Old_exp);
959 
960  if (_Prev == _Old_exp)
961  return (1);
962  else
963  { /* copy old value */
964  *_Exp = _Prev;
965  return (0);
966  }
967  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
unsigned short _Uint2_t
Definition: xatomic.h:162
_In_ int _Value
Definition: setjmp.h:173
int _Compare_exchange_seq_cst_4 ( volatile _Uint4_t _Tgt,
_Uint4_t _Exp,
_Uint4_t  _Value 
)
inline
1463  { /* compare and exchange values atomically with
1464  sequentially consistent memory order */
1465  _Uint4_t _Old_exp = *_Exp; /* read before atomic operation */
1466 
1467  _Uint4_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange)((volatile long *)_Tgt,
1468  _Value, _Old_exp);
1469 
1470  if (_Prev == _Old_exp)
1471  return (1);
1472  else
1473  { /* copy old value */
1474  *_Exp = _Prev;
1475  return (0);
1476  }
1477  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_Uint32t _Uint4_t
Definition: xatomic0.h:24
_In_ int _Value
Definition: setjmp.h:173
long __cdecl _InterlockedCompareExchange(long volatile *, long, long)
int _Compare_exchange_seq_cst_8 ( volatile _Uint8_t _Tgt,
_Uint8_t _Exp,
_Uint8_t  _Value 
)
inline
1992  { /* compare and exchange values atomically with
1993  sequentially consistent memory order */
1994  _Uint8_t _Old_exp = *_Exp; /* read before atomic operation */
1995 
1996  _Uint8_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange64)((volatile _LONGLONG *)_Tgt,
1997  _Value, _Old_exp);
1998 
1999  if (_Prev == _Old_exp)
2000  return (1);
2001  else
2002  { /* copy old value */
2003  *_Exp = _Prev;
2004  return (0);
2005  }
2006  }
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
#define _LONGLONG
Definition: yvals.h:579
unsigned _LONGLONG _Uint8_t
Definition: xatomic.h:164
_In_ int _Value
Definition: setjmp.h:173
_Atomic_integral_t _Compare_increment_atomic_counter ( _Atomic_counter_t _Counter,
_Atomic_integral_t  _Expected 
)
inline
2540  { // atomically compare and increment counter and return result
2541  return (_Compare_increment_atomic_counter_explicit(
2542  _Counter, _Expected, memory_order_seq_cst));
2543  }
_Atomic_integral_t _Compare_increment_atomic_counter_explicit(_Atomic_counter_t &_Counter, _Atomic_integral_t _Expected, memory_order _Order)
Definition: xatomic.h:2528
Definition: xatomic0.h:21
_Atomic_integral_t _Compare_increment_atomic_counter_explicit ( _Atomic_counter_t _Counter,
_Atomic_integral_t  _Expected,
memory_order  _Order 
)
inline
2532  { // atomically compare and increment counter and return result
2533  return (_Atomic_compare_exchange_weak_4(
2534  &_Counter, &_Expected, _Expected + 1,
2535  _Order, _Order));
2536  }
int _Atomic_compare_exchange_weak_4(volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value, memory_order _Order1, memory_order _Order2)
Definition: xatomic.h:1561
_Atomic_integral_t _Dec_atomic_counter ( _Atomic_counter_t _Counter)
inline
2513  { // atomically decrement counter and return result
2514  return (_Dec_atomic_counter_explicit(_Counter, memory_order_seq_cst));
2515  }
_Atomic_integral_t _Dec_atomic_counter_explicit(_Atomic_counter_t &_Counter, memory_order _Order)
Definition: xatomic.h:2506
Definition: xatomic0.h:21
_Atomic_integral_t _Dec_atomic_counter_explicit ( _Atomic_counter_t _Counter,
memory_order  _Order 
)
inline
2508  { // atomically decrement counter and return result
2509  return (_Atomic_fetch_sub_4(&_Counter, 1, _Order) - 1);
2510  }
_Uint4_t _Atomic_fetch_sub_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
Definition: xatomic.h:1625
_Uint1_t _Exchange_acquire_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
402  { /* exchange _Value and *_Tgt atomically with
403  acquire memory order */
404 
405  return (_INTRIN_ACQUIRE(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
406  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Exchange_acquire_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
912  { /* exchange _Value and *_Tgt atomically with
913  acquire memory order */
914 
915  return (_INTRIN_ACQUIRE(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
916  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Exchange_acquire_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1422  { /* exchange _Value and *_Tgt atomically with
1423  acquire memory order */
1424 
1425  return (_INTRIN_ACQUIRE(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1426  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Exchange_acquire_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
1951  { /* exchange _Value and *_Tgt atomically with
1952  acquire memory order */
1953 
1954  return (_INTRIN_ACQUIRE(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value));
1955  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
#define _LONGLONG
Definition: yvals.h:579
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Exchange_relaxed_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
395  { /* exchange _Value and *_Tgt atomically with
396  relaxed memory order */
397 
398  return (_INTRIN_RELAXED(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
399  }
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Exchange_relaxed_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
905  { /* exchange _Value and *_Tgt atomically with
906  relaxed memory order */
907 
908  return (_INTRIN_RELAXED(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
909  }
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Exchange_relaxed_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1415  { /* exchange _Value and *_Tgt atomically with
1416  relaxed memory order */
1417 
1418  return (_INTRIN_RELAXED(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1419  }
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Exchange_relaxed_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
1944  { /* exchange _Value and *_Tgt atomically with
1945  relaxed memory order */
1946 
1947  return (_INTRIN_RELAXED(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value));
1948  }
#define _LONGLONG
Definition: yvals.h:579
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Exchange_release_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
409  { /* exchange _Value and *_Tgt atomically with
410  release memory order */
411 
412  return (_INTRIN_RELEASE(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
413  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Exchange_release_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
919  { /* exchange _Value and *_Tgt atomically with
920  release memory order */
921 
922  return (_INTRIN_RELEASE(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
923  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Exchange_release_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1429  { /* exchange _Value and *_Tgt atomically with
1430  release memory order */
1431 
1432  return (_INTRIN_RELEASE(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1433  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Exchange_release_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
1958  { /* exchange _Value and *_Tgt atomically with
1959  release memory order */
1960 
1961  return (_INTRIN_RELEASE(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value));
1962  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
#define _LONGLONG
Definition: yvals.h:579
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Exchange_seq_cst_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
388  { /* exchange _Value and *_Tgt atomically with
389  sequentially consistent memory order */
390 
391  return (_INTRIN_SEQ_CST(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
392  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Exchange_seq_cst_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
898  { /* exchange _Value and *_Tgt atomically with
899  sequentially consistent memory order */
900 
901  return (_INTRIN_SEQ_CST(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
902  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Exchange_seq_cst_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1408  { /* exchange _Value and *_Tgt atomically with
1409  sequentially consistent memory order */
1410 
1411  return (_INTRIN_SEQ_CST(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1412  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Exchange_seq_cst_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
1937  { /* exchange _Value and *_Tgt atomically with
1938  sequentially consistent memory order */
1939 
1940  return (_INTRIN_SEQ_CST(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value));
1941  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
#define _LONGLONG
Definition: yvals.h:579
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Fetch_add_acquire_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
567  { /* add _Value to *_Tgt atomically with
568  acquire memory order */
569 
570  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
571  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_add_acquire_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1077  { /* add _Value to *_Tgt atomically with
1078  acquire memory order */
1079 
1080  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1081  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Fetch_add_acquire_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1587  { /* add _Value to *_Tgt atomically with
1588  acquire memory order */
1589 
1590  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1591  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Fetch_add_acquire_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2116  { /* add _Value to *_Tgt atomically with
2117  acquire memory order */
2118 
2119  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd64)((volatile _LONGLONG *)_Tgt, _Value));
2120  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
#define _LONGLONG
Definition: yvals.h:579
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Fetch_add_relaxed_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
560  { /* add _Value to *_Tgt atomically with
561  relaxed memory order */
562 
563  return (_INTRIN_RELAXED(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
564  }
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_add_relaxed_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1070  { /* add _Value to *_Tgt atomically with
1071  relaxed memory order */
1072 
1073  return (_INTRIN_RELAXED(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1074  }
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Fetch_add_relaxed_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1580  { /* add _Value to *_Tgt atomically with
1581  relaxed memory order */
1582 
1583  return (_INTRIN_RELAXED(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1584  }
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Fetch_add_relaxed_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2109  { /* add _Value to *_Tgt atomically with
2110  relaxed memory order */
2111 
2112  return (_INTRIN_RELAXED(_InterlockedExchangeAdd64)((volatile _LONGLONG *)_Tgt, _Value));
2113  }
#define _LONGLONG
Definition: yvals.h:579
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Fetch_add_release_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
574  { /* add _Value to *_Tgt atomically with
575  release memory order */
576 
577  return (_INTRIN_RELEASE(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
578  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_add_release_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1084  { /* add _Value to *_Tgt atomically with
1085  release memory order */
1086 
1087  return (_INTRIN_RELEASE(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1088  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Fetch_add_release_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1594  { /* add _Value to *_Tgt atomically with
1595  release memory order */
1596 
1597  return (_INTRIN_RELEASE(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1598  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Fetch_add_release_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2123  { /* add _Value to *_Tgt atomically with
2124  release memory order */
2125 
2126  return (_INTRIN_RELEASE(_InterlockedExchangeAdd64)((volatile _LONGLONG *)_Tgt, _Value));
2127  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
#define _LONGLONG
Definition: yvals.h:579
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Fetch_add_seq_cst_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
553  { /* add _Value to *_Tgt atomically with
554  sequentially consistent memory order */
555 
556  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
557  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_add_seq_cst_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1063  { /* add _Value to *_Tgt atomically with
1064  sequentially consistent memory order */
1065 
1066  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1067  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Fetch_add_seq_cst_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1573  { /* add _Value to *_Tgt atomically with
1574  sequentially consistent memory order */
1575 
1576  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1577  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Fetch_add_seq_cst_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2102  { /* add _Value to *_Tgt atomically with
2103  sequentially consistent memory order */
2104 
2105  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd64)((volatile _LONGLONG *)_Tgt, _Value));
2106  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
#define _LONGLONG
Definition: yvals.h:579
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Fetch_and_acquire_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
627  { /* and _Value with *_Tgt atomically with
628  acquire memory order */
629 
630  return (_INTRIN_ACQUIRE(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
631  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_and_acquire_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1137  { /* and _Value with *_Tgt atomically with
1138  acquire memory order */
1139 
1140  return (_INTRIN_ACQUIRE(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1141  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Fetch_and_acquire_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1647  { /* and _Value with *_Tgt atomically with
1648  acquire memory order */
1649 
1650  return (_INTRIN_ACQUIRE(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1651  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Fetch_and_acquire_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2176  { /* and _Value with *_Tgt atomically with
2177  acquire memory order */
2178 
2179  return (_INTRIN_ACQUIRE(_InterlockedAnd64)((volatile _LONGLONG *)_Tgt, _Value));
2180  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
#define _LONGLONG
Definition: yvals.h:579
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Fetch_and_relaxed_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
620  { /* and _Value with *_Tgt atomically with
621  relaxed memory order */
622 
623  return (_INTRIN_RELAXED(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
624  }
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_and_relaxed_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1130  { /* and _Value with *_Tgt atomically with
1131  relaxed memory order */
1132 
1133  return (_INTRIN_RELAXED(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1134  }
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Fetch_and_relaxed_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1640  { /* and _Value with *_Tgt atomically with
1641  relaxed memory order */
1642 
1643  return (_INTRIN_RELAXED(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1644  }
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Fetch_and_relaxed_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2169  { /* and _Value with *_Tgt atomically with
2170  relaxed memory order */
2171 
2172  return (_INTRIN_RELAXED(_InterlockedAnd64)((volatile _LONGLONG *)_Tgt, _Value));
2173  }
#define _LONGLONG
Definition: yvals.h:579
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Fetch_and_release_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
634  { /* and _Value with *_Tgt atomically with
635  release memory order */
636 
637  return (_INTRIN_RELEASE(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
638  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_and_release_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1144  { /* and _Value with *_Tgt atomically with
1145  release memory order */
1146 
1147  return (_INTRIN_RELEASE(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1148  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Fetch_and_release_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1654  { /* and _Value with *_Tgt atomically with
1655  release memory order */
1656 
1657  return (_INTRIN_RELEASE(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1658  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Fetch_and_release_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2183  { /* and _Value with *_Tgt atomically with
2184  release memory order */
2185 
2186  return (_INTRIN_RELEASE(_InterlockedAnd64)((volatile _LONGLONG *)_Tgt, _Value));
2187  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
#define _LONGLONG
Definition: yvals.h:579
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Fetch_and_seq_cst_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
613  { /* and _Value with *_Tgt atomically with
614  sequentially consistent memory order */
615 
616  return (_INTRIN_SEQ_CST(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
617  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_and_seq_cst_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1123  { /* and _Value with *_Tgt atomically with
1124  sequentially consistent memory order */
1125 
1126  return (_INTRIN_SEQ_CST(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1127  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Fetch_and_seq_cst_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1633  { /* and _Value with *_Tgt atomically with
1634  sequentially consistent memory order */
1635 
1636  return (_INTRIN_SEQ_CST(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1637  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Fetch_and_seq_cst_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2162  { /* and _Value with *_Tgt atomically with
2163  sequentially consistent memory order */
2164 
2165  return (_INTRIN_SEQ_CST(_InterlockedAnd64)((volatile _LONGLONG *)_Tgt, _Value));
2166  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
#define _LONGLONG
Definition: yvals.h:579
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Fetch_or_acquire_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
681  { /* or _Value with *_Tgt atomically with
682  acquire memory order */
683 
684  return (_INTRIN_ACQUIRE(_InterlockedOr8)((volatile char *)_Tgt, _Value));
685  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_or_acquire_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1191  { /* or _Value with *_Tgt atomically with
1192  acquire memory order */
1193 
1194  return (_INTRIN_ACQUIRE(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1195  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Fetch_or_acquire_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1701  { /* or _Value with *_Tgt atomically with
1702  acquire memory order */
1703 
1704  return (_INTRIN_ACQUIRE(_InterlockedOr)((volatile long *)_Tgt, _Value));
1705  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Fetch_or_acquire_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2230  { /* or _Value with *_Tgt atomically with
2231  acquire memory order */
2232 
2233  return (_INTRIN_ACQUIRE(_InterlockedOr64)((volatile _LONGLONG *)_Tgt, _Value));
2234  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
#define _LONGLONG
Definition: yvals.h:579
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Fetch_or_relaxed_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
674  { /* or _Value with *_Tgt atomically with
675  relaxed memory order */
676 
677  return (_INTRIN_RELAXED(_InterlockedOr8)((volatile char *)_Tgt, _Value));
678  }
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_or_relaxed_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1184  { /* or _Value with *_Tgt atomically with
1185  relaxed memory order */
1186 
1187  return (_INTRIN_RELAXED(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1188  }
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Fetch_or_relaxed_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1694  { /* or _Value with *_Tgt atomically with
1695  relaxed memory order */
1696 
1697  return (_INTRIN_RELAXED(_InterlockedOr)((volatile long *)_Tgt, _Value));
1698  }
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Fetch_or_relaxed_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2223  { /* or _Value with *_Tgt atomically with
2224  relaxed memory order */
2225 
2226  return (_INTRIN_RELAXED(_InterlockedOr64)((volatile _LONGLONG *)_Tgt, _Value));
2227  }
#define _LONGLONG
Definition: yvals.h:579
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Fetch_or_release_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
688  { /* or _Value with *_Tgt atomically with
689  release memory order */
690 
691  return (_INTRIN_RELEASE(_InterlockedOr8)((volatile char *)_Tgt, _Value));
692  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_or_release_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1198  { /* or _Value with *_Tgt atomically with
1199  release memory order */
1200 
1201  return (_INTRIN_RELEASE(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1202  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Fetch_or_release_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1708  { /* or _Value with *_Tgt atomically with
1709  release memory order */
1710 
1711  return (_INTRIN_RELEASE(_InterlockedOr)((volatile long *)_Tgt, _Value));
1712  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Fetch_or_release_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2237  { /* or _Value with *_Tgt atomically with
2238  release memory order */
2239 
2240  return (_INTRIN_RELEASE(_InterlockedOr64)((volatile _LONGLONG *)_Tgt, _Value));
2241  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
#define _LONGLONG
Definition: yvals.h:579
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Fetch_or_seq_cst_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
667  { /* or _Value with *_Tgt atomically with
668  sequentially consistent memory order */
669 
670  return (_INTRIN_SEQ_CST(_InterlockedOr8)((volatile char *)_Tgt, _Value));
671  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_or_seq_cst_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1177  { /* or _Value with *_Tgt atomically with
1178  sequentially consistent memory order */
1179 
1180  return (_INTRIN_SEQ_CST(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1181  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Fetch_or_seq_cst_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1687  { /* or _Value with *_Tgt atomically with
1688  sequentially consistent memory order */
1689 
1690  return (_INTRIN_SEQ_CST(_InterlockedOr)((volatile long *)_Tgt, _Value));
1691  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Fetch_or_seq_cst_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2216  { /* or _Value with *_Tgt atomically with
2217  sequentially consistent memory order */
2218 
2219  return (_INTRIN_SEQ_CST(_InterlockedOr64)((volatile _LONGLONG *)_Tgt, _Value));
2220  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
#define _LONGLONG
Definition: yvals.h:579
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Fetch_xor_acquire_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
735  { /* xor _Value with *_Tgt atomically with
736  acquire memory order */
737 
738  return (_INTRIN_ACQUIRE(_InterlockedXor8)((volatile char *)_Tgt, _Value));
739  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_xor_acquire_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1245  { /* xor _Value with *_Tgt atomically with
1246  acquire memory order */
1247 
1248  return (_INTRIN_ACQUIRE(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1249  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Fetch_xor_acquire_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1755  { /* xor _Value with *_Tgt atomically with
1756  acquire memory order */
1757 
1758  return (_INTRIN_ACQUIRE(_InterlockedXor)((volatile long *)_Tgt, _Value));
1759  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Fetch_xor_acquire_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2284  { /* xor _Value with *_Tgt atomically with
2285  acquire memory order */
2286 
2287  return (_INTRIN_ACQUIRE(_InterlockedXor64)((volatile _LONGLONG *)_Tgt, _Value));
2288  }
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:65
#define _LONGLONG
Definition: yvals.h:579
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Fetch_xor_relaxed_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
728  { /* xor _Value with *_Tgt atomically with
729  relaxed memory order */
730 
731  return (_INTRIN_RELAXED(_InterlockedXor8)((volatile char *)_Tgt, _Value));
732  }
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_xor_relaxed_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1238  { /* xor _Value with *_Tgt atomically with
1239  relaxed memory order */
1240 
1241  return (_INTRIN_RELAXED(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1242  }
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Fetch_xor_relaxed_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1748  { /* xor _Value with *_Tgt atomically with
1749  relaxed memory order */
1750 
1751  return (_INTRIN_RELAXED(_InterlockedXor)((volatile long *)_Tgt, _Value));
1752  }
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Fetch_xor_relaxed_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2277  { /* xor _Value with *_Tgt atomically with
2278  relaxed memory order */
2279 
2280  return (_INTRIN_RELAXED(_InterlockedXor64)((volatile _LONGLONG *)_Tgt, _Value));
2281  }
#define _LONGLONG
Definition: yvals.h:579
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Fetch_xor_release_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
742  { /* xor _Value with *_Tgt atomically with
743  release memory order */
744 
745  return (_INTRIN_RELEASE(_InterlockedXor8)((volatile char *)_Tgt, _Value));
746  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_xor_release_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1252  { /* xor _Value with *_Tgt atomically with
1253  release memory order */
1254 
1255  return (_INTRIN_RELEASE(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1256  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Fetch_xor_release_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1762  { /* xor _Value with *_Tgt atomically with
1763  release memory order */
1764 
1765  return (_INTRIN_RELEASE(_InterlockedXor)((volatile long *)_Tgt, _Value));
1766  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Fetch_xor_release_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2291  { /* xor _Value with *_Tgt atomically with
2292  release memory order */
2293 
2294  return (_INTRIN_RELEASE(_InterlockedXor64)((volatile _LONGLONG *)_Tgt, _Value));
2295  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
#define _LONGLONG
Definition: yvals.h:579
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Fetch_xor_seq_cst_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
721  { /* xor _Value with *_Tgt atomically with
722  sequentially consistent memory order */
723 
724  return (_INTRIN_SEQ_CST(_InterlockedXor8)((volatile char *)_Tgt, _Value));
725  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Fetch_xor_seq_cst_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1231  { /* xor _Value with *_Tgt atomically with
1232  sequentially consistent memory order */
1233 
1234  return (_INTRIN_SEQ_CST(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1235  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Fetch_xor_seq_cst_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1741  { /* xor _Value with *_Tgt atomically with
1742  sequentially consistent memory order */
1743 
1744  return (_INTRIN_SEQ_CST(_InterlockedXor)((volatile long *)_Tgt, _Value));
1745  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Fetch_xor_seq_cst_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2270  { /* xor _Value with *_Tgt atomically with
2271  sequentially consistent memory order */
2272 
2273  return (_INTRIN_SEQ_CST(_InterlockedXor64)((volatile _LONGLONG *)_Tgt, _Value));
2274  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
#define _LONGLONG
Definition: yvals.h:579
_In_ int _Value
Definition: setjmp.h:173
_Atomic_integral_t _Inc_atomic_counter ( _Atomic_counter_t _Counter)
inline
2502  { // atomically increment counter and return result
2504  }
_Atomic_integral_t _Inc_atomic_counter_explicit(_Atomic_counter_t &_Counter, memory_order _Order)
Definition: xatomic.h:2495
Definition: xatomic0.h:21
_Atomic_integral_t _Inc_atomic_counter_explicit ( _Atomic_counter_t _Counter,
memory_order  _Order 
)
inline
2497  { // atomically increment counter and return result
2498  return (_Atomic_fetch_add_4(&_Counter, 1, _Order) + 1);
2499  }
_Uint4_t _Atomic_fetch_add_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
Definition: xatomic.h:1600
_Uint1_t _Load_acquire_1 ( volatile _Uint1_t _Tgt)
inline
359  { /* load from *_Tgt atomically with
360  acquire memory order */
361 
362  return (_Load_seq_cst_1(_Tgt));
363  }
_Uint1_t _Load_seq_cst_1(volatile _Uint1_t *_Tgt)
Definition: xatomic.h:326
_Uint2_t _Load_acquire_2 ( volatile _Uint2_t _Tgt)
inline
869  { /* load from *_Tgt atomically with
870  acquire memory order */
871 
872  return (_Load_seq_cst_2(_Tgt));
873  }
_Uint2_t _Load_seq_cst_2(volatile _Uint2_t *_Tgt)
Definition: xatomic.h:836
_Uint4_t _Load_acquire_4 ( volatile _Uint4_t _Tgt)
inline
1379  { /* load from *_Tgt atomically with
1380  acquire memory order */
1381 
1382  return (_Load_seq_cst_4(_Tgt));
1383  }
_Uint4_t _Load_seq_cst_4(volatile _Uint4_t *_Tgt)
Definition: xatomic.h:1346
_Uint8_t _Load_acquire_8 ( volatile _Uint8_t _Tgt)
inline
1908  { /* load from *_Tgt atomically with
1909  acquire memory order */
1910 
1911  return (_Load_seq_cst_8(_Tgt));
1912  }
_Uint8_t _Load_seq_cst_8(volatile _Uint8_t *_Tgt)
Definition: xatomic.h:1862
_Atomic_integral_t _Load_atomic_counter ( _Atomic_counter_t _Counter)
inline
2524  { // atomically load counter and return result
2526  }
_Atomic_integral_t _Load_atomic_counter_explicit(_Atomic_counter_t &_Counter, memory_order _Order)
Definition: xatomic.h:2517
Definition: xatomic0.h:21
_Atomic_integral_t _Load_atomic_counter_explicit ( _Atomic_counter_t _Counter,
memory_order  _Order 
)
inline
2519  { // atomically load counter and return result
2520  return (_Atomic_load_4(&_Counter, _Order));
2521  }
_Uint4_t _Atomic_load_4(volatile _Uint4_t *_Tgt, memory_order _Order)
Definition: xatomic.h:1385
_Uint1_t _Load_relaxed_1 ( volatile _Uint1_t _Tgt)
inline
344  { /* load from *_Tgt atomically with
345  relaxed memory order */
347 
348  #if defined(_M_ARM) || defined(_M_ARM64)
349  _Value = __iso_volatile_load8((volatile char *)_Tgt);
350 
351  #else
352  _Value = *_Tgt;
353  #endif
354 
355  return (_Value);
356  }
_STD_BEGIN typedef unsigned char _Uint1_t
Definition: xatomic.h:161
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Load_relaxed_2 ( volatile _Uint2_t _Tgt)
inline
854  { /* load from *_Tgt atomically with
855  relaxed memory order */
857 
858  #if defined(_M_ARM) || defined(_M_ARM64)
859  _Value = __iso_volatile_load16((volatile short *)_Tgt);
860 
861  #else
862  _Value = *_Tgt;
863  #endif
864 
865  return (_Value);
866  }
unsigned short _Uint2_t
Definition: xatomic.h:162
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Load_relaxed_4 ( volatile _Uint4_t _Tgt)
inline
1364  { /* load from *_Tgt atomically with
1365  relaxed memory order */
1366  _Uint4_t _Value;
1367 
1368  #if defined(_M_ARM) || defined(_M_ARM64)
1369  _Value = __iso_volatile_load32((volatile int *)_Tgt);
1370 
1371  #else
1372  _Value = *_Tgt;
1373  #endif
1374 
1375  return (_Value);
1376  }
_Uint32t _Uint4_t
Definition: xatomic0.h:24
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Load_relaxed_8 ( volatile _Uint8_t _Tgt)
inline
1887  { /* load from *_Tgt atomically with
1888  relaxed memory order */
1889  _Uint8_t _Value;
1890 
1891  #if defined(_M_X64)
1892  _Value = *_Tgt;
1893 
1894  #elif defined(_M_ARM)
1895  _Value = __ldrexd((volatile _LONGLONG *)_Tgt);
1896 
1897  #elif defined(_M_ARM64)
1898  _Value = __iso_volatile_load64((volatile _LONGLONG *)_Tgt);
1899 
1900  #else
1901  _Value = _InterlockedOr64((volatile _LONGLONG *)_Tgt, 0);
1902  #endif
1903 
1904  return (_Value);
1905  }
#define _LONGLONG
Definition: yvals.h:579
unsigned _LONGLONG _Uint8_t
Definition: xatomic.h:164
_In_ int _Value
Definition: setjmp.h:173
_Uint1_t _Load_seq_cst_1 ( volatile _Uint1_t _Tgt)
inline
327  { /* load from *_Tgt atomically with
328  sequentially consistent memory order */
330 
331  #if defined(_M_ARM) || defined(_M_ARM64)
332  _Value = __iso_volatile_load8((volatile char *)_Tgt);
333  _Memory_barrier();
334 
335  #else
336  _Value = *_Tgt;
338  #endif
339 
340  return (_Value);
341  }
#define _Compiler_barrier()
Definition: xatomic.h:21
_STD_BEGIN typedef unsigned char _Uint1_t
Definition: xatomic.h:161
_In_ int _Value
Definition: setjmp.h:173
_Uint2_t _Load_seq_cst_2 ( volatile _Uint2_t _Tgt)
inline
837  { /* load from *_Tgt atomically with
838  sequentially consistent memory order */
840 
841  #if defined(_M_ARM) || defined(_M_ARM64)
842  _Value = __iso_volatile_load16((volatile short *)_Tgt);
843  _Memory_barrier();
844 
845  #else
846  _Value = *_Tgt;
848  #endif
849 
850  return (_Value);
851  }
unsigned short _Uint2_t
Definition: xatomic.h:162
#define _Compiler_barrier()
Definition: xatomic.h:21
_In_ int _Value
Definition: setjmp.h:173
_Uint4_t _Load_seq_cst_4 ( volatile _Uint4_t _Tgt)
inline
1347  { /* load from *_Tgt atomically with
1348  sequentially consistent memory order */
1349  _Uint4_t _Value;
1350 
1351  #if defined(_M_ARM) || defined(_M_ARM64)
1352  _Value = __iso_volatile_load32((volatile int *)_Tgt);
1353  _Memory_barrier();
1354 
1355  #else
1356  _Value = *_Tgt;
1358  #endif
1359 
1360  return (_Value);
1361  }
_Uint32t _Uint4_t
Definition: xatomic0.h:24
#define _Compiler_barrier()
Definition: xatomic.h:21
_In_ int _Value
Definition: setjmp.h:173
_Uint8_t _Load_seq_cst_8 ( volatile _Uint8_t _Tgt)
inline
1863  { /* load from *_Tgt atomically with
1864  sequentially consistent memory order */
1865  _Uint8_t _Value;
1866 
1867  #if defined(_M_X64)
1868  _Value = *_Tgt;
1870 
1871  #elif defined(_M_ARM)
1872  _Value = __ldrexd((volatile _LONGLONG *)_Tgt);
1873  _Memory_barrier();
1874 
1875  #elif defined(_M_ARM64)
1876  _Value = __iso_volatile_load64((volatile _LONGLONG *)_Tgt);
1877  _Memory_barrier();
1878 
1879  #else
1880  _Value = _InterlockedOr64((volatile _LONGLONG *)_Tgt, 0);
1881  #endif
1882 
1883  return (_Value);
1884  }
#define _LONGLONG
Definition: yvals.h:579
unsigned _LONGLONG _Uint8_t
Definition: xatomic.h:164
#define _Compiler_barrier()
Definition: xatomic.h:21
_In_ int _Value
Definition: setjmp.h:173
void _Lock_spin_lock ( volatile _Atomic_flag_t _Flag)
inline
2405  { /* spin until _Flag successfully set */
2408  }
#define _ATOMIC_FLAG_TEST_AND_SET
Definition: xatomic.h:166
#define _YIELD_PROCESSOR
Definition: xatomic.h:2395
Definition: xatomic0.h:18
memory_order _Memory_order_upper_bound ( memory_order  _Order1,
memory_order  _Order2 
)
inline
190  { /* find upper bound of two memory orders,
191  based on the following partial order:
192 
193  seq_cst
194  |
195  acq_rel
196  / \
197  acquire release
198  | |
199  consume |
200  \ /
201  relaxed
202 
203  */
204 
205  static const memory_order _Upper[6][6] = { /* combined upper bounds */ // TRANSITION, VSO#202551
217  memory_order_seq_cst, memory_order_seq_cst, memory_order_seq_cst
218  }
219  };
220 
221  if ((_Order1 < 0) || (6 <= _Order1)
222  || (_Order2 < 0) || (6 <= _Order2))
223  { /* launder memory order */
225  return (memory_order_seq_cst);
226  }
227  return (_Upper[_Order1][_Order2]);
228  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
memory_order
Definition: xatomic0.h:15
Definition: xatomic0.h:20
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:19
Definition: xatomic0.h:18
void _Store_relaxed_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
265  { /* store _Value atomically with relaxed memory order */
266 
267  #if defined(_M_ARM) || defined(_M_ARM64)
268  __iso_volatile_store8((volatile char *)_Tgt, _Value);
269 
270  #else
271  *_Tgt = _Value;
272  #endif
273  }
_In_ int _Value
Definition: setjmp.h:173
void _Store_relaxed_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
775  { /* store _Value atomically with relaxed memory order */
776 
777  #if defined(_M_ARM) || defined(_M_ARM64)
778  __iso_volatile_store16((volatile short *)_Tgt, _Value);
779 
780  #else
781  *_Tgt = _Value;
782  #endif
783  }
_In_ int _Value
Definition: setjmp.h:173
void _Store_relaxed_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1285  { /* store _Value atomically with relaxed memory order */
1286 
1287  #if defined(_M_ARM) || defined(_M_ARM64)
1288  __iso_volatile_store32((volatile int *)_Tgt, _Value);
1289 
1290  #else
1291  *_Tgt = _Value;
1292  #endif
1293  }
_In_ int _Value
Definition: setjmp.h:173
void _Store_relaxed_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
1795  { /* store _Value atomically with relaxed memory order */
1796 
1797  #if defined(_M_X64)
1798  *_Tgt = _Value;
1799 
1800  #elif defined(_M_ARM64)
1801  __iso_volatile_store64((volatile _LONGLONG *)_Tgt, _Value);
1802 
1803  #else
1804  _INTRIN_RELAXED(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value);
1805  #endif
1806  }
#define _LONGLONG
Definition: yvals.h:579
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:64
_In_ int _Value
Definition: setjmp.h:173
void _Store_release_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
276  { /* store _Value atomically with release memory order */
277 
278  #if defined(_M_ARM) || defined(_M_ARM64)
279  _Memory_barrier();
280  __iso_volatile_store8((volatile char *)_Tgt, _Value);
281 
282  #else
284  *_Tgt = _Value;
285  #endif
286  }
#define _Compiler_barrier()
Definition: xatomic.h:21
_In_ int _Value
Definition: setjmp.h:173
void _Store_release_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
786  { /* store _Value atomically with release memory order */
787 
788  #if defined(_M_ARM) || defined(_M_ARM64)
789  _Memory_barrier();
790  __iso_volatile_store16((volatile short *)_Tgt, _Value);
791 
792  #else
794  *_Tgt = _Value;
795  #endif
796  }
#define _Compiler_barrier()
Definition: xatomic.h:21
_In_ int _Value
Definition: setjmp.h:173
void _Store_release_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1296  { /* store _Value atomically with release memory order */
1297 
1298  #if defined(_M_ARM) || defined(_M_ARM64)
1299  _Memory_barrier();
1300  __iso_volatile_store32((volatile int *)_Tgt, _Value);
1301 
1302  #else
1304  *_Tgt = _Value;
1305  #endif
1306  }
#define _Compiler_barrier()
Definition: xatomic.h:21
_In_ int _Value
Definition: setjmp.h:173
void _Store_release_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
1809  { /* store _Value atomically with release memory order */
1810 
1811  #if defined(_M_X64)
1813  *_Tgt = _Value;
1814 
1815  #elif defined(_M_ARM64)
1816  _Memory_barrier();
1817  __iso_volatile_store64((volatile _LONGLONG *)_Tgt, _Value);
1818 
1819  #else
1820  _INTRIN_RELEASE(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value);
1821  #endif
1822  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:66
#define _LONGLONG
Definition: yvals.h:579
#define _Compiler_barrier()
Definition: xatomic.h:21
_In_ int _Value
Definition: setjmp.h:173
void _Store_seq_cst_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
289  { /* store _Value atomically with
290  sequentially consistent memory order */
291 
292  #if defined(_M_ARM) || defined(_M_ARM64)
293  _Memory_barrier();
294  __iso_volatile_store8((volatile char *)_Tgt, _Value);
295  _Memory_barrier();
296 
297  #else
298  _INTRIN_SEQ_CST(_InterlockedExchange8)((volatile char *)_Tgt, _Value);
299  #endif
300  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
void _Store_seq_cst_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
799  { /* store _Value atomically with
800  sequentially consistent memory order */
801 
802  #if defined(_M_ARM) || defined(_M_ARM64)
803  _Memory_barrier();
804  __iso_volatile_store16((volatile short *)_Tgt, _Value);
805  _Memory_barrier();
806 
807  #else
808  _INTRIN_SEQ_CST(_InterlockedExchange16)((volatile short *)_Tgt, _Value);
809  #endif
810  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
void _Store_seq_cst_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1309  { /* store _Value atomically with
1310  sequentially consistent memory order */
1311 
1312  #if defined(_M_ARM) || defined(_M_ARM64)
1313  _Memory_barrier();
1314  __iso_volatile_store32((volatile int *)_Tgt, _Value);
1315  _Memory_barrier();
1316 
1317  #else
1318  _INTRIN_SEQ_CST(_InterlockedExchange)((volatile long *)_Tgt, _Value);
1319  #endif
1320  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
_In_ int _Value
Definition: setjmp.h:173
void _Store_seq_cst_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
1825  { /* store _Value atomically with
1826  sequentially consistent memory order */
1827 
1828  #if defined(_M_ARM64)
1829  _Memory_barrier();
1830  __iso_volatile_store64((volatile _LONGLONG *)_Tgt, _Value);
1831  _Memory_barrier();
1832 
1833  #else
1834  _INTRIN_SEQ_CST(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value);
1835  #endif
1836  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:67
#define _LONGLONG
Definition: yvals.h:579
_In_ int _Value
Definition: setjmp.h:173
void _Unlock_spin_lock ( volatile _Atomic_flag_t _Flag)
inline
2412  { /* release previously obtained lock */
2414  }
#define _ATOMIC_FLAG_CLEAR
Definition: xatomic.h:167
Definition: xatomic0.h:19
void _Validate_compare_exchange_memory_order ( memory_order  _Success,
memory_order  _Failure 
)
inline
232  { /* validate success/failure */
233  /* _Failure may not be memory_order_release or memory_order_acq_rel
234  and may not be stronger than _Success */
235  switch (_Failure)
236  {
238  break;
239 
241  if (_Success != memory_order_seq_cst)
243  break;
244 
246  if ((_Success == memory_order_consume) ||
247  (_Success == memory_order_relaxed))
249  break;
250 
252  if (_Success == memory_order_relaxed)
254  break;
255 
256  default:
258  break;
259  }
260  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:184
Definition: xatomic0.h:17
Definition: xatomic0.h:16
Definition: xatomic0.h:21
Definition: xatomic0.h:18

Variable Documentation

_STD_BEGIN typedef unsigned char _Uint1_t