xatomic.h File Reference
#include <xatomic0.h>
#include <stddef.h>
#include <stdlib.h>
#include <string.h>
#include <intrin.h>
#include <xutility>


Macros

#define _XATOMIC_H
 
#define _Compiler_barrier()   _ReadWriteBarrier()
 
#define _CONCATX(x, y)   x ## y
 
#define _CONCAT(x, y)   _CONCATX(x, y)
 
#define ATOMIC_BOOL_LOCK_FREE   (1 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _ATOMIC_CHAR_LOCK_FREE   (1 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _ATOMIC_CHAR16_T_LOCK_FREE   (2 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _ATOMIC_CHAR32_T_LOCK_FREE   (2 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _ATOMIC_WCHAR_T_LOCK_FREE   (_WCHAR_T_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _ATOMIC_SHORT_LOCK_FREE   (_SHORT_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _ATOMIC_INT_LOCK_FREE   (_INT_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _ATOMIC_LONG_LOCK_FREE   (_LONG_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _ATOMIC_LLONG_LOCK_FREE   (_LONGLONG_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define ATOMIC_POINTER_LOCK_FREE   (_ADDR_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
 
#define _INTRIN_RELAXED(x)   x
 
#define _INTRIN_ACQUIRE(x)   x
 
#define _INTRIN_RELEASE(x)   x
 
#define _INTRIN_SEQ_CST(x)   x
 
#define _ATOMIC_FLAG_TEST_AND_SET   _Atomic_flag_test_and_set
 
#define _ATOMIC_FLAG_CLEAR   _Atomic_flag_clear
 
#define _ATOMIC_THREAD_FENCE   _Atomic_thread_fence
 
#define _ATOMIC_SIGNAL_FENCE   _Atomic_signal_fence
 
#define _INVALID_MEMORY_ORDER
 
#define _YIELD_PROCESSOR
 

Typedefs

typedef unsigned short _Uint2_t
 
typedef unsigned _LONGLONG _Uint8_t
 

Functions

memory_order _Memory_order_upper_bound (memory_order _Order1, memory_order _Order2)
 
void _Validate_compare_exchange_memory_order (memory_order _Success, memory_order _Failure)
 
void _Store_relaxed_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
void _Store_release_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
void _Store_seq_cst_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
void _Atomic_store_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
 
_Uint1_t _Load_seq_cst_1 (volatile _Uint1_t *_Tgt)
 
_Uint1_t _Load_relaxed_1 (volatile _Uint1_t *_Tgt)
 
_Uint1_t _Load_acquire_1 (volatile _Uint1_t *_Tgt)
 
_Uint1_t _Atomic_load_1 (volatile _Uint1_t *_Tgt, memory_order _Order)
 
_Uint1_t _Exchange_seq_cst_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Exchange_relaxed_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Exchange_acquire_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Exchange_release_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Atomic_exchange_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
 
int _Compare_exchange_seq_cst_1 (volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value)
 
int _Compare_exchange_relaxed_1 (volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value)
 
int _Compare_exchange_acquire_1 (volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value)
 
int _Compare_exchange_release_1 (volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value)
 
int _Atomic_compare_exchange_strong_1 (volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value, memory_order _Order1, memory_order _Order2)
 
int _Atomic_compare_exchange_weak_1 (volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value, memory_order _Order1, memory_order _Order2)
 
_Uint1_t _Fetch_add_seq_cst_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_add_relaxed_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_add_acquire_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_add_release_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Atomic_fetch_add_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
 
_Uint1_t _Atomic_fetch_sub_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
 
_Uint1_t _Fetch_and_seq_cst_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_and_relaxed_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_and_acquire_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_and_release_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Atomic_fetch_and_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
 
_Uint1_t _Fetch_or_seq_cst_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_or_relaxed_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_or_acquire_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_or_release_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Atomic_fetch_or_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
 
_Uint1_t _Fetch_xor_seq_cst_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_xor_relaxed_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_xor_acquire_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Fetch_xor_release_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value)
 
_Uint1_t _Atomic_fetch_xor_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
 
void _Store_relaxed_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
void _Store_release_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
void _Store_seq_cst_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
void _Atomic_store_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
 
_Uint2_t _Load_seq_cst_2 (volatile _Uint2_t *_Tgt)
 
_Uint2_t _Load_relaxed_2 (volatile _Uint2_t *_Tgt)
 
_Uint2_t _Load_acquire_2 (volatile _Uint2_t *_Tgt)
 
_Uint2_t _Atomic_load_2 (volatile _Uint2_t *_Tgt, memory_order _Order)
 
_Uint2_t _Exchange_seq_cst_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Exchange_relaxed_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Exchange_acquire_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Exchange_release_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Atomic_exchange_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
 
int _Compare_exchange_seq_cst_2 (volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value)
 
int _Compare_exchange_relaxed_2 (volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value)
 
int _Compare_exchange_acquire_2 (volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value)
 
int _Compare_exchange_release_2 (volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value)
 
int _Atomic_compare_exchange_strong_2 (volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value, memory_order _Order1, memory_order _Order2)
 
int _Atomic_compare_exchange_weak_2 (volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value, memory_order _Order1, memory_order _Order2)
 
_Uint2_t _Fetch_add_seq_cst_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_add_relaxed_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_add_acquire_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_add_release_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Atomic_fetch_add_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
 
_Uint2_t _Atomic_fetch_sub_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
 
_Uint2_t _Fetch_and_seq_cst_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_and_relaxed_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_and_acquire_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_and_release_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Atomic_fetch_and_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
 
_Uint2_t _Fetch_or_seq_cst_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_or_relaxed_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_or_acquire_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_or_release_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Atomic_fetch_or_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
 
_Uint2_t _Fetch_xor_seq_cst_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_xor_relaxed_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_xor_acquire_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Fetch_xor_release_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value)
 
_Uint2_t _Atomic_fetch_xor_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
 
void _Store_relaxed_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
void _Store_release_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
void _Store_seq_cst_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
void _Atomic_store_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
 
_Uint4_t _Load_seq_cst_4 (volatile _Uint4_t *_Tgt)
 
_Uint4_t _Load_relaxed_4 (volatile _Uint4_t *_Tgt)
 
_Uint4_t _Load_acquire_4 (volatile _Uint4_t *_Tgt)
 
_Uint4_t _Atomic_load_4 (volatile _Uint4_t *_Tgt, memory_order _Order)
 
_Uint4_t _Exchange_seq_cst_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Exchange_relaxed_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Exchange_acquire_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Exchange_release_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Atomic_exchange_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
 
int _Compare_exchange_seq_cst_4 (volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value)
 
int _Compare_exchange_relaxed_4 (volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value)
 
int _Compare_exchange_acquire_4 (volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value)
 
int _Compare_exchange_release_4 (volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value)
 
int _Atomic_compare_exchange_strong_4 (volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value, memory_order _Order1, memory_order _Order2)
 
int _Atomic_compare_exchange_weak_4 (volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value, memory_order _Order1, memory_order _Order2)
 
_Uint4_t _Fetch_add_seq_cst_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_add_relaxed_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_add_acquire_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_add_release_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Atomic_fetch_add_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
 
_Uint4_t _Atomic_fetch_sub_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
 
_Uint4_t _Fetch_and_seq_cst_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_and_relaxed_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_and_acquire_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_and_release_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Atomic_fetch_and_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
 
_Uint4_t _Fetch_or_seq_cst_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_or_relaxed_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_or_acquire_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_or_release_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Atomic_fetch_or_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
 
_Uint4_t _Fetch_xor_seq_cst_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_xor_relaxed_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_xor_acquire_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Fetch_xor_release_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value)
 
_Uint4_t _Atomic_fetch_xor_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
 
void _Store_relaxed_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
void _Store_release_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
void _Store_seq_cst_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
void _Atomic_store_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
 
_Uint8_t _Load_seq_cst_8 (volatile _Uint8_t *_Tgt)
 
_Uint8_t _Load_relaxed_8 (volatile _Uint8_t *_Tgt)
 
_Uint8_t _Load_acquire_8 (volatile _Uint8_t *_Tgt)
 
_Uint8_t _Atomic_load_8 (volatile _Uint8_t *_Tgt, memory_order _Order)
 
_Uint8_t _Exchange_seq_cst_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Exchange_relaxed_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Exchange_acquire_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Exchange_release_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Atomic_exchange_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
 
int _Compare_exchange_seq_cst_8 (volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value)
 
int _Compare_exchange_relaxed_8 (volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value)
 
int _Compare_exchange_acquire_8 (volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value)
 
int _Compare_exchange_release_8 (volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value)
 
int _Atomic_compare_exchange_strong_8 (volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value, memory_order _Order1, memory_order _Order2)
 
int _Atomic_compare_exchange_weak_8 (volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value, memory_order _Order1, memory_order _Order2)
 
_Uint8_t _Fetch_add_seq_cst_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_add_relaxed_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_add_acquire_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_add_release_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Atomic_fetch_add_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
 
_Uint8_t _Atomic_fetch_sub_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
 
_Uint8_t _Fetch_and_seq_cst_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_and_relaxed_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_and_acquire_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_and_release_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Atomic_fetch_and_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
 
_Uint8_t _Fetch_or_seq_cst_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_or_relaxed_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_or_acquire_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_or_release_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Atomic_fetch_or_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
 
_Uint8_t _Fetch_xor_seq_cst_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_xor_relaxed_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_xor_acquire_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Fetch_xor_release_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 
_Uint8_t _Atomic_fetch_xor_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
 
int _Atomic_flag_test_and_set (volatile _Atomic_flag_t *_Flag, memory_order _Order)
 
void _Atomic_flag_clear (volatile _Atomic_flag_t *_Flag, memory_order _Order)
 
void _Atomic_thread_fence (memory_order _Order)
 
void _Atomic_signal_fence (memory_order _Order)
 
void _Lock_spin_lock (volatile _Atomic_flag_t *_Flag)
 
void _Unlock_spin_lock (volatile _Atomic_flag_t *_Flag)
 
void _Atomic_copy (volatile _Atomic_flag_t *_Flag, size_t _Size, volatile void *_Tgt, volatile const void *_Src, memory_order _Order)
 
void _Atomic_exchange (volatile _Atomic_flag_t *_Flag, size_t _Size, volatile void *_Tgt, volatile void *_Src, memory_order _Order)
 
int _Atomic_compare_exchange_weak (volatile _Atomic_flag_t *_Flag, size_t _Size, volatile void *_Tgt, volatile void *_Exp, const volatile void *_Src, memory_order _Order1, memory_order _Order2)
 
int _Atomic_compare_exchange_strong (volatile _Atomic_flag_t *_Flag, size_t _Size, volatile void *_Tgt, volatile void *_Exp, const volatile void *_Src, memory_order _Order1, memory_order _Order2)
 
int _Atomic_is_lock_free_1 (void)
 
int _Atomic_is_lock_free_2 (void)
 
int _Atomic_is_lock_free_4 (void)
 
int _Atomic_is_lock_free_8 (void)
 

Variables

typedef unsigned char _Uint1_t (declared at the top of the header, immediately after _STD_BEGIN opens namespace std)
 

Macro Definition Documentation

#define _ATOMIC_CHAR16_T_LOCK_FREE   (2 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define _ATOMIC_CHAR32_T_LOCK_FREE   (2 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define _ATOMIC_CHAR_LOCK_FREE   (1 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define _ATOMIC_FLAG_CLEAR   _Atomic_flag_clear
#define _ATOMIC_FLAG_TEST_AND_SET   _Atomic_flag_test_and_set
#define _ATOMIC_INT_LOCK_FREE   (_INT_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define _ATOMIC_LLONG_LOCK_FREE   (_LONGLONG_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define _ATOMIC_LONG_LOCK_FREE   (_LONG_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define _ATOMIC_SHORT_LOCK_FREE   (_SHORT_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define _ATOMIC_SIGNAL_FENCE   _Atomic_signal_fence
#define _ATOMIC_THREAD_FENCE   _Atomic_thread_fence
#define _ATOMIC_WCHAR_T_LOCK_FREE   (_WCHAR_T_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define _Compiler_barrier()   _ReadWriteBarrier()
#define _CONCAT(x, y)   _CONCATX(x, y)
#define _CONCATX(x, y)   x ## y
#define _INTRIN_ACQUIRE(x)   x
#define _INTRIN_RELAXED(x)   x
#define _INTRIN_RELEASE(x)   x
#define _INTRIN_SEQ_CST(x)   x
#define _INVALID_MEMORY_ORDER
#define _XATOMIC_H
#define _YIELD_PROCESSOR
#define ATOMIC_BOOL_LOCK_FREE   (1 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
#define ATOMIC_POINTER_LOCK_FREE   (_ADDR_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
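All of the *_LOCK_FREE macros above expand to the same test: a width is reported as always lock-free (value 2) when it does not exceed _ATOMIC_MAXBYTES_LOCK_FREE, and as never lock-free (value 0) otherwise; the "sometimes lock-free" value 1 is never produced by this implementation. A minimal sketch that just prints the two standard-named macros documented above (assuming <atomic> is included so the macros are visible):

    #include <atomic>
    #include <cstdio>

    int main()
        {   // 2 = always lock-free, 0 = never lock-free in this implementation
        std::printf("ATOMIC_BOOL_LOCK_FREE    = %d\n", ATOMIC_BOOL_LOCK_FREE);
        std::printf("ATOMIC_POINTER_LOCK_FREE = %d\n", ATOMIC_POINTER_LOCK_FREE);
        return (0);
        }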

Typedef Documentation

typedef unsigned short _Uint2_t
typedef unsigned _LONGLONG _Uint8_t
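The _N suffix used throughout the functions below names the operand width in bytes: _Uint1_t is unsigned char, _Uint2_t is unsigned short, _Uint8_t is unsigned _LONGLONG, and _Uint4_t comes from <xatomic0.h>. A small illustrative sketch (an assumption stated as such: it uses the equivalent built-in types on this implementation rather than the internal typedef names) of the widths those suffixes refer to:

    #include <cstdint>

    // each _UintN_t operand type is expected to be exactly N bytes wide
    static_assert(sizeof(unsigned char) == 1, "width behind _Uint1_t");
    static_assert(sizeof(unsigned short) == 2, "width behind _Uint2_t");
    static_assert(sizeof(std::uint32_t) == 4, "width behind _Uint4_t");
    static_assert(sizeof(unsigned long long) == 8, "width behind _Uint8_t");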

Function Documentation

int _Atomic_compare_exchange_strong (volatile _Atomic_flag_t *_Flag, size_t _Size, volatile void *_Tgt, volatile void *_Exp, const volatile void *_Src, memory_order _Order1, memory_order _Order2)
inline
2520  { /* atomically compare and exchange with memory ordering */
2521  return (_Atomic_compare_exchange_weak(_Flag, _Size, _Tgt, _Exp, _Src,
2522  _Order1, _Order2));
2523  }
int _Atomic_compare_exchange_strong_1 (volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value, memory_order _Order1, memory_order _Order2)
inline
570  { /* compare and exchange values atomically */
571  _Validate_compare_exchange_memory_order(_Order1, _Order2);
572 
573  switch (_Memory_order_upper_bound(_Order1, _Order2))
574  {
575  case memory_order_relaxed:
576  return (_Compare_exchange_relaxed_1(_Tgt, _Exp, _Value));
577 
578  case memory_order_consume:
579  case memory_order_acquire:
580  return (_Compare_exchange_acquire_1(_Tgt, _Exp, _Value));
581 
582  case memory_order_release:
583  return (_Compare_exchange_release_1(_Tgt, _Exp, _Value));
584 
585  case memory_order_acq_rel:
586  case memory_order_seq_cst:
587  return (_Compare_exchange_seq_cst_1(_Tgt, _Exp, _Value));
588 
589  default:
590  _INVALID_MEMORY_ORDER;
591  return (0);
592  }
593  }
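_Atomic_compare_exchange_strong_1 first validates the success/failure order pair and then dispatches on _Memory_order_upper_bound, so a mixed pair such as (memory_order_acq_rel, memory_order_acquire) takes the acq_rel/seq_cst branch. A hedged usage sketch (assuming the internal names are reachable, as they are via <atomic> in this implementation; ordinary code would use std::atomic<unsigned char> instead):

    #include <atomic>
    using namespace std;   // the helpers on this page are declared inside _STD_BEGIN (namespace std)

    volatile _Uint1_t _Byte = 0;

    bool _Try_claim()
        {   // try to change 0 -> 1; on failure _Expected is updated with the observed value
        _Uint1_t _Expected = 0;
        return (_Atomic_compare_exchange_strong_1(&_Byte, &_Expected, 1,
            memory_order_acq_rel, memory_order_acquire) != 0);
        }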
int _Atomic_compare_exchange_strong_2 (volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value, memory_order _Order1, memory_order _Order2)
inline
1087  { /* compare and exchange values atomically */
1088  _Validate_compare_exchange_memory_order(_Order1, _Order2);
1089 
1090  switch (_Memory_order_upper_bound(_Order1, _Order2))
1091  {
1092  case memory_order_relaxed:
1093  return (_Compare_exchange_relaxed_2(_Tgt, _Exp, _Value));
1094 
1095  case memory_order_consume:
1096  case memory_order_acquire:
1097  return (_Compare_exchange_acquire_2(_Tgt, _Exp, _Value));
1098 
1099  case memory_order_release:
1100  return (_Compare_exchange_release_2(_Tgt, _Exp, _Value));
1101 
1102  case memory_order_acq_rel:
1103  case memory_order_seq_cst:
1104  return (_Compare_exchange_seq_cst_2(_Tgt, _Exp, _Value));
1105 
1106  default:
1107  _INVALID_MEMORY_ORDER;
1108  return (0);
1109  }
1110  }
int _Atomic_compare_exchange_strong_4 (volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value, memory_order _Order1, memory_order _Order2)
inline
1604  { /* compare and exchange values atomically */
1605  _Validate_compare_exchange_memory_order(_Order1, _Order2);
1606 
1607  switch (_Memory_order_upper_bound(_Order1, _Order2))
1608  {
1609  case memory_order_relaxed:
1610  return (_Compare_exchange_relaxed_4(_Tgt, _Exp, _Value));
1611 
1612  case memory_order_consume:
1613  case memory_order_acquire:
1614  return (_Compare_exchange_acquire_4(_Tgt, _Exp, _Value));
1615 
1616  case memory_order_release:
1617  return (_Compare_exchange_release_4(_Tgt, _Exp, _Value));
1618 
1619  case memory_order_acq_rel:
1620  case memory_order_seq_cst:
1621  return (_Compare_exchange_seq_cst_4(_Tgt, _Exp, _Value));
1622 
1623  default:
1624  _INVALID_MEMORY_ORDER;
1625  return (0);
1626  }
1627  }
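A common pattern built on the strong compare-exchange is the read-modify-write retry loop: whenever the call reports failure, *_Exp has been refreshed with the current value, so the caller simply recomputes and tries again. A minimal sketch using the 4-byte form under the same visibility assumptions as the previous example (std::atomic<T>::compare_exchange_strong is the supported public interface):

    // atomically double a 32-bit counter, retrying until no other thread interferes
    _Uint4_t _Double_counter(volatile _Uint4_t *_Counter)
        {
        _Uint4_t _Expected = _Atomic_load_4(_Counter, memory_order_relaxed);
        while (!_Atomic_compare_exchange_strong_4(_Counter, &_Expected, _Expected * 2,
            memory_order_seq_cst, memory_order_relaxed))
            ;   // _Expected now holds the freshly observed value
        return (_Expected * 2);
        }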
int _Atomic_compare_exchange_strong_8 (volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value, memory_order _Order1, memory_order _Order2)
inline
2118  { /* compare and exchange values atomically */
2119  _Validate_compare_exchange_memory_order(_Order1, _Order2);
2120 
2121  switch (_Memory_order_upper_bound(_Order1, _Order2))
2122  {
2123  case memory_order_relaxed:
2124  return (_Compare_exchange_relaxed_8(_Tgt, _Exp, _Value));
2125 
2126  case memory_order_consume:
2127  case memory_order_acquire:
2128  return (_Compare_exchange_acquire_8(_Tgt, _Exp, _Value));
2129 
2130  case memory_order_release:
2131  return (_Compare_exchange_release_8(_Tgt, _Exp, _Value));
2132 
2133  case memory_order_acq_rel:
2134  case memory_order_seq_cst:
2135  return (_Compare_exchange_seq_cst_8(_Tgt, _Exp, _Value));
2136 
2137  default:
2138  _INVALID_MEMORY_ORDER;
2139  return (0);
2140  }
2141  }
int _Atomic_compare_exchange_weak (volatile _Atomic_flag_t *_Flag, size_t _Size, volatile void *_Tgt, volatile void *_Exp, const volatile void *_Src, memory_order _Order1, memory_order _Order2)
inline
2503  { /* atomically compare and exchange with memory ordering */
2504  int _Result;
2505 
2506  _Lock_spin_lock(_Flag);
2507  _Result = memcmp((const void *)_Tgt, (const void *)_Exp, _Size) == 0;
2508  if (_Result != 0)
2509  memcpy((void *)_Tgt, (void *)_Src, _Size);
2510  else
2511  memcpy((void *)_Exp, (void *)_Tgt, _Size);
2512  _Unlock_spin_lock(_Flag);
2513  return (_Result);
2514  }
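For objects wider than the largest lock-free size, the comparison happens bytewise under the spin lock: when *_Tgt equals *_Exp the new value is copied in and a nonzero result is returned, otherwise the current contents are copied back into *_Exp. A heavily hedged sketch of those semantics (illustrative only, same visibility assumptions as the earlier sketches; in practice the flag and the buffers belong to a std::atomic<T> object rather than being managed by hand):

    struct _Big { unsigned long long _A, _B, _C; };   // too wide to be lock-free

    _Atomic_flag_t _Guard = 0;   // spin-lock flag protecting _Obj
    _Big _Obj = {};

    int _Set_if_unchanged(const _Big& _Seen, const _Big& _New)
        {   // nonzero on success; on failure _Expected receives the current contents of _Obj
        _Big _Expected = _Seen;
        _Big _Desired = _New;
        return (_Atomic_compare_exchange_weak(&_Guard, sizeof(_Big), &_Obj,
            &_Expected, &_Desired, memory_order_seq_cst, memory_order_seq_cst));
        }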
int _Atomic_compare_exchange_weak_1 (volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value, memory_order _Order1, memory_order _Order2)
inline
598  { /* compare and exchange values atomically */
599  /* No weak compare-exchange is currently available,
600  even for ARM, so fall back to strong */
601  return (_Atomic_compare_exchange_strong_1(_Tgt, _Exp, _Value,
602  _Order1, _Order2));
603  }
int _Atomic_compare_exchange_weak_2 (volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value, memory_order _Order1, memory_order _Order2)
inline
1115  { /* compare and exchange values atomically */
1116  /* No weak compare-exchange is currently available,
1117  even for ARM, so fall back to strong */
1118  return (_Atomic_compare_exchange_strong_2(_Tgt, _Exp, _Value,
1119  _Order1, _Order2));
1120  }
int _Atomic_compare_exchange_weak_4 (volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value, memory_order _Order1, memory_order _Order2)
inline
1632  { /* compare and exchange values atomically */
1633  /* No weak compare-exchange is currently available,
1634  even for ARM, so fall back to strong */
1635  return (_Atomic_compare_exchange_strong_4(_Tgt, _Exp, _Value,
1636  _Order1, _Order2));
1637  }
int _Atomic_compare_exchange_weak_8 (volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value, memory_order _Order1, memory_order _Order2)
inline
2146  { /* compare and exchange values atomically */
2147  /* No weak compare-exchange is currently available,
2148  even for ARM, so fall back to strong */
2149  return (_Atomic_compare_exchange_strong_8(_Tgt, _Exp, _Value,
2150  _Order1, _Order2));
2151  }
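As the comment in each _Atomic_compare_exchange_weak_N body notes, no genuinely weak (spuriously failing) compare-exchange is available here, so the weak forms simply forward to the strong ones. Callers should still be written for weak semantics, i.e., in a loop; a short sketch of that canonical pattern through the public interface:

    #include <atomic>

    std::atomic<unsigned short> _Ticket{0};

    unsigned short _Take_ticket()
        {   // classic weak-CAS loop: tolerate spurious failure by retrying
        unsigned short _Old = _Ticket.load(std::memory_order_relaxed);
        while (!_Ticket.compare_exchange_weak(_Old, static_cast<unsigned short>(_Old + 1),
                std::memory_order_acq_rel, std::memory_order_relaxed))
            ;   // _Old has been refreshed with the current value
        return (_Old);
        }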
void _Atomic_copy (volatile _Atomic_flag_t *_Flag, size_t _Size, volatile void *_Tgt, volatile const void *_Src, memory_order _Order)
inline
2475  { /* atomically copy *_Src to *_Tgt with memory ordering */
2476  _Lock_spin_lock(_Flag);
2477  memcpy((void *)_Tgt, (void *)_Src, _Size);
2478  _Unlock_spin_lock(_Flag);
2479  }
void _Atomic_exchange (volatile _Atomic_flag_t *_Flag, size_t _Size, volatile void *_Tgt, volatile void *_Src, memory_order _Order)
inline
2485  { /* atomically swap *_Src and *_Tgt with memory ordering */
2486  unsigned char *_Left = (unsigned char *)_Tgt;
2487  unsigned char *_Right = (unsigned char *)_Src;
2488 
2489  _Lock_spin_lock(_Flag);
2490  for (; 0 < _Size; --_Size)
2491  { /* copy bytes */
2492  unsigned char _Tmp = *_Left;
2493  *_Left++ = *_Right;
2494  *_Right++ = _Tmp;
2495  }
2496  _Unlock_spin_lock(_Flag);
2497  }
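Because an over-sized exchange cannot return its result in a register, _Atomic_exchange swaps the two buffers byte by byte under the spin lock: after the call *_Tgt holds the new value and *_Src holds what *_Tgt contained before. A hedged sketch of that behaviour (same illustrative assumptions as the earlier sketches):

    struct _Pair { long long _First, _Second; };

    _Atomic_flag_t _Lock = 0;   // spin-lock flag protecting _Shared
    _Pair _Shared = {1, 2};

    _Pair _Swap_in(_Pair _New)
        {   // install _New; the old value of _Shared comes back through the same buffer
        _Atomic_exchange(&_Lock, sizeof(_Pair), &_Shared, &_New, memory_order_seq_cst);
        return (_New);
        }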
_Uint1_t _Atomic_exchange_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
inline
462  { /* exchange _Value and *_Tgt atomically */
463  switch (_Order)
464  {
465  case memory_order_relaxed:
466  return (_Exchange_relaxed_1(_Tgt, _Value));
467 
468  case memory_order_consume:
469  case memory_order_acquire:
470  return (_Exchange_acquire_1(_Tgt, _Value));
471 
472  case memory_order_release:
473  return (_Exchange_release_1(_Tgt, _Value));
474 
475  case memory_order_acq_rel:
476  case memory_order_seq_cst:
477  return (_Exchange_seq_cst_1(_Tgt, _Value));
478 
479  default:
480  _INVALID_MEMORY_ORDER;
481  return (0);
482  }
483  }
_Uint2_t _Atomic_exchange_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
inline
979  { /* exchange _Value and *_Tgt atomically */
980  switch (_Order)
981  {
982  case memory_order_relaxed:
983  return (_Exchange_relaxed_2(_Tgt, _Value));
984 
985  case memory_order_consume:
986  case memory_order_acquire:
987  return (_Exchange_acquire_2(_Tgt, _Value));
988 
989  case memory_order_release:
990  return (_Exchange_release_2(_Tgt, _Value));
991 
992  case memory_order_acq_rel:
993  case memory_order_seq_cst:
994  return (_Exchange_seq_cst_2(_Tgt, _Value));
995 
996  default:
997  _INVALID_MEMORY_ORDER;
998  return (0);
999  }
1000  }
_Uint4_t _Atomic_exchange_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
inline
1496  { /* exchange _Value and *_Tgt atomically */
1497  switch (_Order)
1498  {
1499  case memory_order_relaxed:
1500  return (_Exchange_relaxed_4(_Tgt, _Value));
1501 
1502  case memory_order_consume:
1503  case memory_order_acquire:
1504  return (_Exchange_acquire_4(_Tgt, _Value));
1505 
1506  case memory_order_release:
1507  return (_Exchange_release_4(_Tgt, _Value));
1508 
1509  case memory_order_acq_rel:
1510  case memory_order_seq_cst:
1511  return (_Exchange_seq_cst_4(_Tgt, _Value));
1512 
1513  default:
1514  _INVALID_MEMORY_ORDER;
1515  return (0);
1516  }
1517  }
_Uint8_t _Atomic_exchange_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
inline
2010  { /* exchange _Value and *_Tgt atomically */
2011  switch (_Order)
2012  {
2013  case memory_order_relaxed:
2014  return (_Exchange_relaxed_8(_Tgt, _Value));
2015 
2016  case memory_order_consume:
2017  case memory_order_acquire:
2018  return (_Exchange_acquire_8(_Tgt, _Value));
2019 
2020  case memory_order_release:
2021  return (_Exchange_release_8(_Tgt, _Value));
2022 
2023  case memory_order_acq_rel:
2024  case memory_order_seq_cst:
2025  return (_Exchange_seq_cst_8(_Tgt, _Value));
2026 
2027  default:
2028  _INVALID_MEMORY_ORDER;
2029  return (0);
2030  }
2031  }
_Uint1_t _Atomic_fetch_add_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
inline
636  { /* add _Value to *_Tgt atomically */
637  switch (_Order)
638  {
639  case memory_order_relaxed:
640  return (_Fetch_add_relaxed_1(_Tgt, _Value));
641 
642  case memory_order_consume:
643  case memory_order_acquire:
644  return (_Fetch_add_acquire_1(_Tgt, _Value));
645 
646  case memory_order_release:
647  return (_Fetch_add_release_1(_Tgt, _Value));
648 
649  case memory_order_acq_rel:
650  case memory_order_seq_cst:
651  return (_Fetch_add_seq_cst_1(_Tgt, _Value));
652 
653  default:
654  _INVALID_MEMORY_ORDER;
655  return (0);
656  }
657  }
_Uint2_t _Atomic_fetch_add_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
inline
1153  { /* add _Value to *_Tgt atomically */
1154  switch (_Order)
1155  {
1156  case memory_order_relaxed:
1157  return (_Fetch_add_relaxed_2(_Tgt, _Value));
1158 
1159  case memory_order_consume:
1160  case memory_order_acquire:
1161  return (_Fetch_add_acquire_2(_Tgt, _Value));
1162 
1163  case memory_order_release:
1164  return (_Fetch_add_release_2(_Tgt, _Value));
1165 
1166  case memory_order_acq_rel:
1167  case memory_order_seq_cst:
1168  return (_Fetch_add_seq_cst_2(_Tgt, _Value));
1169 
1170  default:
1171  _INVALID_MEMORY_ORDER;
1172  return (0);
1173  }
1174  }
_Uint4_t _Atomic_fetch_add_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
inline
1670  { /* add _Value to *_Tgt atomically */
1671  switch (_Order)
1672  {
1673  case memory_order_relaxed:
1674  return (_Fetch_add_relaxed_4(_Tgt, _Value));
1675 
1676  case memory_order_consume:
1677  case memory_order_acquire:
1678  return (_Fetch_add_acquire_4(_Tgt, _Value));
1679 
1680  case memory_order_release:
1681  return (_Fetch_add_release_4(_Tgt, _Value));
1682 
1683  case memory_order_acq_rel:
1684  case memory_order_seq_cst:
1685  return (_Fetch_add_seq_cst_4(_Tgt, _Value));
1686 
1687  default:
1688  _INVALID_MEMORY_ORDER;
1689  return (0);
1690  }
1691  }
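Every fetch-add returns the value that *_Tgt held before the addition, which is what makes the family usable for tickets and unique-id counters. A minimal sketch with the 4-byte form (same visibility assumptions as the earlier sketches):

    volatile _Uint4_t _Next_id = 0;

    _Uint4_t _Allocate_id()
        {   // successive callers observe distinct previous values 0, 1, 2, ...
        return (_Atomic_fetch_add_4(&_Next_id, 1, memory_order_relaxed));
        }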
_Uint8_t _Atomic_fetch_add_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
inline
2184  { /* add _Value to *_Tgt atomically */
2185  switch (_Order)
2186  {
2187  case memory_order_relaxed:
2188  return (_Fetch_add_relaxed_8(_Tgt, _Value));
2189 
2190  case memory_order_consume:
2191  case memory_order_acquire:
2192  return (_Fetch_add_acquire_8(_Tgt, _Value));
2193 
2194  case memory_order_release:
2195  return (_Fetch_add_release_8(_Tgt, _Value));
2196 
2197  case memory_order_acq_rel:
2198  case memory_order_seq_cst:
2199  return (_Fetch_add_seq_cst_8(_Tgt, _Value));
2200 
2201  default:
2202  _INVALID_MEMORY_ORDER;
2203  return (0);
2204  }
2205  }
_Uint1_t _Atomic_fetch_and_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
inline
696  { /* and _Value with *_Tgt atomically */
697  switch (_Order)
698  {
699  case memory_order_relaxed:
700  return (_Fetch_and_relaxed_1(_Tgt, _Value));
701 
702  case memory_order_consume:
703  case memory_order_acquire:
704  return (_Fetch_and_acquire_1(_Tgt, _Value));
705 
706  case memory_order_release:
707  return (_Fetch_and_release_1(_Tgt, _Value));
708 
709  case memory_order_acq_rel:
710  case memory_order_seq_cst:
711  return (_Fetch_and_seq_cst_1(_Tgt, _Value));
712 
713  default:
714  _INVALID_MEMORY_ORDER;
715  return (0);
716  }
717  }
_Uint2_t _Atomic_fetch_and_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
inline
1213  { /* and _Value with *_Tgt atomically */
1214  switch (_Order)
1215  {
1216  case memory_order_relaxed:
1217  return (_Fetch_and_relaxed_2(_Tgt, _Value));
1218 
1219  case memory_order_consume:
1220  case memory_order_acquire:
1221  return (_Fetch_and_acquire_2(_Tgt, _Value));
1222 
1223  case memory_order_release:
1224  return (_Fetch_and_release_2(_Tgt, _Value));
1225 
1226  case memory_order_acq_rel:
1227  case memory_order_seq_cst:
1228  return (_Fetch_and_seq_cst_2(_Tgt, _Value));
1229 
1230  default:
1231  _INVALID_MEMORY_ORDER;
1232  return (0);
1233  }
1234  }
_Uint4_t _Atomic_fetch_and_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
inline
1730  { /* and _Value with *_Tgt atomically */
1731  switch (_Order)
1732  {
1733  case memory_order_relaxed:
1734  return (_Fetch_and_relaxed_4(_Tgt, _Value));
1735 
1736  case memory_order_consume:
1737  case memory_order_acquire:
1738  return (_Fetch_and_acquire_4(_Tgt, _Value));
1739 
1740  case memory_order_release:
1741  return (_Fetch_and_release_4(_Tgt, _Value));
1742 
1743  case memory_order_acq_rel:
1744  case memory_order_seq_cst:
1745  return (_Fetch_and_seq_cst_4(_Tgt, _Value));
1746 
1747  default:
1748  _INVALID_MEMORY_ORDER;
1749  return (0);
1750  }
1751  }
_Uint8_t _Atomic_fetch_and_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
inline
2244  { /* and _Value with *_Tgt atomically */
2245  switch (_Order)
2246  {
2247  case memory_order_relaxed:
2248  return (_Fetch_and_relaxed_8(_Tgt, _Value));
2249 
2250  case memory_order_consume:
2251  case memory_order_acquire:
2252  return (_Fetch_and_acquire_8(_Tgt, _Value));
2253 
2254  case memory_order_release:
2255  return (_Fetch_and_release_8(_Tgt, _Value));
2256 
2257  case memory_order_acq_rel:
2258  case memory_order_seq_cst:
2259  return (_Fetch_and_seq_cst_8(_Tgt, _Value));
2260 
2261  default:
2262  _INVALID_MEMORY_ORDER;
2263  return (0);
2264  }
2265  }
_Uint1_t _Atomic_fetch_or_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
inline
750  { /* or _Value with *_Tgt atomically */
751  switch (_Order)
752  {
753  case memory_order_relaxed:
754  return (_Fetch_or_relaxed_1(_Tgt, _Value));
755 
756  case memory_order_consume:
757  case memory_order_acquire:
758  return (_Fetch_or_acquire_1(_Tgt, _Value));
759 
760  case memory_order_release:
761  return (_Fetch_or_release_1(_Tgt, _Value));
762 
763  case memory_order_acq_rel:
764  case memory_order_seq_cst:
765  return (_Fetch_or_seq_cst_1(_Tgt, _Value));
766 
767  default:
768  _INVALID_MEMORY_ORDER;
769  return (0);
770  }
771  }
_Uint2_t _Atomic_fetch_or_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
inline
1267  { /* or _Value with *_Tgt atomically */
1268  switch (_Order)
1269  {
1270  case memory_order_relaxed:
1271  return (_Fetch_or_relaxed_2(_Tgt, _Value));
1272 
1273  case memory_order_consume:
1274  case memory_order_acquire:
1275  return (_Fetch_or_acquire_2(_Tgt, _Value));
1276 
1277  case memory_order_release:
1278  return (_Fetch_or_release_2(_Tgt, _Value));
1279 
1280  case memory_order_acq_rel:
1281  case memory_order_seq_cst:
1282  return (_Fetch_or_seq_cst_2(_Tgt, _Value));
1283 
1284  default:
1285  _INVALID_MEMORY_ORDER;
1286  return (0);
1287  }
1288  }
_Uint4_t _Atomic_fetch_or_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
inline
1784  { /* or _Value with *_Tgt atomically */
1785  switch (_Order)
1786  {
1787  case memory_order_relaxed:
1788  return (_Fetch_or_relaxed_4(_Tgt, _Value));
1789 
1790  case memory_order_consume:
1791  case memory_order_acquire:
1792  return (_Fetch_or_acquire_4(_Tgt, _Value));
1793 
1794  case memory_order_release:
1795  return (_Fetch_or_release_4(_Tgt, _Value));
1796 
1797  case memory_order_acq_rel:
1798  case memory_order_seq_cst:
1799  return (_Fetch_or_seq_cst_4(_Tgt, _Value));
1800 
1801  default:
1802  _INVALID_MEMORY_ORDER;
1803  return (0);
1804  }
1805  }
_Uint8_t _Atomic_fetch_or_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
inline
2298  { /* or _Value with *_Tgt atomically */
2299  switch (_Order)
2300  {
2301  case memory_order_relaxed:
2302  return (_Fetch_or_relaxed_8(_Tgt, _Value));
2303 
2304  case memory_order_consume:
2305  case memory_order_acquire:
2306  return (_Fetch_or_acquire_8(_Tgt, _Value));
2307 
2308  case memory_order_release:
2309  return (_Fetch_or_release_8(_Tgt, _Value));
2310 
2311  case memory_order_acq_rel:
2312  case memory_order_seq_cst:
2313  return (_Fetch_or_seq_cst_8(_Tgt, _Value));
2314 
2315  default:
2316  _INVALID_MEMORY_ORDER;
2317  return (0);
2318  }
2319  }
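Fetch-or and fetch-and are the usual building blocks for atomic flag words: or in a mask to set bits, and with the complement to clear them; both return the word as it was before the update. A short sketch with the 4-byte forms (same illustrative assumptions as the earlier sketches):

    volatile _Uint4_t _Flags = 0;

    const _Uint4_t _READY = 1u << 0;
    const _Uint4_t _DIRTY = 1u << 1;

    _Uint4_t _Mark_ready()
        {   // set the READY bit; returns the flags as they were before
        return (_Atomic_fetch_or_4(&_Flags, _READY, memory_order_acq_rel));
        }

    _Uint4_t _Clear_dirty()
        {   // clear the DIRTY bit; returns the flags as they were before
        return (_Atomic_fetch_and_4(&_Flags, ~_DIRTY, memory_order_acq_rel));
        }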
_Uint1_t _Atomic_fetch_sub_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
inline
661  { /* subtract _Value from *_Tgt atomically */
662  return (_Atomic_fetch_add_1(_Tgt, 0 - _Value, _Order));
663  }
_Uint2_t _Atomic_fetch_sub_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
inline
1178  { /* subtract _Value from *_Tgt atomically */
1179  return (_Atomic_fetch_add_2(_Tgt, 0 - _Value, _Order));
1180  }
_Uint4_t _Atomic_fetch_sub_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
inline
1695  { /* subtract _Value from *_Tgt atomically */
1696  return (_Atomic_fetch_add_4(_Tgt, 0 - _Value, _Order));
1697  }
_Uint8_t _Atomic_fetch_sub_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
inline
2209  { /* subtract _Value from *_Tgt atomically */
2210  return (_Atomic_fetch_add_8(_Tgt, 0 - _Value, _Order));
2211  }
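Each _Atomic_fetch_sub_N simply performs a fetch-add of 0 - _Value; this is well defined because the _UintN_t operand types are unsigned and wrap modulo 2^(8N), and the caller still receives the value held before the subtraction. A tiny worked sketch of the equivalence for the 1-byte case (same assumptions as the earlier sketches):

    void _Demo()
        {   // with unsigned 8-bit operands, x - 3 and x + (0 - 3) agree modulo 256
        volatile _Uint1_t _Count = 10;
        _Uint1_t _Old1 = _Atomic_fetch_sub_1(&_Count, 3, memory_order_seq_cst);   // 10 -> 7, returns 10
        _Uint1_t _Old2 = _Atomic_fetch_add_1(&_Count, static_cast<_Uint1_t>(0 - 3),
            memory_order_seq_cst);                                                // 7 -> 4, returns 7
        (void) _Old1; (void) _Old2;
        }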
_Uint1_t _Atomic_fetch_xor_1 (volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
inline
804  { /* xor _Value with *_Tgt atomically */
805  switch (_Order)
806  {
807  case memory_order_relaxed:
808  return (_Fetch_xor_relaxed_1(_Tgt, _Value));
809 
810  case memory_order_consume:
811  case memory_order_acquire:
812  return (_Fetch_xor_acquire_1(_Tgt, _Value));
813 
814  case memory_order_release:
815  return (_Fetch_xor_release_1(_Tgt, _Value));
816 
817  case memory_order_acq_rel:
818  case memory_order_seq_cst:
819  return (_Fetch_xor_seq_cst_1(_Tgt, _Value));
820 
821  default:
822  _INVALID_MEMORY_ORDER;
823  return (0);
824  }
825  }
_Uint2_t _Atomic_fetch_xor_2 (volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
inline
1321  { /* xor _Value with *_Tgt atomically */
1322  switch (_Order)
1323  {
1324  case memory_order_relaxed:
1325  return (_Fetch_xor_relaxed_2(_Tgt, _Value));
1326 
1327  case memory_order_consume:
1328  case memory_order_acquire:
1329  return (_Fetch_xor_acquire_2(_Tgt, _Value));
1330 
1331  case memory_order_release:
1332  return (_Fetch_xor_release_2(_Tgt, _Value));
1333 
1334  case memory_order_acq_rel:
1335  case memory_order_seq_cst:
1336  return (_Fetch_xor_seq_cst_2(_Tgt, _Value));
1337 
1338  default:
1339  _INVALID_MEMORY_ORDER;
1340  return (0);
1341  }
1342  }
_Uint4_t _Atomic_fetch_xor_4 (volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
inline
1838  { /* xor _Value with *_Tgt atomically */
1839  switch (_Order)
1840  {
1841  case memory_order_relaxed:
1842  return (_Fetch_xor_relaxed_4(_Tgt, _Value));
1843 
1844  case memory_order_consume:
1845  case memory_order_acquire:
1846  return (_Fetch_xor_acquire_4(_Tgt, _Value));
1847 
1848  case memory_order_release:
1849  return (_Fetch_xor_release_4(_Tgt, _Value));
1850 
1851  case memory_order_acq_rel:
1852  case memory_order_seq_cst:
1853  return (_Fetch_xor_seq_cst_4(_Tgt, _Value));
1854 
1855  default:
1856  _INVALID_MEMORY_ORDER;
1857  return (0);
1858  }
1859  }
_Uint8_t _Atomic_fetch_xor_8 (volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
inline
2352  { /* xor _Value with *_Tgt atomically */
2353  switch (_Order)
2354  {
2355  case memory_order_relaxed:
2356  return (_Fetch_xor_relaxed_8(_Tgt, _Value));
2357 
2358  case memory_order_consume:
2359  case memory_order_acquire:
2360  return (_Fetch_xor_acquire_8(_Tgt, _Value));
2361 
2362  case memory_order_release:
2363  return (_Fetch_xor_release_8(_Tgt, _Value));
2364 
2365  case memory_order_acq_rel:
2366  case memory_order_seq_cst:
2367  return (_Fetch_xor_seq_cst_8(_Tgt, _Value));
2368 
2369  default:
2370  _INVALID_MEMORY_ORDER;
2371  return (0);
2372  }
2373  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:232
_Uint8_t _Fetch_xor_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2329
_Uint8_t _Fetch_xor_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2322
_Uint8_t _Fetch_xor_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2336
_Uint8_t _Fetch_xor_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:2343
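The _Atomic_fetch_xor_2/_4/_8 dispatchers above all share one shape: pick a per-order helper from the requested memory_order and hand back the value *_Tgt held before the xor. A minimal user-level sketch, assuming the usual std::atomic front end that routes to these internals on this implementation (variable names are illustrative):

#include <atomic>
#include <cstdint>

int main()
    {   /* toggle bits in a 16-bit status word atomically */
    std::atomic<std::uint16_t> _Status{0x00FF};

    /* fetch_xor returns the pre-operation value, just as
       _Atomic_fetch_xor_2 returns the old *_Tgt */
    std::uint16_t _Old = _Status.fetch_xor(0x0F0F, std::memory_order_acq_rel);

    return (_Old == 0x00FF && _Status.load() == 0x0FF0) ? 0 : 1;
    }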
void _Atomic_flag_clear ( volatile _Atomic_flag_t _Flag,
memory_order  _Order 
)
inline
2402  { /* atomically clear flag */
2403  static_assert(sizeof(_Atomic_flag_t) == sizeof(_Uint4_t),
2404  "Unexpected _Atomic_flag_t size");
2405 
2406  switch (_Order)
2407  {
2408  case memory_order_relaxed:
2409  case memory_order_release:
2410  case memory_order_seq_cst:
2411  _Atomic_store_4((volatile _Uint4_t *)_Flag, 0, _Order);
2412  break;
2413 
2414  default:
2415  _INVALID_MEMORY_ORDER;
2416  break;
2417  }
2418  }
void _Atomic_store_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
Definition: xatomic.h:1381
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:232
long _Atomic_flag_t
Definition: xatomic0.h:70
_Uint32t _Uint4_t
Definition: xatomic0.h:28
int _Atomic_flag_test_and_set ( volatile _Atomic_flag_t _Flag,
memory_order  _Order 
)
inline
2377  { /* atomically test flag and set to true */
2378  switch (_Order)
2379  {
2380  case memory_order_relaxed:
2381  return (_INTRIN_RELAXED(_interlockedbittestandset)(_Flag, 0));
2382 
2383  case memory_order_consume:
2384  case memory_order_acquire:
2385  return (_INTRIN_ACQUIRE(_interlockedbittestandset)(_Flag, 0));
2386 
2387  case memory_order_release:
2388  return (_INTRIN_RELEASE(_interlockedbittestandset)(_Flag, 0));
2389 
2390  case memory_order_acq_rel:
2391  case memory_order_seq_cst:
2392  return (_INTRIN_SEQ_CST(_interlockedbittestandset)(_Flag, 0));
2393 
2394  default:
2395  _INVALID_MEMORY_ORDER;
2396  return (0);
2397  }
2398  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:232
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
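_Atomic_flag_test_and_set returns the previous state of the flag bit (nonzero when it was already set) and _Atomic_flag_clear writes zero back, which is the std::atomic_flag protocol. A hedged sketch of a try-lock built on that pair through the public interface (function names are illustrative):

#include <atomic>

std::atomic_flag _Busy = ATOMIC_FLAG_INIT;

bool _Try_enter()
    {   /* acquire order so later reads see the data the holder published;
           a nonzero test_and_set result means the flag was already taken */
    return !_Busy.test_and_set(std::memory_order_acquire);
    }

void _Leave()
    {   /* release order publishes earlier writes, matching
           _Atomic_flag_clear with memory_order_release */
    _Busy.clear(std::memory_order_release);
    }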
int _Atomic_is_lock_free_1 ( void  )
inline
2527  { /* return true if 1-byte atomic values are lock-free */
2528  return (1 <= _ATOMIC_MAXBYTES_LOCK_FREE);
2529  }
#define _ATOMIC_MAXBYTES_LOCK_FREE
Definition: xatomic0.h:72
int _Atomic_is_lock_free_2 ( void  )
inline
2532  { /* return true if 2-byte atomic values are lock-free */
2533  return (2 <= _ATOMIC_MAXBYTES_LOCK_FREE);
2534  }
#define _ATOMIC_MAXBYTES_LOCK_FREE
Definition: xatomic0.h:72
int _Atomic_is_lock_free_4 ( void  )
inline
2537  { /* return true if 4-byte atomic values are lock-free */
2538  return (4 <= _ATOMIC_MAXBYTES_LOCK_FREE);
2539  }
#define _ATOMIC_MAXBYTES_LOCK_FREE
Definition: xatomic0.h:72
int _Atomic_is_lock_free_8 ( void  )
inline
2542  { /* return true if 8-byte atomic values are lock-free */
2543  return (8 <= _ATOMIC_MAXBYTES_LOCK_FREE);
2544  }
#define _ATOMIC_MAXBYTES_LOCK_FREE
Definition: xatomic0.h:72
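The four _Atomic_is_lock_free_N helpers reduce to a single size comparison against _ATOMIC_MAXBYTES_LOCK_FREE, so the answer depends only on the operand width. A small sketch that asks the same question through std::atomic (illustrative only):

#include <atomic>
#include <cstdint>
#include <cstdio>

int main()
    {   /* each query corresponds to one _Atomic_is_lock_free_N helper */
    std::atomic<std::uint8_t> _A1;
    std::atomic<std::uint16_t> _A2;
    std::atomic<std::uint32_t> _A4;
    std::atomic<std::uint64_t> _A8;
    std::printf("1-byte lock-free: %d\n", (int)_A1.is_lock_free());
    std::printf("2-byte lock-free: %d\n", (int)_A2.is_lock_free());
    std::printf("4-byte lock-free: %d\n", (int)_A4.is_lock_free());
    std::printf("8-byte lock-free: %d\n", (int)_A8.is_lock_free());
    return 0;
    }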
_Uint1_t _Atomic_load_1 ( volatile _Uint1_t _Tgt,
memory_order  _Order 
)
inline
412  { /* load from *_Tgt atomically */
413  switch (_Order)
414  {
415  case memory_order_relaxed:
416  return (_Load_relaxed_1(_Tgt));
417 
418  case memory_order_consume:
419  case memory_order_acquire:
420  return (_Load_acquire_1(_Tgt));
421 
422  case memory_order_seq_cst:
423  return (_Load_seq_cst_1(_Tgt));
424 
425  default:
426  _INVALID_MEMORY_ORDER;
427  return (0);
428  }
429  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:232
_Uint1_t _Load_acquire_1(volatile _Uint1_t *_Tgt)
Definition: xatomic.h:403
_Uint1_t _Load_seq_cst_1(volatile _Uint1_t *_Tgt)
Definition: xatomic.h:371
_Uint1_t _Load_relaxed_1(volatile _Uint1_t *_Tgt)
Definition: xatomic.h:388
_Uint2_t _Atomic_load_2 ( volatile _Uint2_t _Tgt,
memory_order  _Order 
)
inline
929  { /* load from *_Tgt atomically */
930  switch (_Order)
931  {
932  case memory_order_relaxed:
933  return (_Load_relaxed_2(_Tgt));
934 
935  case memory_order_consume:
936  case memory_order_acquire:
937  return (_Load_acquire_2(_Tgt));
938 
939  case memory_order_seq_cst:
940  return (_Load_seq_cst_2(_Tgt));
941 
942  default:
943  _INVALID_MEMORY_ORDER;
944  return (0);
945  }
946  }
_Uint2_t _Load_relaxed_2(volatile _Uint2_t *_Tgt)
Definition: xatomic.h:905
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:232
_Uint2_t _Load_acquire_2(volatile _Uint2_t *_Tgt)
Definition: xatomic.h:920
_Uint2_t _Load_seq_cst_2(volatile _Uint2_t *_Tgt)
Definition: xatomic.h:888
_Uint4_t _Atomic_load_4 ( volatile _Uint4_t _Tgt,
memory_order  _Order 
)
inline
1446  { /* load from *_Tgt atomically */
1447  switch (_Order)
1448  {
1449  case memory_order_relaxed:
1450  return (_Load_relaxed_4(_Tgt));
1451 
1452  case memory_order_consume:
1453  case memory_order_acquire:
1454  return (_Load_acquire_4(_Tgt));
1455 
1456  case memory_order_seq_cst:
1457  return (_Load_seq_cst_4(_Tgt));
1458 
1459  default:
1460  _INVALID_MEMORY_ORDER;
1461  return (0);
1462  }
1463  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:232
_Uint4_t _Load_relaxed_4(volatile _Uint4_t *_Tgt)
Definition: xatomic.h:1422
_Uint4_t _Load_seq_cst_4(volatile _Uint4_t *_Tgt)
Definition: xatomic.h:1405
_Uint4_t _Load_acquire_4(volatile _Uint4_t *_Tgt)
Definition: xatomic.h:1437
_Uint8_t _Atomic_load_8 ( volatile _Uint8_t _Tgt,
memory_order  _Order 
)
inline
1960  { /* load from *_Tgt atomically */
1961  switch (_Order)
1962  {
1963  case memory_order_relaxed:
1964  return (_Load_relaxed_8(_Tgt));
1965 
1966  case memory_order_consume:
1967  case memory_order_acquire:
1968  return (_Load_acquire_8(_Tgt));
1969 
1970  case memory_order_seq_cst:
1971  return (_Load_seq_cst_8(_Tgt));
1972 
1973  default:
1974  _INVALID_MEMORY_ORDER;
1975  return (0);
1976  }
1977  }
_Uint8_t _Load_relaxed_8(volatile _Uint8_t *_Tgt)
Definition: xatomic.h:1933
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:232
_Uint8_t _Load_seq_cst_8(volatile _Uint8_t *_Tgt)
Definition: xatomic.h:1913
_Uint8_t _Load_acquire_8(volatile _Uint8_t *_Tgt)
Definition: xatomic.h:1951
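Note how every _Atomic_load_N dispatcher folds memory_order_consume into the acquire path, and how _Load_acquire_N itself simply reuses the sequentially consistent loader. In user terms, a sketch through std::atomic, which routes to these functions on this implementation (names are illustrative):

#include <atomic>
#include <cstdint>

std::atomic<std::uint32_t> _Ready{0};

std::uint32_t _Poll()
    {   /* consume is promoted to acquire by _Atomic_load_4, so both
           of these loads end up on the _Load_acquire_4 path */
    std::uint32_t _A = _Ready.load(std::memory_order_consume);
    std::uint32_t _B = _Ready.load(std::memory_order_acquire);
    return (_A + _B);
    }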
void _Atomic_signal_fence ( memory_order  _Order)
inline
2440  { /* inhibit compiler reordering */
2441  _Compiler_barrier();
2442  }
#define _Compiler_barrier()
Definition: xatomic.h:29
void _Atomic_store_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value,
memory_order  _Order 
)
inline
349  { /* store _Value atomically */
350  switch (_Order)
351  {
352  case memory_order_relaxed:
353  _Store_relaxed_1(_Tgt, _Value);
354  break;
355 
356  case memory_order_release:
357  _Store_release_1(_Tgt, _Value);
358  break;
359 
360  case memory_order_seq_cst:
361  _Store_seq_cst_1(_Tgt, _Value);
362  break;
363 
364  default:
365  _INVALID_MEMORY_ORDER;
366  break;
367  }
368  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:232
void _Store_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:321
void _Store_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:311
void _Store_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
Definition: xatomic.h:333
void _Atomic_store_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value,
memory_order  _Order 
)
inline
866  { /* store _Value atomically */
867  switch (_Order)
868  {
869  case memory_order_relaxed:
870  _Store_relaxed_2(_Tgt, _Value);
871  break;
872 
873  case memory_order_release:
874  _Store_release_2(_Tgt, _Value);
875  break;
876 
877  case memory_order_seq_cst:
878  _Store_seq_cst_2(_Tgt, _Value);
879  break;
880 
881  default:
882  _INVALID_MEMORY_ORDER;
883  break;
884  }
885  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:232
void _Store_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:838
void _Store_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:850
void _Store_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
Definition: xatomic.h:828
void _Atomic_store_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value,
memory_order  _Order 
)
inline
1383  { /* store _Value atomically */
1384  switch (_Order)
1385  {
1386  case memory_order_relaxed:
1387  _Store_relaxed_4(_Tgt, _Value);
1388  break;
1389 
1390  case memory_order_release:
1391  _Store_release_4(_Tgt, _Value);
1392  break;
1393 
1394  case memory_order_seq_cst:
1395  _Store_seq_cst_4(_Tgt, _Value);
1396  break;
1397 
1398  default:
1399  _INVALID_MEMORY_ORDER;
1400  break;
1401  }
1402  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
void _Store_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1345
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:232
void _Store_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1355
void _Store_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1367
void _Atomic_store_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value,
memory_order  _Order 
)
inline
1891  { /* store _Value atomically */
1892  switch (_Order)
1893  {
1894  case memory_order_relaxed:
1895  _Store_relaxed_8(_Tgt, _Value);
1896  break;
1897 
1898  case memory_order_release:
1899  _Store_release_8(_Tgt, _Value);
1900  break;
1901 
1902  case memory_order_seq_cst:
1903  _Store_seq_cst_8(_Tgt, _Value);
1904  break;
1905 
1906  default:
1907  _INVALID_MEMORY_ORDER;
1908  break;
1909  }
1910  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:232
void _Store_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:1872
void _Store_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:1862
void _Store_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
Definition: xatomic.h:1883
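A release store from _Atomic_store_N paired with an acquire load from _Atomic_load_N is the classic message-passing pattern. A self-contained sketch using the std::atomic front end (thread and variable names are illustrative):

#include <atomic>
#include <cstdint>
#include <thread>

std::uint32_t _Payload = 0;            /* ordinary, non-atomic data */
std::atomic<std::uint32_t> _Flag{0};   /* 4-byte atomic, i.e. the _Uint4_t paths */

void _Producer()
    {
    _Payload = 42;                                  /* plain store */
    _Flag.store(1, std::memory_order_release);      /* _Atomic_store_4, release */
    }

void _Consumer()
    {
    while (_Flag.load(std::memory_order_acquire) == 0)  /* _Atomic_load_4, acquire */
        ;   /* spin until the flag is published */
    /* _Payload is guaranteed to read 42 here */
    }

int main()
    {
    std::thread _T1(_Producer), _T2(_Consumer);
    _T1.join();
    _T2.join();
    return (_Payload == 42 ? 0 : 1);
    }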
void _Atomic_thread_fence ( memory_order  _Order)
inline
2421  { /* force memory visibility and inhibit compiler reordering */
2422  #if defined(_M_ARM)
2423  if (_Order != memory_order_relaxed)
2424  {
2425  _Memory_barrier();
2426  }
2427 
2428  #else
2429  _Compiler_barrier();
2430  if (_Order == memory_order_seq_cst)
2431  { /* force visibility */
2432  static _Uint4_t _Guard;
2433  _Atomic_exchange_4(&_Guard, 0, memory_order_seq_cst);
2434  _Compiler_barrier();
2435  }
2436  #endif
2437  }
_Uint4_t _Atomic_exchange_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
Definition: xatomic.h:1494
_Uint32t _Uint4_t
Definition: xatomic0.h:28
#define _Compiler_barrier()
Definition: xatomic.h:29
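_Atomic_thread_fence is what std::atomic_thread_fence lands on here; the seq_cst case issues a real barrier (a dummy _Atomic_exchange_4 on x86/x64, _Memory_barrier on ARM). The classic store-buffering sketch shows why that matters (illustrative names):

#include <atomic>

std::atomic<int> _X{0}, _Y{0};
int _R1 = 0, _R2 = 0;

void _Thread1()
    {
    _X.store(1, std::memory_order_relaxed);
    std::atomic_thread_fence(std::memory_order_seq_cst);  /* full fence */
    _R1 = _Y.load(std::memory_order_relaxed);
    }

void _Thread2()
    {
    _Y.store(1, std::memory_order_relaxed);
    std::atomic_thread_fence(std::memory_order_seq_cst);
    _R2 = _X.load(std::memory_order_relaxed);
    }

/* with both fences in place, _R1 == 0 && _R2 == 0 is impossible after the
   two threads have run; drop either fence and that outcome becomes legal */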
int _Compare_exchange_acquire_1 ( volatile _Uint1_t _Tgt,
_Uint1_t _Exp,
_Uint1_t  _Value 
)
inline
529  { /* compare and exchange values atomically with
530  acquire memory order */
531  int _Res;
532 
533  _Uint1_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange8)((volatile char *)_Tgt,
534  _Value, *_Exp);
535 
536  if (_Prev == *_Exp)
537  _Res = 1;
538  else
539  { /* copy old value */
540  _Res = 0;
541  *_Exp = _Prev;
542  }
543 
544  return (_Res);
545  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
_STD_BEGIN typedef unsigned char _Uint1_t
Definition: xatomic.h:209
int _Compare_exchange_acquire_2 ( volatile _Uint2_t _Tgt,
_Uint2_t _Exp,
_Uint2_t  _Value 
)
inline
1046  { /* compare and exchange values atomically with
1047  acquire memory order */
1048  int _Res;
1049 
1050  _Uint2_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange16)((volatile short *)_Tgt,
1051  _Value, *_Exp);
1052 
1053  if (_Prev == *_Exp)
1054  _Res = 1;
1055  else
1056  { /* copy old value */
1057  _Res = 0;
1058  *_Exp = _Prev;
1059  }
1060 
1061  return (_Res);
1062  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
unsigned short _Uint2_t
Definition: xatomic.h:210
int _Compare_exchange_acquire_4 ( volatile _Uint4_t _Tgt,
_Uint4_t _Exp,
_Uint4_t  _Value 
)
inline
1563  { /* compare and exchange values atomically with
1564  acquire memory order */
1565  int _Res;
1566 
1567  _Uint4_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange)((volatile long *)_Tgt,
1568  _Value, *_Exp);
1569 
1570  if (_Prev == *_Exp)
1571  _Res = 1;
1572  else
1573  { /* copy old value */
1574  _Res = 0;
1575  *_Exp = _Prev;
1576  }
1577 
1578  return (_Res);
1579  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
_Uint32t _Uint4_t
Definition: xatomic0.h:28
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
long __cdecl _InterlockedCompareExchange(long volatile *, long, long)
int _Compare_exchange_acquire_8 ( volatile _Uint8_t _Tgt,
_Uint8_t _Exp,
_Uint8_t  _Value 
)
inline
2077  { /* compare and exchange values atomically with
2078  acquire memory order */
2079  int _Res;
2080 
2081  _Uint8_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange64)((volatile _LONGLONG *)_Tgt,
2082  _Value, *_Exp);
2083 
2084  if (_Prev == *_Exp)
2085  _Res = 1;
2086  else
2087  { /* copy old value */
2088  _Res = 0;
2089  *_Exp = _Prev;
2090  }
2091 
2092  return (_Res);
2093  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
#define _LONGLONG
Definition: yvals.h:574
unsigned _LONGLONG _Uint8_t
Definition: xatomic.h:212
int _Compare_exchange_relaxed_1 ( volatile _Uint1_t _Tgt,
_Uint1_t _Exp,
_Uint1_t  _Value 
)
inline
509  { /* compare and exchange values atomically with
510  relaxed memory order */
511  int _Res;
512 
513  _Uint1_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange8)((volatile char *)_Tgt,
514  _Value, *_Exp);
515 
516  if (_Prev == *_Exp)
517  _Res = 1;
518  else
519  { /* copy old value */
520  _Res = 0;
521  *_Exp = _Prev;
522  }
523 
524  return (_Res);
525  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
_STD_BEGIN typedef unsigned char _Uint1_t
Definition: xatomic.h:209
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
int _Compare_exchange_relaxed_2 ( volatile _Uint2_t _Tgt,
_Uint2_t _Exp,
_Uint2_t  _Value 
)
inline
1026  { /* compare and exchange values atomically with
1027  relaxed memory order */
1028  int _Res;
1029 
1030  _Uint2_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange16)((volatile short *)_Tgt,
1031  _Value, *_Exp);
1032 
1033  if (_Prev == *_Exp)
1034  _Res = 1;
1035  else
1036  { /* copy old value */
1037  _Res = 0;
1038  *_Exp = _Prev;
1039  }
1040 
1041  return (_Res);
1042  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
unsigned short _Uint2_t
Definition: xatomic.h:210
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
int _Compare_exchange_relaxed_4 ( volatile _Uint4_t _Tgt,
_Uint4_t _Exp,
_Uint4_t  _Value 
)
inline
1543  { /* compare and exchange values atomically with
1544  relaxed memory order */
1545  int _Res;
1546 
1547  _Uint4_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange)((volatile long *)_Tgt,
1548  _Value, *_Exp);
1549 
1550  if (_Prev == *_Exp)
1551  _Res = 1;
1552  else
1553  { /* copy old value */
1554  _Res = 0;
1555  *_Exp = _Prev;
1556  }
1557 
1558  return (_Res);
1559  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
_Uint32t _Uint4_t
Definition: xatomic0.h:28
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
long __cdecl _InterlockedCompareExchange(long volatile *, long, long)
int _Compare_exchange_relaxed_8 ( volatile _Uint8_t _Tgt,
_Uint8_t _Exp,
_Uint8_t  _Value 
)
inline
2057  { /* compare and exchange values atomically with
2058  relaxed memory order */
2059  int _Res;
2060 
2061  _Uint8_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange64)((volatile _LONGLONG *)_Tgt,
2062  _Value, *_Exp);
2063 
2064  if (_Prev == *_Exp)
2065  _Res = 1;
2066  else
2067  { /* copy old value */
2068  _Res = 0;
2069  *_Exp = _Prev;
2070  }
2071 
2072  return (_Res);
2073  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _LONGLONG
Definition: yvals.h:574
unsigned _LONGLONG _Uint8_t
Definition: xatomic.h:212
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
int _Compare_exchange_release_1 ( volatile _Uint1_t _Tgt,
_Uint1_t _Exp,
_Uint1_t  _Value 
)
inline
549  { /* compare and exchange values atomically with
550  release memory order */
551  int _Res;
552 
553  _Uint1_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange8)((volatile char *)_Tgt,
554  _Value, *_Exp);
555 
556  if (_Prev == *_Exp)
557  _Res = 1;
558  else
559  { /* copy old value */
560  _Res = 0;
561  *_Exp = _Prev;
562  }
563 
564  return (_Res);
565  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
_STD_BEGIN typedef unsigned char _Uint1_t
Definition: xatomic.h:209
int _Compare_exchange_release_2 ( volatile _Uint2_t _Tgt,
_Uint2_t _Exp,
_Uint2_t  _Value 
)
inline
1066  { /* compare and exchange values atomically with
1067  release memory order */
1068  int _Res;
1069 
1070  _Uint2_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange16)((volatile short *)_Tgt,
1071  _Value, *_Exp);
1072 
1073  if (_Prev == *_Exp)
1074  _Res = 1;
1075  else
1076  { /* copy old value */
1077  _Res = 0;
1078  *_Exp = _Prev;
1079  }
1080 
1081  return (_Res);
1082  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
unsigned short _Uint2_t
Definition: xatomic.h:210
int _Compare_exchange_release_4 ( volatile _Uint4_t _Tgt,
_Uint4_t _Exp,
_Uint4_t  _Value 
)
inline
1583  { /* compare and exchange values atomically with
1584  release memory order */
1585  int _Res;
1586 
1587  _Uint4_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange)((volatile long *)_Tgt,
1588  _Value, *_Exp);
1589 
1590  if (_Prev == *_Exp)
1591  _Res = 1;
1592  else
1593  { /* copy old value */
1594  _Res = 0;
1595  *_Exp = _Prev;
1596  }
1597 
1598  return (_Res);
1599  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
_Uint32t _Uint4_t
Definition: xatomic0.h:28
long __cdecl _InterlockedCompareExchange(long volatile *, long, long)
int _Compare_exchange_release_8 ( volatile _Uint8_t _Tgt,
_Uint8_t _Exp,
_Uint8_t  _Value 
)
inline
2097  { /* compare and exchange values atomically with
2098  release memory order */
2099  int _Res;
2100 
2101  _Uint8_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange64)((volatile _LONGLONG *)_Tgt,
2102  _Value, *_Exp);
2103 
2104  if (_Prev == *_Exp)
2105  _Res = 1;
2106  else
2107  { /* copy old value */
2108  _Res = 0;
2109  *_Exp = _Prev;
2110  }
2111 
2112  return (_Res);
2113  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
#define _LONGLONG
Definition: yvals.h:574
unsigned _LONGLONG _Uint8_t
Definition: xatomic.h:212
int _Compare_exchange_seq_cst_1 ( volatile _Uint1_t _Tgt,
_Uint1_t _Exp,
_Uint1_t  _Value 
)
inline
488  { /* compare and exchange values atomically with
489  sequentially consistent memory order */
490 
491  int _Res;
492 
493  _Uint1_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange8)((volatile char *)_Tgt,
494  _Value, *_Exp);
495 
496  if (_Prev == *_Exp)
497  _Res = 1;
498  else
499  { /* copy old value */
500  _Res = 0;
501  *_Exp = _Prev;
502  }
503 
504  return (_Res);
505  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
_STD_BEGIN typedef unsigned char _Uint1_t
Definition: xatomic.h:209
int _Compare_exchange_seq_cst_2 ( volatile _Uint2_t _Tgt,
_Uint2_t _Exp,
_Uint2_t  _Value 
)
inline
1005  { /* compare and exchange values atomically with
1006  sequentially consistent memory order */
1007 
1008  int _Res;
1009 
1010  _Uint2_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange16)((volatile short *)_Tgt,
1011  _Value, *_Exp);
1012 
1013  if (_Prev == *_Exp)
1014  _Res = 1;
1015  else
1016  { /* copy old value */
1017  _Res = 0;
1018  *_Exp = _Prev;
1019  }
1020 
1021  return (_Res);
1022  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
unsigned short _Uint2_t
Definition: xatomic.h:210
int _Compare_exchange_seq_cst_4 ( volatile _Uint4_t _Tgt,
_Uint4_t _Exp,
_Uint4_t  _Value 
)
inline
1522  { /* compare and exchange values atomically with
1523  sequentially consistent memory order */
1524 
1525  int _Res;
1526 
1527  _Uint4_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange)((volatile long *)_Tgt,
1528  _Value, *_Exp);
1529 
1530  if (_Prev == *_Exp)
1531  _Res = 1;
1532  else
1533  { /* copy old value */
1534  _Res = 0;
1535  *_Exp = _Prev;
1536  }
1537 
1538  return (_Res);
1539  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
_Uint32t _Uint4_t
Definition: xatomic0.h:28
long __cdecl _InterlockedCompareExchange(long volatile *, long, long)
int _Compare_exchange_seq_cst_8 ( volatile _Uint8_t _Tgt,
_Uint8_t _Exp,
_Uint8_t  _Value 
)
inline
2036  { /* compare and exchange values atomically with
2037  sequentially consistent memory order */
2038 
2039  int _Res;
2040 
2041  _Uint8_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange64)((volatile _LONGLONG *)_Tgt,
2042  _Value, *_Exp);
2043 
2044  if (_Prev == *_Exp)
2045  _Res = 1;
2046  else
2047  { /* copy old value */
2048  _Res = 0;
2049  *_Exp = _Prev;
2050  }
2051 
2052  return (_Res);
2053  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
#define _LONGLONG
Definition: yvals.h:574
unsigned _LONGLONG _Uint8_t
Definition: xatomic.h:212
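All of the _Compare_exchange_* helpers follow the same contract: return 1 and store _Value when *_Tgt equals *_Exp, otherwise return 0 after copying the value actually observed back into *_Exp. That copy-back is what makes retry loops cheap, as in this hedged sketch of an atomic fetch-max written against the std::atomic interface (function name is illustrative):

#include <atomic>
#include <cstdint>

/* atomically raise _Tgt to at least _Value; returns the last value seen */
std::uint32_t _Fetch_max(std::atomic<std::uint32_t> &_Tgt, std::uint32_t _Value)
    {
    std::uint32_t _Expected = _Tgt.load(std::memory_order_relaxed);
    while (_Expected < _Value
        && !_Tgt.compare_exchange_weak(_Expected, _Value,
            std::memory_order_acq_rel, std::memory_order_relaxed))
        ;   /* on failure _Expected was refreshed, mirroring "*_Exp = _Prev" */
    return (_Expected);
    }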
_Uint1_t _Exchange_acquire_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
447  { /* exchange _Value and *_Tgt atomically with
448  acquire memory order */
449 
450  return (_INTRIN_ACQUIRE(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
451  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
_Uint2_t _Exchange_acquire_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
964  { /* exchange _Value and *_Tgt atomically with
965  acquire memory order */
966 
967  return (_INTRIN_ACQUIRE(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
968  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
_Uint4_t _Exchange_acquire_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1481  { /* exchange _Value and *_Tgt atomically with
1482  acquire memory order */
1483 
1484  return (_INTRIN_ACQUIRE(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1485  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
_Uint8_t _Exchange_acquire_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
1995  { /* exchange _Value and *_Tgt atomically with
1996  acquire memory order */
1997 
1998  return (_INTRIN_ACQUIRE(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value));
1999  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
#define _LONGLONG
Definition: yvals.h:574
_Uint1_t _Exchange_relaxed_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
440  { /* exchange _Value and *_Tgt atomically with
441  relaxed memory order */
442 
443  return (_INTRIN_RELAXED(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
444  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint2_t _Exchange_relaxed_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
957  { /* exchange _Value and *_Tgt atomically with
958  relaxed memory order */
959 
960  return (_INTRIN_RELAXED(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
961  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint4_t _Exchange_relaxed_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1474  { /* exchange _Value and *_Tgt atomically with
1475  relaxed memory order */
1476 
1477  return (_INTRIN_RELAXED(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1478  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint8_t _Exchange_relaxed_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
1988  { /* exchange _Value and *_Tgt atomically with
1989  relaxed memory order */
1990 
1991  return (_INTRIN_RELAXED(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value));
1992  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _LONGLONG
Definition: yvals.h:574
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint1_t _Exchange_release_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
454  { /* exchange _Value and *_Tgt atomically with
455  release memory order */
456 
457  return (_INTRIN_RELEASE(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
458  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
_Uint2_t _Exchange_release_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
971  { /* exchange _Value and *_Tgt atomically with
972  release memory order */
973 
974  return (_INTRIN_RELEASE(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
975  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
_Uint4_t _Exchange_release_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1488  { /* exchange _Value and *_Tgt atomically with
1489  release memory order */
1490 
1491  return (_INTRIN_RELEASE(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1492  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
_Uint8_t _Exchange_release_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2002  { /* exchange _Value and *_Tgt atomically with
2003  release memory order */
2004 
2005  return (_INTRIN_RELEASE(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value));
2006  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
#define _LONGLONG
Definition: yvals.h:574
_Uint1_t _Exchange_seq_cst_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
433  { /* exchange _Value and *_Tgt atomically with
434  sequentially consistent memory order */
435 
436  return (_INTRIN_SEQ_CST(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
437  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
_Uint2_t _Exchange_seq_cst_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
950  { /* exchange _Value and *_Tgt atomically with
951  sequentially consistent memory order */
952 
953  return (_INTRIN_SEQ_CST(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
954  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
_Uint4_t _Exchange_seq_cst_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1467  { /* exchange _Value and *_Tgt atomically with
1468  sequentially consistent memory order */
1469 
1470  return (_INTRIN_SEQ_CST(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1471  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
_Uint8_t _Exchange_seq_cst_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
1981  { /* exchange _Value and *_Tgt atomically with
1982  sequentially consistent memory order */
1983 
1984  return (_INTRIN_SEQ_CST(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value));
1985  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
#define _LONGLONG
Definition: yvals.h:574
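Because the _Exchange_* helpers hand back the previous contents, an exchange can serve as a one-shot claim: only the caller that swaps the sentinel out observes it. A small sketch through std::atomic, which is routed through _Atomic_exchange_4 on this implementation (names are illustrative):

#include <atomic>
#include <cstdint>

std::atomic<std::uint32_t> _Pending{1};   /* one unit of work to hand out */

bool _Claim_work()
    {   /* exactly one caller sees the old value 1 */
    return (_Pending.exchange(0, std::memory_order_acq_rel) == 1);
    }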
_Uint1_t _Fetch_add_acquire_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
621  { /* add _Value to *_Tgt atomically with
622  acquire memory order */
623 
624  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
625  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
_Uint2_t _Fetch_add_acquire_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1138  { /* add _Value to *_Tgt atomically with
1139  acquire memory order */
1140 
1141  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1142  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
_Uint4_t _Fetch_add_acquire_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1655  { /* add _Value to *_Tgt atomically with
1656  acquire memory order */
1657 
1658  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1659  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
_Uint8_t _Fetch_add_acquire_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2169  { /* add _Value to *_Tgt atomically with
2170  acquire memory order */
2171 
2172  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd64)((volatile _LONGLONG *)_Tgt, _Value));
2173  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
#define _LONGLONG
Definition: yvals.h:574
_Uint1_t _Fetch_add_relaxed_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
614  { /* add _Value to *_Tgt atomically with
615  relaxed memory order */
616 
617  return (_INTRIN_RELAXED(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
618  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint2_t _Fetch_add_relaxed_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1131  { /* add _Value to *_Tgt atomically with
1132  relaxed memory order */
1133 
1134  return (_INTRIN_RELAXED(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1135  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint4_t _Fetch_add_relaxed_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1648  { /* add _Value to *_Tgt atomically with
1649  relaxed memory order */
1650 
1651  return (_INTRIN_RELAXED(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1652  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint8_t _Fetch_add_relaxed_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2162  { /* add _Value to *_Tgt atomically with
2163  relaxed memory order */
2164 
2165  return (_INTRIN_RELAXED(_InterlockedExchangeAdd64)((volatile _LONGLONG *)_Tgt, _Value));
2166  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _LONGLONG
Definition: yvals.h:574
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint1_t _Fetch_add_release_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
628  { /* add _Value to *_Tgt atomically with
629  release memory order */
630 
631  return (_INTRIN_RELEASE(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
632  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
_Uint2_t _Fetch_add_release_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1145  { /* add _Value to *_Tgt atomically with
1146  release memory order */
1147 
1148  return (_INTRIN_RELEASE(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1149  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
_Uint4_t _Fetch_add_release_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1662  { /* add _Value to *_Tgt atomically with
1663  release memory order */
1664 
1665  return (_INTRIN_RELEASE(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1666  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
_Uint8_t _Fetch_add_release_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2176  { /* add _Value to *_Tgt atomically with
2177  release memory order */
2178 
2179  return (_INTRIN_RELEASE(_InterlockedExchangeAdd64)((volatile _LONGLONG *)_Tgt, _Value));
2180  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
#define _LONGLONG
Definition: yvals.h:574
_Uint1_t _Fetch_add_seq_cst_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
607  { /* add _Value to *_Tgt atomically with
608  sequentially consistent memory order */
609 
610  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
611  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
_Uint2_t _Fetch_add_seq_cst_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1124  { /* add _Value to *_Tgt atomically with
1125  sequentially consistent memory order */
1126 
1127  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1128  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
_Uint4_t _Fetch_add_seq_cst_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1641  { /* add _Value to *_Tgt atomically with
1642  sequentially consistent memory order */
1643 
1644  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1645  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
_Uint8_t _Fetch_add_seq_cst_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2155  { /* add _Value to *_Tgt atomically with
2156  sequentially consistent memory order */
2157 
2158  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd64)((volatile _LONGLONG *)_Tgt, _Value));
2159  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
#define _LONGLONG
Definition: yvals.h:574
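The _Fetch_add_* helpers return the pre-increment value, which is exactly what a reference count needs. A hedged sketch of the usual pattern via std::atomic (names are illustrative):

#include <atomic>
#include <cstdint>

std::atomic<std::uint32_t> _Refs{1};

void _Add_ref()
    {   /* only the count itself must be atomic, so relaxed is enough here */
    _Refs.fetch_add(1, std::memory_order_relaxed);
    }

bool _Release_ref()
    {   /* acq_rel so the last owner sees every write made while shared;
           returns true when this call dropped the final reference */
    return (_Refs.fetch_sub(1, std::memory_order_acq_rel) == 1);
    }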
_Uint1_t _Fetch_and_acquire_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
681  { /* and _Value with *_Tgt atomically with
682  acquire memory order */
683 
684  return (_INTRIN_ACQUIRE(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
685  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
_Uint2_t _Fetch_and_acquire_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1198  { /* and _Value with *_Tgt atomically with
1199  acquire memory order */
1200 
1201  return (_INTRIN_ACQUIRE(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1202  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
_Uint4_t _Fetch_and_acquire_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1715  { /* and _Value with *_Tgt atomically with
1716  acquire memory order */
1717 
1718  return (_INTRIN_ACQUIRE(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1719  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
_Uint8_t _Fetch_and_acquire_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2229  { /* and _Value with *_Tgt atomically with
2230  acquire memory order */
2231 
2232  return (_INTRIN_ACQUIRE(_InterlockedAnd64)((volatile _LONGLONG *)_Tgt, _Value));
2233  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
#define _LONGLONG
Definition: yvals.h:574
_Uint1_t _Fetch_and_relaxed_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
674  { /* and _Value with *_Tgt atomically with
675  relaxed memory order */
676 
677  return (_INTRIN_RELAXED(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
678  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint2_t _Fetch_and_relaxed_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1191  { /* and _Value with *_Tgt atomically with
1192  relaxed memory order */
1193 
1194  return (_INTRIN_RELAXED(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1195  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint4_t _Fetch_and_relaxed_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1708  { /* and _Value with *_Tgt atomically with
1709  relaxed memory order */
1710 
1711  return (_INTRIN_RELAXED(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1712  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint8_t _Fetch_and_relaxed_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2222  { /* and _Value with *_Tgt atomically with
2223  relaxed memory order */
2224 
2225  return (_INTRIN_RELAXED(_InterlockedAnd64)((volatile _LONGLONG *)_Tgt, _Value));
2226  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _LONGLONG
Definition: yvals.h:574
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint1_t _Fetch_and_release_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
688  { /* and _Value with *_Tgt atomically with
689  release memory order */
690 
691  return (_INTRIN_RELEASE(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
692  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
_Uint2_t _Fetch_and_release_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1205  { /* and _Value with *_Tgt atomically with
1206  release memory order */
1207 
1208  return (_INTRIN_RELEASE(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1209  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
_Uint4_t _Fetch_and_release_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1722  { /* and _Value with *_Tgt atomically with
1723  release memory order */
1724 
1725  return (_INTRIN_RELEASE(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1726  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
_Uint8_t _Fetch_and_release_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2236  { /* and _Value with *_Tgt atomically with
2237  release memory order */
2238 
2239  return (_INTRIN_RELEASE(_InterlockedAnd64)((volatile _LONGLONG *)_Tgt, _Value));
2240  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
#define _LONGLONG
Definition: yvals.h:574
_Uint1_t _Fetch_and_seq_cst_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
667  { /* and _Value with *_Tgt atomically with
668  sequentially consistent memory order */
669 
670  return (_INTRIN_SEQ_CST(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
671  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
_Uint2_t _Fetch_and_seq_cst_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1184  { /* and _Value with *_Tgt atomically with
1185  sequentially consistent memory order */
1186 
1187  return (_INTRIN_SEQ_CST(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1188  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
_Uint4_t _Fetch_and_seq_cst_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1701  { /* and _Value with *_Tgt atomically with
1702  sequentially consistent memory order */
1703 
1704  return (_INTRIN_SEQ_CST(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1705  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
_Uint8_t _Fetch_and_seq_cst_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2215  { /* and _Value with *_Tgt atomically with
2216  sequentially consistent memory order */
2217 
2218  return (_INTRIN_SEQ_CST(_InterlockedAnd64)((volatile _LONGLONG *)_Tgt, _Value));
2219  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
#define _LONGLONG
Definition: yvals.h:574
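_Fetch_and_* returns the word as it was before the mask was applied, so the caller can tell which bits it actually cleared. A sketch through std::atomic (names are illustrative):

#include <atomic>
#include <cstdint>

std::atomic<std::uint32_t> _State{0xFFu};

/* clear _Mask in _State and report which of those bits had been set */
std::uint32_t _Clear_bits(std::uint32_t _Mask)
    {
    return (_State.fetch_and(~_Mask, std::memory_order_acq_rel) & _Mask);
    }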
_Uint1_t _Fetch_or_acquire_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
735  { /* or _Value with *_Tgt atomically with
736  acquire memory order */
737 
738  return (_INTRIN_ACQUIRE(_InterlockedOr8)((volatile char *)_Tgt, _Value));
739  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
_Uint2_t _Fetch_or_acquire_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1252  { /* or _Value with *_Tgt atomically with
1253  acquire memory order */
1254 
1255  return (_INTRIN_ACQUIRE(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1256  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
_Uint4_t _Fetch_or_acquire_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1769  { /* or _Value with *_Tgt atomically with
1770  acquire memory order */
1771 
1772  return (_INTRIN_ACQUIRE(_InterlockedOr)((volatile long *)_Tgt, _Value));
1773  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
_Uint8_t _Fetch_or_acquire_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2283  { /* or _Value with *_Tgt atomically with
2284  acquire memory order */
2285 
2286  return (_INTRIN_ACQUIRE(_InterlockedOr64)((volatile _LONGLONG *)_Tgt, _Value));
2287  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
#define _LONGLONG
Definition: yvals.h:574
_Uint1_t _Fetch_or_relaxed_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
728  { /* or _Value with *_Tgt atomically with
729  relaxed memory order */
730 
731  return (_INTRIN_RELAXED(_InterlockedOr8)((volatile char *)_Tgt, _Value));
732  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint2_t _Fetch_or_relaxed_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1245  { /* or _Value with *_Tgt atomically with
1246  relaxed memory order */
1247 
1248  return (_INTRIN_RELAXED(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1249  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint4_t _Fetch_or_relaxed_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1762  { /* or _Value with *_Tgt atomically with
1763  relaxed memory order */
1764 
1765  return (_INTRIN_RELAXED(_InterlockedOr)((volatile long *)_Tgt, _Value));
1766  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint8_t _Fetch_or_relaxed_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2276  { /* or _Value with *_Tgt atomically with
2277  relaxed memory order */
2278 
2279  return (_INTRIN_RELAXED(_InterlockedOr64)((volatile _LONGLONG *)_Tgt, _Value));
2280  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _LONGLONG
Definition: yvals.h:574
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint1_t _Fetch_or_release_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
742  { /* or _Value with *_Tgt atomically with
743  release memory order */
744 
745  return (_INTRIN_RELEASE(_InterlockedOr8)((volatile char *)_Tgt, _Value));
746  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
_Uint2_t _Fetch_or_release_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1259  { /* or _Value with *_Tgt atomically with
1260  release memory order */
1261 
1262  return (_INTRIN_RELEASE(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1263  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
_Uint4_t _Fetch_or_release_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1776  { /* or _Value with *_Tgt atomically with
1777  release memory order */
1778 
1779  return (_INTRIN_RELEASE(_InterlockedOr)((volatile long *)_Tgt, _Value));
1780  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
_Uint8_t _Fetch_or_release_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2290  { /* or _Value with *_Tgt atomically with
2291  release memory order */
2292 
2293  return (_INTRIN_RELEASE(_InterlockedOr64)((volatile _LONGLONG *)_Tgt, _Value));
2294  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
#define _LONGLONG
Definition: yvals.h:574
_Uint1_t _Fetch_or_seq_cst_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
721  { /* or _Value with *_Tgt atomically with
722  sequentially consistent memory order */
723 
724  return (_INTRIN_SEQ_CST(_InterlockedOr8)((volatile char *)_Tgt, _Value));
725  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
_Uint2_t _Fetch_or_seq_cst_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1238  { /* or _Value with *_Tgt atomically with
1239  sequentially consistent memory order */
1240 
1241  return (_INTRIN_SEQ_CST(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1242  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
_Uint4_t _Fetch_or_seq_cst_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1755  { /* or _Value with *_Tgt atomically with
1756  sequentially consistent memory order */
1757 
1758  return (_INTRIN_SEQ_CST(_InterlockedOr)((volatile long *)_Tgt, _Value));
1759  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
_Uint8_t _Fetch_or_seq_cst_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2269  { /* or _Value with *_Tgt atomically with
2270  sequentially consistent memory order */
2271 
2272  return (_INTRIN_SEQ_CST(_InterlockedOr64)((volatile _LONGLONG *)_Tgt, _Value));
2273  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
#define _LONGLONG
Definition: yvals.h:574
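_Fetch_or_* likewise returns the previous bit pattern, which lets exactly one caller detect that it was the first to set a given bit. A sketch through std::atomic (names are illustrative):

#include <atomic>
#include <cstdint>

std::atomic<std::uint32_t> _Events{0};

/* post an event bit; true means this call was the one that raised it */
bool _Post_event(std::uint32_t _Bit)
    {
    return ((_Events.fetch_or(_Bit, std::memory_order_release) & _Bit) == 0);
    }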
_Uint1_t _Fetch_xor_acquire_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
789  { /* xor _Value with *_Tgt atomically with
790  acquire memory order */
791 
792  return (_INTRIN_ACQUIRE(_InterlockedXor8)((volatile char *)_Tgt, _Value));
793  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
_Uint2_t _Fetch_xor_acquire_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1306  { /* xor _Value with *_Tgt atomically with
1307  acquire memory order */
1308 
1309  return (_INTRIN_ACQUIRE(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1310  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
_Uint4_t _Fetch_xor_acquire_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1823  { /* xor _Value with *_Tgt atomically with
1824  acquire memory order */
1825 
1826  return (_INTRIN_ACQUIRE(_InterlockedXor)((volatile long *)_Tgt, _Value));
1827  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
_Uint8_t _Fetch_xor_acquire_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2337  { /* xor _Value with *_Tgt atomically with
2338  acquire memory order */
2339 
2340  return (_INTRIN_ACQUIRE(_InterlockedXor64)((volatile _LONGLONG *)_Tgt, _Value));
2341  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_ACQUIRE(x)
Definition: xatomic.h:71
#define _LONGLONG
Definition: yvals.h:574
_Uint1_t _Fetch_xor_relaxed_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
782  { /* xor _Value with *_Tgt atomically with
783  relaxed memory order */
784 
785  return (_INTRIN_RELAXED(_InterlockedXor8)((volatile char *)_Tgt, _Value));
786  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint2_t _Fetch_xor_relaxed_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1299  { /* xor _Value with *_Tgt atomically with
1300  relaxed memory order */
1301 
1302  return (_INTRIN_RELAXED(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1303  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint4_t _Fetch_xor_relaxed_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1816  { /* xor _Value with *_Tgt atomically with
1817  relaxed memory order */
1818 
1819  return (_INTRIN_RELAXED(_InterlockedXor)((volatile long *)_Tgt, _Value));
1820  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint8_t _Fetch_xor_relaxed_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2330  { /* xor _Value with *_Tgt atomically with
2331  relaxed memory order */
2332 
2333  return (_INTRIN_RELAXED(_InterlockedXor64)((volatile _LONGLONG *)_Tgt, _Value));
2334  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _LONGLONG
Definition: yvals.h:574
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
_Uint1_t _Fetch_xor_release_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
796  { /* xor _Value with *_Tgt atomically with
797  release memory order */
798 
799  return (_INTRIN_RELEASE(_InterlockedXor8)((volatile char *)_Tgt, _Value));
800  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
_Uint2_t _Fetch_xor_release_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1313  { /* xor _Value with *_Tgt atomically with
1314  release memory order */
1315 
1316  return (_INTRIN_RELEASE(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1317  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
_Uint4_t _Fetch_xor_release_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1830  { /* xor _Value with *_Tgt atomically with
1831  release memory order */
1832 
1833  return (_INTRIN_RELEASE(_InterlockedXor)((volatile long *)_Tgt, _Value));
1834  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
_Uint8_t _Fetch_xor_release_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2344  { /* xor _Value with *_Tgt atomically with
2345  release memory order */
2346 
2347  return (_INTRIN_RELEASE(_InterlockedXor64)((volatile _LONGLONG *)_Tgt, _Value));
2348  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
#define _LONGLONG
Definition: yvals.h:574
_Uint1_t _Fetch_xor_seq_cst_1 ( volatile _Uint1_t _Tgt,
_Uint1_t  _Value 
)
inline
775  { /* xor _Value with *_Tgt atomically with
776  sequentially consistent memory order */
777 
778  return (_INTRIN_SEQ_CST(_InterlockedXor8)((volatile char *)_Tgt, _Value));
779  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
_Uint2_t _Fetch_xor_seq_cst_2 ( volatile _Uint2_t _Tgt,
_Uint2_t  _Value 
)
inline
1292  { /* xor _Value with *_Tgt atomically with
1293  sequentially consistent memory order */
1294 
1295  return (_INTRIN_SEQ_CST(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1296  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
_Uint4_t _Fetch_xor_seq_cst_4 ( volatile _Uint4_t _Tgt,
_Uint4_t  _Value 
)
inline
1809  { /* xor _Value with *_Tgt atomically with
1810  sequentially consistent memory order */
1811 
1812  return (_INTRIN_SEQ_CST(_InterlockedXor)((volatile long *)_Tgt, _Value));
1813  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
_Uint8_t _Fetch_xor_seq_cst_8 ( volatile _Uint8_t _Tgt,
_Uint8_t  _Value 
)
inline
2323  { /* xor _Value with *_Tgt atomically with
2324  sequentially consistent memory order */
2325 
2326  return (_INTRIN_SEQ_CST(_InterlockedXor64)((volatile _LONGLONG *)_Tgt, _Value));
2327  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
#define _LONGLONG
Definition: yvals.h:574
_Uint1_t _Load_acquire_1 ( volatile _Uint1_t _Tgt)
inline
404  { /* load from *_Tgt atomically with
405  acquire memory order */
406 
407  return (_Load_seq_cst_1(_Tgt));
408  }
_Uint1_t _Load_seq_cst_1(volatile _Uint1_t *_Tgt)
Definition: xatomic.h:371
_Uint2_t _Load_acquire_2 ( volatile _Uint2_t _Tgt)
inline
921  { /* load from *_Tgt atomically with
922  acquire memory order */
923 
924  return (_Load_seq_cst_2(_Tgt));
925  }
_Uint2_t _Load_seq_cst_2(volatile _Uint2_t *_Tgt)
Definition: xatomic.h:888
_Uint4_t _Load_acquire_4 ( volatile _Uint4_t _Tgt)
inline
1438  { /* load from *_Tgt atomically with
1439  acquire memory order */
1440 
1441  return (_Load_seq_cst_4(_Tgt));
1442  }
_Uint4_t _Load_seq_cst_4(volatile _Uint4_t *_Tgt)
Definition: xatomic.h:1405
_Uint8_t _Load_acquire_8 ( volatile _Uint8_t _Tgt)
inline
1952  { /* load from *_Tgt atomically with
1953  acquire memory order */
1954 
1955  return (_Load_seq_cst_8(_Tgt));
1956  }
_Uint8_t _Load_seq_cst_8(volatile _Uint8_t *_Tgt)
Definition: xatomic.h:1913
_Uint1_t _Load_relaxed_1 ( volatile _Uint1_t _Tgt)
inline
389  { /* load from *_Tgt atomically with
390  relaxed memory order */
391  _Uint1_t _Value;
392 
393  #if defined(_M_ARM)
394  _Value = __iso_volatile_load8((volatile char *)_Tgt);
395 
396  #else
397  _Value = *_Tgt;
398  #endif
399 
400  return (_Value);
401  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_STD_BEGIN typedef unsigned char _Uint1_t
Definition: xatomic.h:209
_Uint2_t _Load_relaxed_2 ( volatile _Uint2_t _Tgt)
inline
906  { /* load from *_Tgt atomically with
907  relaxed memory order */
908  _Uint2_t _Value;
909 
910  #if defined(_M_ARM)
911  _Value = __iso_volatile_load16((volatile short *)_Tgt);
912 
913  #else
914  _Value = *_Tgt;
915  #endif
916 
917  return (_Value);
918  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
unsigned short _Uint2_t
Definition: xatomic.h:210
_Uint4_t _Load_relaxed_4 ( volatile _Uint4_t _Tgt)
inline
1423  { /* load from *_Tgt atomically with
1424  relaxed memory order */
1425  _Uint4_t _Value;
1426 
1427  #if defined(_M_ARM)
1428  _Value = __iso_volatile_load32((volatile int *)_Tgt);
1429 
1430  #else
1431  _Value = *_Tgt;
1432  #endif
1433 
1434  return (_Value);
1435  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_Uint32t _Uint4_t
Definition: xatomic0.h:28
_Uint8_t _Load_relaxed_8 ( volatile _Uint8_t _Tgt)
inline
1934  { /* load from *_Tgt atomically with
1935  relaxed memory order */
1936  _Uint8_t _Value;
1937 
1938  #if _MS_64
1939  _Value = *_Tgt;
1940 
1941  #elif defined(_M_ARM)
1942  _Value = __ldrexd((volatile _LONGLONG *)_Tgt);
1943 
1944  #else
1945  _Value = _InterlockedOr64((volatile _LONGLONG *)_Tgt, 0);
1946  #endif
1947 
1948  return (_Value);
1949  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _LONGLONG
Definition: yvals.h:574
unsigned _LONGLONG _Uint8_t
Definition: xatomic.h:212
_Uint1_t _Load_seq_cst_1 ( volatile _Uint1_t _Tgt)
inline
372  { /* load from *_Tgt atomically with
373  sequentially consistent memory order */
374  _Uint1_t _Value;
375 
376  #if defined(_M_ARM)
377  _Value = __iso_volatile_load8((volatile char *)_Tgt);
378  _Memory_barrier();
379 
380  #else
381  _Value = *_Tgt;
382  _Compiler_barrier();
383  #endif
384 
385  return (_Value);
386  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _Compiler_barrier()
Definition: xatomic.h:29
_STD_BEGIN typedef unsigned char _Uint1_t
Definition: xatomic.h:209
_Uint2_t _Load_seq_cst_2 ( volatile _Uint2_t _Tgt)
inline
889  { /* load from *_Tgt atomically with
890  sequentially consistent memory order */
891  _Uint2_t _Value;
892 
893  #if defined(_M_ARM)
894  _Value = __iso_volatile_load16((volatile short *)_Tgt);
895  _Memory_barrier();
896 
897  #else
898  _Value = *_Tgt;
899  _Compiler_barrier();
900  #endif
901 
902  return (_Value);
903  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
unsigned short _Uint2_t
Definition: xatomic.h:210
#define _Compiler_barrier()
Definition: xatomic.h:29
_Uint4_t _Load_seq_cst_4 ( volatile _Uint4_t _Tgt)
inline
1406  { /* load from *_Tgt atomically with
1407  sequentially consistent memory order */
1408  _Uint4_t _Value;
1409 
1410  #if defined(_M_ARM)
1411  _Value = __iso_volatile_load32((volatile int *)_Tgt);
1412  _Memory_barrier();
1413 
1414  #else
1415  _Value = *_Tgt;
1416  _Compiler_barrier();
1417  #endif
1418 
1419  return (_Value);
1420  }
_Uint32t _Uint4_t
Definition: xatomic0.h:28
#define _Compiler_barrier()
Definition: xatomic.h:29
_Uint8_t _Load_seq_cst_8 ( volatile _Uint8_t * _Tgt)
inline
1914  { /* load from *_Tgt atomically with
1915  sequentially consistent memory order */
1916  _Uint8_t _Value;
1917 
1918  #if _MS_64
1919  _Value = *_Tgt;
1920  _Compiler_barrier();
1921 
1922  #elif defined(_M_ARM)
1923  _Value = __ldrexd((volatile _LONGLONG *)_Tgt);
1924  _Memory_barrier();
1925 
1926  #else
1927  _Value = _InterlockedOr64((volatile _LONGLONG *)_Tgt, 0);
1928  #endif
1929 
1930  return (_Value);
1931  }
#define _LONGLONG
Definition: yvals.h:574
unsigned _LONGLONG _Uint8_t
Definition: xatomic.h:212
#define _Compiler_barrier()
Definition: xatomic.h:29
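All of the seq_cst load helpers follow the same pattern: a plain load plus a compiler barrier on x86/x64, or a load followed by a hardware memory barrier on ARM. The public equivalent is simply the default load order; a small illustrative sketch (names not from xatomic.h):

#include <atomic>

inline unsigned int load_seq_cst(const std::atomic<unsigned int>& tgt)
    {   /* tgt.load() defaults to memory_order_seq_cst */
    return (tgt.load());
    }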
void _Lock_spin_lock ( volatile _Atomic_flag_t * _Flag)
inline
2458  { /* spin until _Flag successfully set */
2459  while (_ATOMIC_FLAG_TEST_AND_SET(_Flag, memory_order_acquire))
2460  _YIELD_PROCESSOR;
2461  }
#define _ATOMIC_FLAG_TEST_AND_SET
Definition: xatomic.h:214
#define _YIELD_PROCESSOR
Definition: xatomic.h:2448
Definition: xatomic0.h:22
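_Lock_spin_lock spins on an atomic flag, pausing between attempts. A rough analogue with the standard facilities; std::this_thread::yield stands in for the header's _YIELD_PROCESSOR hook, and the names are illustrative:

#include <atomic>
#include <thread>

inline void lock_spin(std::atomic_flag& flag)
    {   /* spin until the flag is successfully set; acquire order keeps the
           critical section's accesses from being hoisted above the lock */
    while (flag.test_and_set(std::memory_order_acquire))
        std::this_thread::yield();   /* stand-in for _YIELD_PROCESSOR */
    }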
memory_order _Memory_order_upper_bound ( memory_order  _Order1,
memory_order  _Order2 
)
inline
238  { /* find upper bound of two memory orders,
239  based on the following partial order:
240 
241  seq_cst
242  |
243  acq_rel
244  / \
245  acquire release
246  | |
247  consume |
248  \ /
249  relaxed
250 
251  */
252 
253  static const memory_order _Upper[6][6] = { /* combined upper bounds */
254  { memory_order_relaxed, memory_order_consume, memory_order_acquire,
255  memory_order_release, memory_order_acq_rel, memory_order_seq_cst },
256  { memory_order_consume, memory_order_consume, memory_order_acquire,
257  memory_order_acq_rel, memory_order_acq_rel, memory_order_seq_cst },
258  { memory_order_acquire, memory_order_acquire, memory_order_acquire,
259  memory_order_acq_rel, memory_order_acq_rel, memory_order_seq_cst },
260  { memory_order_release, memory_order_acq_rel, memory_order_acq_rel,
261  memory_order_release, memory_order_acq_rel, memory_order_seq_cst },
262  { memory_order_acq_rel, memory_order_acq_rel, memory_order_acq_rel,
263  memory_order_acq_rel, memory_order_acq_rel, memory_order_seq_cst },
264  { memory_order_seq_cst, memory_order_seq_cst, memory_order_seq_cst,
265  memory_order_seq_cst, memory_order_seq_cst, memory_order_seq_cst
266  }
267  };
268 
269  if ((_Order1 < 0) || (6 <= _Order1)
270  || (_Order2 < 0) || (6 <= _Order2))
271  { /* launder memory order */
272  _INVALID_MEMORY_ORDER;
273  return (memory_order_seq_cst);
274  }
275  return (_Upper[_Order1][_Order2]);
276  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:232
memory_order
Definition: xatomic0.h:19
Definition: xatomic0.h:24
Definition: xatomic0.h:21
Definition: xatomic0.h:20
Definition: xatomic0.h:25
Definition: xatomic0.h:23
Definition: xatomic0.h:22
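The upper bound computed here is used by the compare-exchange helpers in this header to merge the success and failure orders into a single order for the underlying locked operation. For example, a CAS requested with release on success and relaxed on failure runs as a release operation, since release is the least upper bound of the two. A hedged illustration through the public interface (names not from xatomic.h):

#include <atomic>

inline bool try_publish(std::atomic<int>& slot, int& expected, int desired)
    {   /* upper bound of (release, relaxed) is release, so the underlying
           read-modify-write runs with release semantics */
    return (slot.compare_exchange_strong(expected, desired,
        std::memory_order_release, std::memory_order_relaxed));
    }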
void _Store_relaxed_1 ( volatile _Uint1_t * _Tgt,
_Uint1_t  _Value 
)
inline
312  { /* store _Value atomically with relaxed memory order */
313  #if defined(_M_ARM)
314  __iso_volatile_store8((volatile char *)_Tgt, _Value);
315 
316  #else
317  *_Tgt = _Value;
318  #endif
319  }
void _Store_relaxed_2 ( volatile _Uint2_t * _Tgt,
_Uint2_t  _Value 
)
inline
829  { /* store _Value atomically with relaxed memory order */
830  #if defined(_M_ARM)
831  __iso_volatile_store16((volatile short *)_Tgt, _Value);
832 
833  #else
834  *_Tgt = _Value;
835  #endif
836  }
void _Store_relaxed_4 ( volatile _Uint4_t * _Tgt,
_Uint4_t  _Value 
)
inline
1346  { /* store _Value atomically with relaxed memory order */
1347  #if defined(_M_ARM)
1348  __iso_volatile_store32((volatile int *)_Tgt, _Value);
1349 
1350  #else
1351  *_Tgt = _Value;
1352  #endif
1353  }
void _Store_relaxed_8 ( volatile _Uint8_t * _Tgt,
_Uint8_t  _Value 
)
inline
1863  { /* store _Value atomically with relaxed memory order */
1864  #if _MS_64
1865  *_Tgt = _Value;
1866 
1867  #else
1868  _INTRIN_RELAXED(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value);
1869  #endif
1870  }
#define _LONGLONG
Definition: yvals.h:574
#define _INTRIN_RELAXED(x)
Definition: xatomic.h:70
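As with the 8-byte loads, a 64-bit relaxed store cannot be a plain write on 32-bit targets, so the header substitutes an interlocked exchange there. Through the public interface the distinction disappears; a short illustrative sketch (names not from xatomic.h):

#include <atomic>

inline void store_relaxed_8(std::atomic<unsigned long long>& tgt,
    unsigned long long value)
    {   /* one 8-byte store on 64-bit targets; an interlocked exchange on
           32-bit x86 so no torn store can ever be observed */
    tgt.store(value, std::memory_order_relaxed);
    }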
void _Store_release_1 ( volatile _Uint1_t * _Tgt,
_Uint1_t  _Value 
)
inline
322  { /* store _Value atomically with release memory order */
323  #if defined(_M_ARM)
324  _Memory_barrier();
325  __iso_volatile_store8((volatile char *)_Tgt, _Value);
326 
327  #else
328  _Compiler_barrier();
329  *_Tgt = _Value;
330  #endif
331  }
#define _Compiler_barrier()
Definition: xatomic.h:29
void _Store_release_2 ( volatile _Uint2_t * _Tgt,
_Uint2_t  _Value 
)
inline
839  { /* store _Value atomically with release memory order */
840  #if defined(_M_ARM)
841  _Memory_barrier();
842  __iso_volatile_store16((volatile short *)_Tgt, _Value);
843 
844  #else
845  _Compiler_barrier();
846  *_Tgt = _Value;
847  #endif
848  }
#define _Compiler_barrier()
Definition: xatomic.h:29
void _Store_release_4 ( volatile _Uint4_t * _Tgt,
_Uint4_t  _Value 
)
inline
1356  { /* store _Value atomically with release memory order */
1357  #if defined(_M_ARM)
1358  _Memory_barrier();
1359  __iso_volatile_store32((volatile int *)_Tgt, _Value);
1360 
1361  #else
1362  _Compiler_barrier();
1363  *_Tgt = _Value;
1364  #endif
1365  }
#define _Compiler_barrier()
Definition: xatomic.h:29
void _Store_release_8 ( volatile _Uint8_t * _Tgt,
_Uint8_t  _Value 
)
inline
1873  { /* store _Value atomically with release memory order */
1874  #if _MS_64
1875  _Compiler_barrier();
1876  *_Tgt = _Value;
1877 
1878  #else
1879  _INTRIN_RELEASE(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value);
1880  #endif
1881  }
#define _INTRIN_RELEASE(x)
Definition: xatomic.h:72
#define _LONGLONG
Definition: yvals.h:574
#define _Compiler_barrier()
Definition: xatomic.h:29
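A release store is the writer's half of the classic message-passing pattern: it keeps every earlier write ordered before the flag update and pairs with an acquire load on the reader's side. An illustrative sketch using the public interface (the names and the payload variable are not part of xatomic.h):

#include <atomic>

std::atomic<unsigned int> ready(0);
unsigned int payload;    /* ordinary data published under the flag */

inline void producer()
    {   /* the release store orders the payload write before the flag write;
           on x86 this is a plain store behind a compiler barrier, on ARM a
           memory barrier is issued before the store */
    payload = 42;
    ready.store(1, std::memory_order_release);
    }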
void _Store_seq_cst_1 ( volatile _Uint1_t * _Tgt,
_Uint1_t  _Value 
)
inline
334  { /* store _Value atomically with
335  sequentially consistent memory order */
336 
337  #if defined(_M_ARM)
338  _Memory_barrier();
339  __iso_volatile_store8((volatile char *)_Tgt, _Value);
340  _Memory_barrier();
341 
342  #else
343  _INTRIN_SEQ_CST(_InterlockedExchange8)((volatile char *)_Tgt, _Value);
344  #endif
345  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
void _Store_seq_cst_2 ( volatile _Uint2_t * _Tgt,
_Uint2_t  _Value 
)
inline
851  { /* store _Value atomically with
852  sequentially consistent memory order */
853 
854  #if defined(_M_ARM)
855  _Memory_barrier();
856  __iso_volatile_store16((volatile short *)_Tgt, _Value);
857  _Memory_barrier();
858 
859  #else
860  _INTRIN_SEQ_CST(_InterlockedExchange16)((volatile short *)_Tgt, _Value);
861  #endif
862  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
void _Store_seq_cst_4 ( volatile _Uint4_t * _Tgt,
_Uint4_t  _Value 
)
inline
1368  { /* store _Value atomically with
1369  sequentially consistent memory order */
1370 
1371  #if defined(_M_ARM)
1372  _Memory_barrier();
1373  __iso_volatile_store32((volatile int *)_Tgt, _Value);
1374  _Memory_barrier();
1375 
1376  #else
1377  _INTRIN_SEQ_CST(_InterlockedExchange)((volatile long *)_Tgt, _Value);
1378  #endif
1379  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
void _Store_seq_cst_8 ( volatile _Uint8_t * _Tgt,
_Uint8_t  _Value 
)
inline
1884  { /* store _Value atomically with
1885  sequentially consistent memory order */
1886  _INTRIN_SEQ_CST(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value);
1887  }
#define _INTRIN_SEQ_CST(x)
Definition: xatomic.h:73
#define _LONGLONG
Definition: yvals.h:574
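Note that even on x86/x64 a sequentially consistent store is not a plain MOV: the store must be followed by a full fence, and the seq_cst store helpers get both from a single locked exchange whose result is simply discarded. The public-interface equivalent is just the default store order; a brief hedged sketch (names not from xatomic.h):

#include <atomic>

inline void store_seq_cst(std::atomic<long>& tgt, long value)
    {   /* tgt.store(value) defaults to memory_order_seq_cst and compiles to
           an exchange (or store plus fence) rather than a bare store */
    tgt.store(value);
    }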
void _Unlock_spin_lock ( volatile _Atomic_flag_t * _Flag)
inline
2465  { /* release previously obtained lock */
2466  _ATOMIC_FLAG_CLEAR(_Flag, memory_order_release);
2467  }
#define _ATOMIC_FLAG_CLEAR
Definition: xatomic.h:215
Definition: xatomic0.h:23
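_Unlock_spin_lock clears the flag with release semantics, publishing everything written inside the critical section to the next thread that acquires it. Continuing the std::atomic_flag analogue from the _Lock_spin_lock sketch above (names illustrative):

#include <atomic>

inline void unlock_spin(std::atomic_flag& flag)
    {   /* release the previously obtained lock; the release order publishes
           writes made while the lock was held */
    flag.clear(std::memory_order_release);
    }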
void _Validate_compare_exchange_memory_order ( memory_order  _Success,
memory_order  _Failure 
)
inline
280  { /* validate success/failure */
281  /* _Failure may not be memory_order_release or memory_order_acq_rel
282  and may not be stronger than _Success */
283  switch (_Failure)
284  {
285  case memory_order_relaxed:
286  break;
287 
288  case memory_order_seq_cst:
289  if (_Success != memory_order_seq_cst)
290  _INVALID_MEMORY_ORDER;
291  break;
292 
293  case memory_order_acquire:
294  if ((_Success == memory_order_consume) ||
295  (_Success == memory_order_relaxed))
296  _INVALID_MEMORY_ORDER;
297  break;
298 
299  case memory_order_consume:
300  if (_Success == memory_order_relaxed)
301  _INVALID_MEMORY_ORDER;
302  break;
303 
304  default:
305  _INVALID_MEMORY_ORDER;
306  break;
307  }
308  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:232
Definition: xatomic0.h:21
Definition: xatomic0.h:20
Definition: xatomic0.h:25
Definition: xatomic0.h:22
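The validation encodes the C++11 requirement that the failure order of a compare-exchange may not be release or acq_rel and may not be stronger than the success order. An illustrative call that passes the check, with rejected combinations noted in the comment (names not from xatomic.h):

#include <atomic>

inline bool bump_if_expected(std::atomic<int>& counter, int& expected,
    int desired)
    {   /* (acq_rel, acquire) is valid; pairs such as (acquire, seq_cst),
           (relaxed, consume), or any release/acq_rel failure order would
           trip _INVALID_MEMORY_ORDER */
    return (counter.compare_exchange_strong(expected, desired,
        std::memory_order_acq_rel, std::memory_order_acquire));
    }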

Variable Documentation

_STD_BEGIN typedef unsigned char _Uint1_t