STLdoc
STLdocumentation
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros
Macros | Typedefs | Enumerations | Functions
xatomic0.h File Reference
#include <yvals.h>

Go to the source code of this file.

Macros

#define _XATOMIC0_H
 
#define _GENERIC_ATOMICS   0 /* nonzero for (non-conforming) generic */
 
#define _WCHAR_T_SIZE   2
 
#define _SHORT_SIZE   2
 
#define _INT_SIZE   4
 
#define _LONG_SIZE   4
 
#define _LONGLONG_SIZE   8
 
#define _ADDR_SIZE   4
 
#define _MS_32   1
 
#define _MS_64   0
 
#define _ATOMIC_MAXBYTES_LOCK_FREE   8
 
#define _ATOMIC_FLAG_USES_LOCK   0
 
#define _ATOMIC_FENCE_USES_LOCK   0
 
#define _Get_atomic_count(_Counter)   _Counter
 
#define _Init_atomic_counter(_Counter, _Value)   _Counter = _Value
 
#define _Inc_atomic_counter_explicit(_Counter, _Order)   (_Atomic_fetch_add_4(&_Counter, 1, _Order) + 1)
 
#define _Inc_atomic_counter(_Counter)   (_Inc_atomic_counter_explicit(_Counter, memory_order_seq_cst))
 
#define _Dec_atomic_counter_explicit(_Counter, _Order)   (_Atomic_fetch_sub_4(&_Counter, 1, _Order) - 1)
 
#define _Dec_atomic_counter(_Counter)   (_Dec_atomic_counter_explicit(_Counter, memory_order_seq_cst))
 
#define _Load_atomic_counter_explicit(_Counter, _Order)   _Atomic_load_4(&_Counter, _Order)
 
#define _Load_atomic_counter(_Counter)   _Load_atomic_counter_explicit(_Counter, memory_order_seq_cst)
 
#define _Compare_increment_atomic_counter_explicit(_Counter, _Expected, _Order)
 
#define _Compare_increment_atomic_counter(_Counter, _Expected)
 

Typedefs

typedef enum memory_order memory_order
 
typedef _Uint32t _Uint4_t
 
typedef _Uint4_t _Atomic_integral_t
 
typedef long _Atomic_flag_t
 
typedef _Atomic_integral_t _Atomic_counter_t
 

Enumerations

enum  memory_order {
  memory_order_relaxed, memory_order_consume, memory_order_acquire, memory_order_release,
  memory_order_acq_rel, memory_order_seq_cst
}
 

Functions

_Uint4_t _Atomic_load_4 (volatile _Uint4_t *, memory_order)
 
int _Atomic_compare_exchange_weak_4 (volatile _Uint4_t *, _Uint4_t *, _Uint4_t, memory_order, memory_order)
 
_Uint4_t _Atomic_fetch_add_4 (volatile _Uint4_t *, _Uint4_t, memory_order)
 
_Uint4_t _Atomic_fetch_sub_4 (volatile _Uint4_t *, _Uint4_t, memory_order)
 
_EXTERN_C _CRTIMP2_PURE void __cdecl _Lock_shared_ptr_spin_lock ()
 
_CRTIMP2_PURE void __cdecl _Unlock_shared_ptr_spin_lock ()
 

Macro Definition Documentation

#define _ADDR_SIZE   4
#define _ATOMIC_FENCE_USES_LOCK   0
#define _ATOMIC_FLAG_USES_LOCK   0
#define _ATOMIC_MAXBYTES_LOCK_FREE   8
#define _Compare_increment_atomic_counter (   _Counter,
  _Expected 
)
Value:
_Compare_increment_atomic_counter_explicit(_Counter, _Expected, memory_order_seq_cst)
#define _Compare_increment_atomic_counter_explicit(_Counter, _Expected, _Order)
Definition: xatomic0.h:174
Definition: xatomic0.h:25
#define _Compare_increment_atomic_counter_explicit (   _Counter,
  _Expected,
  _Order 
)
Value:
_Atomic_compare_exchange_weak_4(&_Counter, &_Expected, _Expected + 1, \
_Order, _Order)
int _Atomic_compare_exchange_weak_4(volatile _Uint4_t *, _Uint4_t *, _Uint4_t, memory_order, memory_order)
Definition: xatomic.h:1629
#define _Dec_atomic_counter (   _Counter)    (_Dec_atomic_counter_explicit(_Counter, memory_order_seq_cst))
#define _Dec_atomic_counter_explicit (   _Counter,
  _Order 
)    (_Atomic_fetch_sub_4(&_Counter, 1, _Order) - 1)
#define _GENERIC_ATOMICS   0 /* nonzero for (non-conforming) generic */
#define _Get_atomic_count (   _Counter)    _Counter
#define _Inc_atomic_counter (   _Counter)    (_Inc_atomic_counter_explicit(_Counter, memory_order_seq_cst))
#define _Inc_atomic_counter_explicit (   _Counter,
  _Order 
)    (_Atomic_fetch_add_4(&_Counter, 1, _Order) + 1)
#define _Init_atomic_counter (   _Counter,
  _Value 
)    _Counter = _Value
#define _INT_SIZE   4
#define _Load_atomic_counter (   _Counter)    _Load_atomic_counter_explicit(_Counter, memory_order_seq_cst)
#define _Load_atomic_counter_explicit (   _Counter,
  _Order 
)    _Atomic_load_4(&_Counter, _Order)
#define _LONG_SIZE   4
#define _LONGLONG_SIZE   8
#define _MS_32   1
#define _MS_64   0
#define _SHORT_SIZE   2
#define _WCHAR_T_SIZE   2
#define _XATOMIC0_H

Typedef Documentation

typedef long _Atomic_flag_t
typedef _Uint32t _Uint4_t
typedef enum memory_order memory_order

Enumeration Type Documentation

Enumerator
memory_order_relaxed 
memory_order_consume 
memory_order_acquire 
memory_order_release 
memory_order_acq_rel 
memory_order_seq_cst 
19  {
26  } memory_order;
memory_order
Definition: xatomic0.h:19
Definition: xatomic0.h:24
Definition: xatomic0.h:21
Definition: xatomic0.h:20
Definition: xatomic0.h:25
Definition: xatomic0.h:23
Definition: xatomic0.h:22

Function Documentation

int _Atomic_compare_exchange_weak_4 ( volatile _Uint4_t *,
_Uint4_t *,
_Uint4_t  ,
memory_order  ,
memory_order   
)
inline
1632  { /* compare and exchange values atomically */
1633  /* No weak compare-exchange is currently available,
1634  even for ARM, so fall back to strong */
1635  return (_Atomic_compare_exchange_strong_4(_Tgt, _Exp, _Value,
1636  _Order1, _Order2));
1637  }
int _Atomic_compare_exchange_strong_4(volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value, memory_order _Order1, memory_order _Order2)
Definition: xatomic.h:1601
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_CRTIMP2_PURE short __CLRCALL_PURE_OR_CDECL _Exp(double *, double, short)
_Uint4_t _Atomic_fetch_add_4 ( volatile _Uint4_t *,
_Uint4_t  ,
memory_order   
)
inline
1670  { /* add _Value to *_Tgt atomically */
1671  switch (_Order)
1672  {
1673  case memory_order_relaxed:
1674  return (_Fetch_add_relaxed_4(_Tgt, _Value));
1675 
1676  case memory_order_consume:
1677  case memory_order_acquire:
1678  return (_Fetch_add_acquire_4(_Tgt, _Value));
1679 
1680  case memory_order_release:
1681  return (_Fetch_add_release_4(_Tgt, _Value));
1682 
1683  case memory_order_acq_rel:
1684  case memory_order_seq_cst:
1685  return (_Fetch_add_seq_cst_4(_Tgt, _Value));
1686 
1687  default:
1688  _INVALID_MEMORY_ORDER;
1689  return (0);
1690  }
1691  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:232
_Uint4_t _Fetch_add_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1661
Definition: xatomic0.h:24
_Uint4_t _Fetch_add_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1640
Definition: xatomic0.h:21
Definition: xatomic0.h:20
Definition: xatomic0.h:25
Definition: xatomic0.h:23
Definition: xatomic0.h:22
_Uint4_t _Fetch_add_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1654
_Uint4_t _Fetch_add_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
Definition: xatomic.h:1647
_Uint4_t _Atomic_fetch_sub_4 ( volatile _Uint4_t *,
_Uint4_t  ,
memory_order   
)
inline
1695  { /* subtract _Value from *_Tgt atomically */
1696  return (_Atomic_fetch_add_4(_Tgt, 0 - _Value, _Order));
1697  }
_CRTIMP _In_ int _Value
Definition: setjmp.h:190
_Uint4_t _Atomic_fetch_add_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
Definition: xatomic.h:1668
_Uint4_t _Atomic_load_4 ( volatile _Uint4_t *,
memory_order   
)
inline
1446  { /* load from *_Tgt atomically */
1447  switch (_Order)
1448  {
1449  case memory_order_relaxed:
1450  return (_Load_relaxed_4(_Tgt));
1451 
1452  case memory_order_consume:
1453  case memory_order_acquire:
1454  return (_Load_acquire_4(_Tgt));
1455 
1456  case memory_order_seq_cst:
1457  return (_Load_seq_cst_4(_Tgt));
1458 
1459  default:
1460  _INVALID_MEMORY_ORDER;
1461  return (0);
1462  }
1463  }
#define _INVALID_MEMORY_ORDER
Definition: xatomic.h:232
_Uint4_t _Load_relaxed_4(volatile _Uint4_t *_Tgt)
Definition: xatomic.h:1422
_Uint4_t _Load_seq_cst_4(volatile _Uint4_t *_Tgt)
Definition: xatomic.h:1405
Definition: xatomic0.h:21
Definition: xatomic0.h:20
Definition: xatomic0.h:25
Definition: xatomic0.h:22
_Uint4_t _Load_acquire_4(volatile _Uint4_t *_Tgt)
Definition: xatomic.h:1437
_EXTERN_C _CRTIMP2_PURE void __cdecl _Lock_shared_ptr_spin_lock ( )
_CRTIMP2_PURE void __cdecl _Unlock_shared_ptr_spin_lock ( )