xatomic.h
1 /* xatomic.h internal header */
2 #pragma once
3 #ifndef _XATOMIC_H
4 #define _XATOMIC_H
5 #ifndef RC_INVOKED
6 #include <xatomic0.h>
7 #include <stddef.h> // for size_t
8 #include <stdlib.h>
9 #include <string.h>
10 
11 #include <intrin.h>
12 #include <xutility>
13 
14  #pragma pack(push,_CRT_PACKING)
15  #pragma warning(push,3)
16  #pragma push_macro("new")
17  #undef new
18 
19  #pragma warning (disable: 4100 4390 4793 6326)
20 
21  #define _Compiler_barrier() _ReadWriteBarrier()
22 
23  #if defined(_M_ARM)
24  #define _Memory_barrier() __dmb(_ARM_BARRIER_ISH)
25  #endif /* defined(_M_ARM) */
26 
27  #if defined(_M_ARM64)
28  #define _Memory_barrier() __dmb(_ARM64_BARRIER_ISH)
29  #endif /* defined(_M_ARM64) */
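/* Note (editorial illustration, not part of the original header): _Compiler_barrier
   only constrains compiler reordering (it maps to _ReadWriteBarrier), whereas
   _Memory_barrier on ARM/ARM64 emits a DMB ISH instruction, a full hardware
   barrier over the inner shareable domain. No _Memory_barrier is defined here
   for x86/x64, where the interlocked intrinsics already provide the required
   hardware ordering. */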
30 
31  #ifndef _CONCAT
32  #define _CONCATX(x, y) x ## y
33  #define _CONCAT(x, y) _CONCATX(x, y)
34  #endif /* _CONCAT */
35 
36 #define ATOMIC_BOOL_LOCK_FREE \
37  (1 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
38 #define _ATOMIC_CHAR_LOCK_FREE \
39  (1 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
40 #define _ATOMIC_CHAR16_T_LOCK_FREE \
41  (2 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
42 #define _ATOMIC_CHAR32_T_LOCK_FREE \
43  (2 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
44 #define _ATOMIC_WCHAR_T_LOCK_FREE \
45  (_WCHAR_T_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
46 #define _ATOMIC_SHORT_LOCK_FREE \
47  (_SHORT_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
48 #define _ATOMIC_INT_LOCK_FREE \
49  (_INT_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
50 #define _ATOMIC_LONG_LOCK_FREE \
51  (_LONG_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
52 #define _ATOMIC_LLONG_LOCK_FREE \
53  (_LONGLONG_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
54 #define ATOMIC_POINTER_LOCK_FREE \
55  (_ADDR_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
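/* Illustrative note (not from the original source): each *_LOCK_FREE macro
   evaluates to 2 ("always lock-free") when the type's storage fits within
   _ATOMIC_MAXBYTES_LOCK_FREE, and to 0 ("never lock-free") otherwise. For
   example, on a hypothetical target where _ATOMIC_MAXBYTES_LOCK_FREE is 8 and
   _LONGLONG_SIZE is 8,

        static_assert(_ATOMIC_LLONG_LOCK_FREE == 2, "long long ops are lock-free");

   would hold; the sizes used here are assumptions for illustration only. */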
56 
57  /* Interlocked intrinsic mapping for _nf/_acq/_rel */
58  #if defined(_M_ARM) || defined(_M_ARM64)
59  #define _INTRIN_RELAXED(x) _CONCAT(x, _nf)
60  #define _INTRIN_ACQUIRE(x) _CONCAT(x, _acq)
61  #define _INTRIN_RELEASE(x) _CONCAT(x, _rel)
62  #define _INTRIN_SEQ_CST(x) x
63  #else /* defined(_M_ARM) || defined(_M_ARM64) */
64  #define _INTRIN_RELAXED(x) x
65  #define _INTRIN_ACQUIRE(x) x
66  #define _INTRIN_RELEASE(x) x
67  #define _INTRIN_SEQ_CST(x) x
68  #endif /* defined(_M_ARM) || defined(_M_ARM64) */
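/* Illustrative expansion (not from the original source): on ARM/ARM64,
       _INTRIN_ACQUIRE(_InterlockedExchange8)((volatile char *)_Tgt, _Value)
   pastes the _acq suffix through _CONCAT and becomes
       _InterlockedExchange8_acq((volatile char *)_Tgt, _Value)
   i.e. the acquire-only form of the intrinsic. On x86/x64 every mapping is the
   identity, because the plain interlocked intrinsics already imply a full
   barrier and are therefore usable for every memory order. */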
69 
70  #if defined(_M_IX86)
71 #pragma push_macro("_InterlockedExchange64")
72 #pragma push_macro("_InterlockedExchangeAdd64")
73 #pragma push_macro("_InterlockedAnd64")
74 #pragma push_macro("_InterlockedOr64")
75 #pragma push_macro("_InterlockedXor64")
76 
77 #undef _InterlockedExchange64
78 #undef _InterlockedExchangeAdd64
79 #undef _InterlockedAnd64
80 #undef _InterlockedOr64
81 #undef _InterlockedXor64
82 
83 #define _InterlockedExchange64 _InterlockedExchange64_INLINE
84 #define _InterlockedExchangeAdd64 _InterlockedExchangeAdd64_INLINE
85 #define _InterlockedAnd64 _InterlockedAnd64_INLINE
86 #define _InterlockedOr64 _InterlockedOr64_INLINE
87 #define _InterlockedXor64 _InterlockedXor64_INLINE
88 
89 inline _LONGLONG _InterlockedExchange64(volatile _LONGLONG *_Tgt, _LONGLONG _Value)
90 {
91  _LONGLONG _Oldval;
93  do
94  {
95  _Oldval = *_Tgt;
96  } while (_Oldval != _InterlockedCompareExchange64(_Tgt, _Value, _Oldval));
98 
99  return (_Oldval);
100 }
101 
102 inline _LONGLONG _InterlockedExchangeAdd64(volatile _LONGLONG *_Tgt, _LONGLONG _Value)
103 {
104  _LONGLONG _Oldval, _Newval;
106  do
107  {
108  _Oldval = *_Tgt;
109  _Newval = _Oldval + _Value;
110  } while (_Oldval != _InterlockedCompareExchange64(_Tgt, _Newval, _Oldval));
112 
113  return (_Oldval);
114 }
115 
116 inline _LONGLONG _InterlockedAnd64(volatile _LONGLONG *_Tgt, _LONGLONG _Value)
117 {
118  _LONGLONG _Oldval, _Newval;
120  do
121  {
122  _Oldval = *_Tgt;
123  _Newval = _Oldval & _Value;
124  } while (_Oldval != _InterlockedCompareExchange64(_Tgt, _Newval, _Oldval));
126 
127  return (_Oldval);
128 }
129 
130 inline _LONGLONG _InterlockedOr64(volatile _LONGLONG *_Tgt, _LONGLONG _Value)
131 {
132  _LONGLONG _Oldval, _Newval;
134  do
135  {
136  _Oldval = *_Tgt;
137  _Newval = _Oldval | _Value;
138  } while (_Oldval != _InterlockedCompareExchange64(_Tgt, _Newval, _Oldval));
140 
141  return (_Oldval);
142 }
143 
144 inline _LONGLONG _InterlockedXor64(volatile _LONGLONG *_Tgt, _LONGLONG _Value)
145 {
146  _LONGLONG _Oldval, _Newval;
148  do
149  {
150  _Oldval = *_Tgt;
151  _Newval = _Oldval ^ _Value;
152  } while (_Oldval != _InterlockedCompareExchange64(_Tgt, _Newval, _Oldval));
154 
155  return (_Oldval);
156 }
157  #endif /* defined(_M_IX86) */
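/* Illustrative sketch (not from the original source): the 32-bit x86 emulations
   above all follow the same retry pattern, built on _InterlockedCompareExchange64
   (CMPXCHG8B), the only native 64-bit atomic available on that target:

        _LONGLONG _Oldval, _Newval;
        do
            {   // recompute from the freshly observed value, then retry the CAS
            _Oldval = *_Tgt;
            _Newval = <combine>(_Oldval, _Value);   // +, &, |, or ^
            } while (_Oldval != _InterlockedCompareExchange64(_Tgt, _Newval, _Oldval));
        return (_Oldval);   // value observed immediately before the update

   where <combine> is a placeholder for the operation being emulated. */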
158 
160  /* TYPEDEFS FOR INTERNAL ARITHMETIC TYPES */
161 typedef unsigned char _Uint1_t;
162 typedef unsigned short _Uint2_t;
163 //typedef _Uint32t _Uint4_t;
164 typedef unsigned _LONGLONG _Uint8_t;
165 
166  #define _ATOMIC_FLAG_TEST_AND_SET _Atomic_flag_test_and_set
167  #define _ATOMIC_FLAG_CLEAR _Atomic_flag_clear
168 
169  #define _ATOMIC_THREAD_FENCE _Atomic_thread_fence
170  #define _ATOMIC_SIGNAL_FENCE _Atomic_signal_fence
171 
172  #ifndef _INVALID_MEMORY_ORDER
173 
174  #if _ITERATOR_DEBUG_LEVEL == 2
175  #define _INVALID_MEMORY_ORDER \
176  {_DEBUG_ERROR("Invalid memory_order"); \
177  _SCL_SECURE_INVALID_ARGUMENT}
178 
179  #elif _ITERATOR_DEBUG_LEVEL == 1
180  #define _INVALID_MEMORY_ORDER \
181  _SCL_SECURE_VALIDATE("Invalid memory_order" && 0)
182 
183  #elif _ITERATOR_DEBUG_LEVEL == 0
184  #define _INVALID_MEMORY_ORDER
185  #endif /* _ITERATOR_DEBUG_LEVEL */
186  #endif /* _INVALID_MEMORY_ORDER */
187 
188 inline memory_order _Memory_order_upper_bound(memory_order _Order1,
189  memory_order _Order2)
190  { /* find upper bound of two memory orders,
191  based on the following partial order:
192 
193              seq_cst
194                 |
195              acq_rel
196              /      \
197        acquire      release
198           |             |
199        consume          |
200            \           /
201              relaxed
202 
203  */
204 
205  static const memory_order _Upper[6][6] = { /* combined upper bounds */ // TRANSITION, VSO#202551
206  { memory_order_relaxed, memory_order_consume, memory_order_acquire,
207  memory_order_release, memory_order_acq_rel, memory_order_seq_cst },
208  { memory_order_consume, memory_order_consume, memory_order_acquire,
209  memory_order_acq_rel, memory_order_acq_rel, memory_order_seq_cst },
210  { memory_order_acquire, memory_order_acquire, memory_order_acquire,
211  memory_order_acq_rel, memory_order_acq_rel, memory_order_seq_cst },
212  { memory_order_release, memory_order_acq_rel, memory_order_acq_rel,
213  memory_order_release, memory_order_acq_rel, memory_order_seq_cst },
214  { memory_order_acq_rel, memory_order_acq_rel, memory_order_acq_rel,
215  memory_order_acq_rel, memory_order_acq_rel, memory_order_seq_cst },
216  { memory_order_seq_cst, memory_order_seq_cst, memory_order_seq_cst,
217  memory_order_seq_cst, memory_order_seq_cst, memory_order_seq_cst
218  }
219  };
220 
221  if ((_Order1 < 0) || (6 <= _Order1)
222  || (_Order2 < 0) || (6 <= _Order2))
223  { /* launder memory order */
224  _INVALID_MEMORY_ORDER;
225  return (memory_order_seq_cst);
226  }
227  return (_Upper[_Order1][_Order2]);
228  }
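/* Illustrative examples (not from the original source): the table above is the
   least upper bound (join) in the lattice drawn in the comment, e.g.

        _Memory_order_upper_bound(memory_order_release, memory_order_acquire)
            == memory_order_acq_rel
        _Memory_order_upper_bound(memory_order_consume, memory_order_relaxed)
            == memory_order_consume

   and any out-of-range argument is reported via _INVALID_MEMORY_ORDER and then
   treated as memory_order_seq_cst. */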
229 
230 inline void _Validate_compare_exchange_memory_order(
231  memory_order _Success, memory_order _Failure)
232  { /* validate success/failure */
233  /* _Failure may not be memory_order_release or memory_order_acq_rel
234  and may not be stronger than _Success */
235  switch (_Failure)
236  {
237  case memory_order_relaxed:
238  break;
239 
240  case memory_order_seq_cst:
241  if (_Success != memory_order_seq_cst)
242  _INVALID_MEMORY_ORDER;
243  break;
244 
245  case memory_order_acquire:
246  if ((_Success == memory_order_consume) ||
247  (_Success == memory_order_relaxed))
248  _INVALID_MEMORY_ORDER;
249  break;
250 
251  case memory_order_consume:
252  if (_Success == memory_order_relaxed)
253  _INVALID_MEMORY_ORDER;
254  break;
255 
256  default:
257  _INVALID_MEMORY_ORDER;
258  break;
259  }
260  }
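/* Illustrative examples (not from the original source): a failure order is
   accepted when it is not release-flavored and is no stronger than the success
   order, e.g.

        _Validate_compare_exchange_memory_order(
            memory_order_acq_rel, memory_order_acquire);    // accepted
        _Validate_compare_exchange_memory_order(
            memory_order_relaxed, memory_order_acquire);    // _INVALID_MEMORY_ORDER
        _Validate_compare_exchange_memory_order(
            memory_order_seq_cst, memory_order_release);    // _INVALID_MEMORY_ORDER (default case)
*/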
261 
262 
263  /* _Atomic_store_1 */
264 inline void _Store_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
265  { /* store _Value atomically with relaxed memory order */
266 
267  #if defined(_M_ARM) || defined(_M_ARM64)
268  __iso_volatile_store8((volatile char *)_Tgt, _Value);
269 
270  #else
271  *_Tgt = _Value;
272  #endif
273  }
274 
275 inline void _Store_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
276  { /* store _Value atomically with release memory order */
277 
278  #if defined(_M_ARM) || defined(_M_ARM64)
279  _Memory_barrier();
280  __iso_volatile_store8((volatile char *)_Tgt, _Value);
281 
282  #else
283  _Compiler_barrier();
284  *_Tgt = _Value;
285  #endif
286  }
287 
288 inline void _Store_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
289  { /* store _Value atomically with
290  sequentially consistent memory order */
291 
292  #if defined(_M_ARM) || defined(_M_ARM64)
293  _Memory_barrier();
294  __iso_volatile_store8((volatile char *)_Tgt, _Value);
295  _Memory_barrier();
296 
297  #else
298  _INTRIN_SEQ_CST(_InterlockedExchange8)((volatile char *)_Tgt, _Value);
299  #endif
300  }
301 
302 inline void _Atomic_store_1(
303  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
304  { /* store _Value atomically */
305  switch (_Order)
306  {
307  case memory_order_relaxed:
308  _Store_relaxed_1(_Tgt, _Value);
309  break;
310 
311  case memory_order_release:
312  _Store_release_1(_Tgt, _Value);
313  break;
314 
315  case memory_order_seq_cst:
316  _Store_seq_cst_1(_Tgt, _Value);
317  break;
318 
319  default:
320  _INVALID_MEMORY_ORDER;
321  break;
322  }
323  }
324 
325  /* _Atomic_load_1 */
326 inline _Uint1_t _Load_seq_cst_1(volatile _Uint1_t *_Tgt)
327  { /* load from *_Tgt atomically with
328  sequentially consistent memory order */
329  _Uint1_t _Value;
330 
331  #if defined(_M_ARM) || defined(_M_ARM64)
332  _Value = __iso_volatile_load8((volatile char *)_Tgt);
333  _Memory_barrier();
334 
335  #else
336  _Value = *_Tgt;
337  _Compiler_barrier();
338  #endif
339 
340  return (_Value);
341  }
342 
343 inline _Uint1_t _Load_relaxed_1(volatile _Uint1_t *_Tgt)
344  { /* load from *_Tgt atomically with
345  relaxed memory order */
346  _Uint1_t _Value;
347 
348  #if defined(_M_ARM) || defined(_M_ARM64)
349  _Value = __iso_volatile_load8((volatile char *)_Tgt);
350 
351  #else
352  _Value = *_Tgt;
353  #endif
354 
355  return (_Value);
356  }
357 
358 inline _Uint1_t _Load_acquire_1(volatile _Uint1_t *_Tgt)
359  { /* load from *_Tgt atomically with
360  acquire memory order */
361 
362  return (_Load_seq_cst_1(_Tgt));
363  }
364 
365 inline _Uint1_t _Atomic_load_1(
366  volatile _Uint1_t *_Tgt, memory_order _Order)
367  { /* load from *_Tgt atomically */
368  switch (_Order)
369  {
370  case memory_order_relaxed:
371  return (_Load_relaxed_1(_Tgt));
372 
373  case memory_order_consume:
374  case memory_order_acquire:
375  return (_Load_acquire_1(_Tgt));
376 
377  case memory_order_seq_cst:
378  return (_Load_seq_cst_1(_Tgt));
379 
380  default:
381  _INVALID_MEMORY_ORDER;
382  return (0);
383  }
384  }
385 
386  /* _Atomic_exchange_1 */
387 inline _Uint1_t _Exchange_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
388  { /* exchange _Value and *_Tgt atomically with
389  sequentially consistent memory order */
390 
391  return (_INTRIN_SEQ_CST(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
392  }
393 
394 inline _Uint1_t _Exchange_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
395  { /* exchange _Value and *_Tgt atomically with
396  relaxed memory order */
397 
398  return (_INTRIN_RELAXED(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
399  }
400 
401 inline _Uint1_t _Exchange_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
402  { /* exchange _Value and *_Tgt atomically with
403  acquire memory order */
404 
405  return (_INTRIN_ACQUIRE(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
406  }
407 
408 inline _Uint1_t _Exchange_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
409  { /* exchange _Value and *_Tgt atomically with
410  release memory order */
411 
412  return (_INTRIN_RELEASE(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
413  }
414 
415 inline _Uint1_t _Atomic_exchange_1(
416  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
417  { /* exchange _Value and *_Tgt atomically */
418  switch (_Order)
419  {
420  case memory_order_relaxed:
421  return (_Exchange_relaxed_1(_Tgt, _Value));
422 
423  case memory_order_consume:
424  case memory_order_acquire:
425  return (_Exchange_acquire_1(_Tgt, _Value));
426 
427  case memory_order_release:
428  return (_Exchange_release_1(_Tgt, _Value));
429 
430  case memory_order_acq_rel:
431  case memory_order_seq_cst:
432  return (_Exchange_seq_cst_1(_Tgt, _Value));
433 
434  default:
435  _INVALID_MEMORY_ORDER;
436  return (0);
437  }
438  }
439 
440  /* _Atomic_compare_exchange_weak_1, _Atomic_compare_exchange_strong_1 */
441 inline int _Compare_exchange_seq_cst_1(volatile _Uint1_t *_Tgt,
442  _Uint1_t *_Exp, _Uint1_t _Value)
443  { /* compare and exchange values atomically with
444  sequentially consistent memory order */
445  _Uint1_t _Old_exp = *_Exp; /* read before atomic operation */
446 
447  _Uint1_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange8)((volatile char *)_Tgt,
448  _Value, _Old_exp);
449 
450  if (_Prev == _Old_exp)
451  return (1);
452  else
453  { /* copy old value */
454  *_Exp = _Prev;
455  return (0);
456  }
457  }
458 
459 inline int _Compare_exchange_relaxed_1(volatile _Uint1_t *_Tgt,
460  _Uint1_t *_Exp, _Uint1_t _Value)
461  { /* compare and exchange values atomically with
462  relaxed memory order */
463  _Uint1_t _Old_exp = *_Exp; /* read before atomic operation */
464 
465  _Uint1_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange8)((volatile char *)_Tgt,
466  _Value, _Old_exp);
467 
468  if (_Prev == _Old_exp)
469  return (1);
470  else
471  { /* copy old value */
472  *_Exp = _Prev;
473  return (0);
474  }
475  }
476 
477 inline int _Compare_exchange_acquire_1(volatile _Uint1_t *_Tgt,
478  _Uint1_t *_Exp, _Uint1_t _Value)
479  { /* compare and exchange values atomically with
480  acquire memory order */
481  _Uint1_t _Old_exp = *_Exp; /* read before atomic operation */
482 
483  _Uint1_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange8)((volatile char *)_Tgt,
484  _Value, _Old_exp);
485 
486  if (_Prev == _Old_exp)
487  return (1);
488  else
489  { /* copy old value */
490  *_Exp = _Prev;
491  return (0);
492  }
493  }
494 
495 inline int _Compare_exchange_release_1(volatile _Uint1_t *_Tgt,
496  _Uint1_t *_Exp, _Uint1_t _Value)
497  { /* compare and exchange values atomically with
498  release memory order */
499  _Uint1_t _Old_exp = *_Exp; /* read before atomic operation */
500 
501  _Uint1_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange8)((volatile char *)_Tgt,
502  _Value, _Old_exp);
503 
504  if (_Prev == _Old_exp)
505  return (1);
506  else
507  { /* copy old value */
508  *_Exp = _Prev;
509  return (0);
510  }
511  }
512 
513 inline int _Atomic_compare_exchange_strong_1(
514  volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value,
515  memory_order _Order1, memory_order _Order2)
516  { /* compare and exchange values atomically */
517  _Validate_compare_exchange_memory_order(_Order1, _Order2);
518 
519  switch (_Memory_order_upper_bound(_Order1, _Order2))
520  {
521  case memory_order_relaxed:
522  return (_Compare_exchange_relaxed_1(_Tgt, _Exp, _Value));
523 
524  case memory_order_consume:
525  case memory_order_acquire:
526  return (_Compare_exchange_acquire_1(_Tgt, _Exp, _Value));
527 
528  case memory_order_release:
529  return (_Compare_exchange_release_1(_Tgt, _Exp, _Value));
530 
531  case memory_order_acq_rel:
532  case memory_order_seq_cst:
533  return (_Compare_exchange_seq_cst_1(_Tgt, _Exp, _Value));
534 
535  default:
536  _INVALID_MEMORY_ORDER;
537  return (0);
538  }
539  }
540 
541 inline int _Atomic_compare_exchange_weak_1(
542  volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value,
543  memory_order _Order1, memory_order _Order2)
544  { /* compare and exchange values atomically */
545  /* No weak compare-exchange is currently available,
546  even for ARM, so fall back to strong */
547  return (_Atomic_compare_exchange_strong_1(_Tgt, _Exp, _Value,
548  _Order1, _Order2));
549  }
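/* Illustrative sketch (not from the original source): callers are expected to
   use the weak form in a retry loop, which is why forwarding to the strong form
   (which never fails spuriously) is a valid implementation of it:

        _Uint1_t _Expected = _Atomic_load_1(_Tgt, memory_order_relaxed);
        _Uint1_t _Desired;
        do
            {   // recompute the desired value from the latest observation;
                // on failure *_Exp (here _Expected) is refreshed automatically
            _Desired = (_Uint1_t)(_Expected + 1);
            } while (!_Atomic_compare_exchange_weak_1(_Tgt, &_Expected, _Desired,
                memory_order_seq_cst, memory_order_relaxed));
*/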
550 
551  /* _Atomic_fetch_add_1, _Atomic_fetch_sub_1 */
552 inline _Uint1_t _Fetch_add_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
553  { /* add _Value to *_Tgt atomically with
554  sequentially consistent memory order */
555 
556  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
557  }
558 
559 inline _Uint1_t _Fetch_add_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
560  { /* add _Value to *_Tgt atomically with
561  relaxed memory order */
562 
563  return (_INTRIN_RELAXED(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
564  }
565 
566 inline _Uint1_t _Fetch_add_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
567  { /* add _Value to *_Tgt atomically with
568  acquire memory order */
569 
570  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
571  }
572 
573 inline _Uint1_t _Fetch_add_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
574  { /* add _Value to *_Tgt atomically with
575  release memory order */
576 
577  return (_INTRIN_RELEASE(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
578  }
579 
580 inline _Uint1_t _Atomic_fetch_add_1(
581  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
582  { /* add _Value to *_Tgt atomically */
583  switch (_Order)
584  {
585  case memory_order_relaxed:
586  return (_Fetch_add_relaxed_1(_Tgt, _Value));
587 
588  case memory_order_consume:
589  case memory_order_acquire:
590  return (_Fetch_add_acquire_1(_Tgt, _Value));
591 
592  case memory_order_release:
593  return (_Fetch_add_release_1(_Tgt, _Value));
594 
595  case memory_order_acq_rel:
596  case memory_order_seq_cst:
597  return (_Fetch_add_seq_cst_1(_Tgt, _Value));
598 
599  default:
600  _INVALID_MEMORY_ORDER;
601  return (0);
602  }
603  }
604 
605 inline _Uint1_t _Atomic_fetch_sub_1(
606  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
607  { /* subtract _Value from *_Tgt atomically */
608  return (_Atomic_fetch_add_1(_Tgt, 0 - _Value, _Order));
609  }
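/* Illustrative note (not from the original source): since _Uint1_t is unsigned,
   0 - _Value wraps modulo 256, so the subtraction is performed as an addition of
   the two's complement, e.g.

        _Atomic_fetch_sub_1(_Tgt, 1, memory_order_seq_cst);
        // behaves like _Atomic_fetch_add_1(_Tgt, 0xFF, memory_order_seq_cst)
*/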
610 
611  /* _Atomic_fetch_and_1 */
612 inline _Uint1_t _Fetch_and_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
613  { /* and _Value with *_Tgt atomically with
614  sequentially consistent memory order */
615 
616  return (_INTRIN_SEQ_CST(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
617  }
618 
619 inline _Uint1_t _Fetch_and_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
620  { /* and _Value with *_Tgt atomically with
621  relaxed memory order */
622 
623  return (_INTRIN_RELAXED(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
624  }
625 
626 inline _Uint1_t _Fetch_and_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
627  { /* and _Value with *_Tgt atomically with
628  acquire memory order */
629 
630  return (_INTRIN_ACQUIRE(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
631  }
632 
633 inline _Uint1_t _Fetch_and_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
634  { /* and _Value with *_Tgt atomically with
635  release memory order */
636 
637  return (_INTRIN_RELEASE(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
638  }
639 
640 inline _Uint1_t _Atomic_fetch_and_1(
641  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
642  { /* and _Value with *_Tgt atomically */
643  switch (_Order)
644  {
645  case memory_order_relaxed:
646  return (_Fetch_and_relaxed_1(_Tgt, _Value));
647 
648  case memory_order_consume:
649  case memory_order_acquire:
650  return (_Fetch_and_acquire_1(_Tgt, _Value));
651 
652  case memory_order_release:
653  return (_Fetch_and_release_1(_Tgt, _Value));
654 
655  case memory_order_acq_rel:
656  case memory_order_seq_cst:
657  return (_Fetch_and_seq_cst_1(_Tgt, _Value));
658 
659  default:
660  _INVALID_MEMORY_ORDER;
661  return (0);
662  }
663  }
664 
665  /* _Atomic_fetch_or_1 */
666 inline _Uint1_t _Fetch_or_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
667  { /* or _Value with *_Tgt atomically with
668  sequentially consistent memory order */
669 
670  return (_INTRIN_SEQ_CST(_InterlockedOr8)((volatile char *)_Tgt, _Value));
671  }
672 
673 inline _Uint1_t _Fetch_or_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
674  { /* or _Value with *_Tgt atomically with
675  relaxed memory order */
676 
677  return (_INTRIN_RELAXED(_InterlockedOr8)((volatile char *)_Tgt, _Value));
678  }
679 
680 inline _Uint1_t _Fetch_or_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
681  { /* or _Value with *_Tgt atomically with
682  acquire memory order */
683 
684  return (_INTRIN_ACQUIRE(_InterlockedOr8)((volatile char *)_Tgt, _Value));
685  }
686 
687 inline _Uint1_t _Fetch_or_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
688  { /* or _Value with *_Tgt atomically with
689  release memory order */
690 
691  return (_INTRIN_RELEASE(_InterlockedOr8)((volatile char *)_Tgt, _Value));
692  }
693 
694 inline _Uint1_t _Atomic_fetch_or_1(
695  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
696  { /* or _Value with *_Tgt atomically */
697  switch (_Order)
698  {
699  case memory_order_relaxed:
700  return (_Fetch_or_relaxed_1(_Tgt, _Value));
701 
702  case memory_order_consume:
703  case memory_order_acquire:
704  return (_Fetch_or_acquire_1(_Tgt, _Value));
705 
706  case memory_order_release:
707  return (_Fetch_or_release_1(_Tgt, _Value));
708 
709  case memory_order_acq_rel:
710  case memory_order_seq_cst:
711  return (_Fetch_or_seq_cst_1(_Tgt, _Value));
712 
713  default:
714  _INVALID_MEMORY_ORDER;
715  return (0);
716  }
717  }
718 
719  /* _Atomic_fetch_xor_1 */
720 inline _Uint1_t _Fetch_xor_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
721  { /* xor _Value with *_Tgt atomically with
722  sequentially consistent memory order */
723 
724  return (_INTRIN_SEQ_CST(_InterlockedXor8)((volatile char *)_Tgt, _Value));
725  }
726 
727 inline _Uint1_t _Fetch_xor_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
728  { /* xor _Value with *_Tgt atomically with
729  relaxed memory order */
730 
731  return (_INTRIN_RELAXED(_InterlockedXor8)((volatile char *)_Tgt, _Value));
732  }
733 
734 inline _Uint1_t _Fetch_xor_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
735  { /* xor _Value with *_Tgt atomically with
736  acquire memory order */
737 
738  return (_INTRIN_ACQUIRE(_InterlockedXor8)((volatile char *)_Tgt, _Value));
739  }
740 
741 inline _Uint1_t _Fetch_xor_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
742  { /* xor _Value with *_Tgt atomically with
743  release memory order */
744 
745  return (_INTRIN_RELEASE(_InterlockedXor8)((volatile char *)_Tgt, _Value));
746  }
747 
748 inline _Uint1_t _Atomic_fetch_xor_1(
749  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
750  { /* xor _Value with *_Tgt atomically */
751  switch (_Order)
752  {
753  case memory_order_relaxed:
754  return (_Fetch_xor_relaxed_1(_Tgt, _Value));
755 
756  case memory_order_consume:
757  case memory_order_acquire:
758  return (_Fetch_xor_acquire_1(_Tgt, _Value));
759 
760  case memory_order_release:
761  return (_Fetch_xor_release_1(_Tgt, _Value));
762 
763  case memory_order_acq_rel:
764  case memory_order_seq_cst:
765  return (_Fetch_xor_seq_cst_1(_Tgt, _Value));
766 
767  default:
768  _INVALID_MEMORY_ORDER;
769  return (0);
770  }
771  }
772 
773  /* _Atomic_store_2 */
774 inline void _Store_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
775  { /* store _Value atomically with relaxed memory order */
776 
777  #if defined(_M_ARM) || defined(_M_ARM64)
778  __iso_volatile_store16((volatile short *)_Tgt, _Value);
779 
780  #else
781  *_Tgt = _Value;
782  #endif
783  }
784 
785 inline void _Store_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
786  { /* store _Value atomically with release memory order */
787 
788  #if defined(_M_ARM) || defined(_M_ARM64)
789  _Memory_barrier();
790  __iso_volatile_store16((volatile short *)_Tgt, _Value);
791 
792  #else
793  _Compiler_barrier();
794  *_Tgt = _Value;
795  #endif
796  }
797 
798 inline void _Store_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
799  { /* store _Value atomically with
800  sequentially consistent memory order */
801 
802  #if defined(_M_ARM) || defined(_M_ARM64)
803  _Memory_barrier();
804  __iso_volatile_store16((volatile short *)_Tgt, _Value);
805  _Memory_barrier();
806 
807  #else
808  _INTRIN_SEQ_CST(_InterlockedExchange16)((volatile short *)_Tgt, _Value);
809  #endif
810  }
811 
812 inline void _Atomic_store_2(
813  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
814  { /* store _Value atomically */
815  switch (_Order)
816  {
817  case memory_order_relaxed:
818  _Store_relaxed_2(_Tgt, _Value);
819  break;
820 
821  case memory_order_release:
822  _Store_release_2(_Tgt, _Value);
823  break;
824 
825  case memory_order_seq_cst:
826  _Store_seq_cst_2(_Tgt, _Value);
827  break;
828 
829  default:
830  _INVALID_MEMORY_ORDER;
831  break;
832  }
833  }
834 
835  /* _Atomic_load_2 */
836 inline _Uint2_t _Load_seq_cst_2(volatile _Uint2_t *_Tgt)
837  { /* load from *_Tgt atomically with
838  sequentially consistent memory order */
839  _Uint2_t _Value;
840 
841  #if defined(_M_ARM) || defined(_M_ARM64)
842  _Value = __iso_volatile_load16((volatile short *)_Tgt);
843  _Memory_barrier();
844 
845  #else
846  _Value = *_Tgt;
847  _Compiler_barrier();
848  #endif
849 
850  return (_Value);
851  }
852 
853 inline _Uint2_t _Load_relaxed_2(volatile _Uint2_t *_Tgt)
854  { /* load from *_Tgt atomically with
855  relaxed memory order */
856  _Uint2_t _Value;
857 
858  #if defined(_M_ARM) || defined(_M_ARM64)
859  _Value = __iso_volatile_load16((volatile short *)_Tgt);
860 
861  #else
862  _Value = *_Tgt;
863  #endif
864 
865  return (_Value);
866  }
867 
868 inline _Uint2_t _Load_acquire_2(volatile _Uint2_t *_Tgt)
869  { /* load from *_Tgt atomically with
870  acquire memory order */
871 
872  return (_Load_seq_cst_2(_Tgt));
873  }
874 
875 inline _Uint2_t _Atomic_load_2(
876  volatile _Uint2_t *_Tgt, memory_order _Order)
877  { /* load from *_Tgt atomically */
878  switch (_Order)
879  {
880  case memory_order_relaxed:
881  return (_Load_relaxed_2(_Tgt));
882 
883  case memory_order_consume:
884  case memory_order_acquire:
885  return (_Load_acquire_2(_Tgt));
886 
887  case memory_order_seq_cst:
888  return (_Load_seq_cst_2(_Tgt));
889 
890  default:
891  _INVALID_MEMORY_ORDER;
892  return (0);
893  }
894  }
895 
896  /* _Atomic_exchange_2 */
897 inline _Uint2_t _Exchange_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
898  { /* exchange _Value and *_Tgt atomically with
899  sequentially consistent memory order */
900 
901  return (_INTRIN_SEQ_CST(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
902  }
903 
904 inline _Uint2_t _Exchange_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
905  { /* exchange _Value and *_Tgt atomically with
906  relaxed memory order */
907 
908  return (_INTRIN_RELAXED(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
909  }
910 
911 inline _Uint2_t _Exchange_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
912  { /* exchange _Value and *_Tgt atomically with
913  acquire memory order */
914 
915  return (_INTRIN_ACQUIRE(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
916  }
917 
918 inline _Uint2_t _Exchange_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
919  { /* exchange _Value and *_Tgt atomically with
920  release memory order */
921 
922  return (_INTRIN_RELEASE(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
923  }
924 
925 inline _Uint2_t _Atomic_exchange_2(
926  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
927  { /* exchange _Value and *_Tgt atomically */
928  switch (_Order)
929  {
930  case memory_order_relaxed:
931  return (_Exchange_relaxed_2(_Tgt, _Value));
932 
933  case memory_order_consume:
934  case memory_order_acquire:
935  return (_Exchange_acquire_2(_Tgt, _Value));
936 
937  case memory_order_release:
938  return (_Exchange_release_2(_Tgt, _Value));
939 
940  case memory_order_acq_rel:
941  case memory_order_seq_cst:
942  return (_Exchange_seq_cst_2(_Tgt, _Value));
943 
944  default:
945  _INVALID_MEMORY_ORDER;
946  return (0);
947  }
948  }
949 
950  /* _Atomic_compare_exchange_weak_2, _Atomic_compare_exchange_strong_2 */
951 inline int _Compare_exchange_seq_cst_2(volatile _Uint2_t *_Tgt,
952  _Uint2_t *_Exp, _Uint2_t _Value)
953  { /* compare and exchange values atomically with
954  sequentially consistent memory order */
955  _Uint2_t _Old_exp = *_Exp; /* read before atomic operation */
956 
957  _Uint2_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange16)((volatile short *)_Tgt,
958  _Value, _Old_exp);
959 
960  if (_Prev == _Old_exp)
961  return (1);
962  else
963  { /* copy old value */
964  *_Exp = _Prev;
965  return (0);
966  }
967  }
968 
969 inline int _Compare_exchange_relaxed_2(volatile _Uint2_t *_Tgt,
970  _Uint2_t *_Exp, _Uint2_t _Value)
971  { /* compare and exchange values atomically with
972  relaxed memory order */
973  _Uint2_t _Old_exp = *_Exp; /* read before atomic operation */
974 
975  _Uint2_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange16)((volatile short *)_Tgt,
976  _Value, _Old_exp);
977 
978  if (_Prev == _Old_exp)
979  return (1);
980  else
981  { /* copy old value */
982  *_Exp = _Prev;
983  return (0);
984  }
985  }
986 
987 inline int _Compare_exchange_acquire_2(volatile _Uint2_t *_Tgt,
988  _Uint2_t *_Exp, _Uint2_t _Value)
989  { /* compare and exchange values atomically with
990  acquire memory order */
991  _Uint2_t _Old_exp = *_Exp; /* read before atomic operation */
992 
993  _Uint2_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange16)((volatile short *)_Tgt,
994  _Value, _Old_exp);
995 
996  if (_Prev == _Old_exp)
997  return (1);
998  else
999  { /* copy old value */
1000  *_Exp = _Prev;
1001  return (0);
1002  }
1003  }
1004 
1005 inline int _Compare_exchange_release_2(volatile _Uint2_t *_Tgt,
1006  _Uint2_t *_Exp, _Uint2_t _Value)
1007  { /* compare and exchange values atomically with
1008  release memory order */
1009  _Uint2_t _Old_exp = *_Exp; /* read before atomic operation */
1010 
1011  _Uint2_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange16)((volatile short *)_Tgt,
1012  _Value, _Old_exp);
1013 
1014  if (_Prev == _Old_exp)
1015  return (1);
1016  else
1017  { /* copy old value */
1018  *_Exp = _Prev;
1019  return (0);
1020  }
1021  }
1022 
1023 inline int _Atomic_compare_exchange_strong_2(
1024  volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value,
1025  memory_order _Order1, memory_order _Order2)
1026  { /* compare and exchange values atomically */
1027  _Validate_compare_exchange_memory_order(_Order1, _Order2);
1028 
1029  switch (_Memory_order_upper_bound(_Order1, _Order2))
1030  {
1031  case memory_order_relaxed:
1032  return (_Compare_exchange_relaxed_2(_Tgt, _Exp, _Value));
1033 
1034  case memory_order_consume:
1035  case memory_order_acquire:
1036  return (_Compare_exchange_acquire_2(_Tgt, _Exp, _Value));
1037 
1038  case memory_order_release:
1039  return (_Compare_exchange_release_2(_Tgt, _Exp, _Value));
1040 
1041  case memory_order_acq_rel:
1042  case memory_order_seq_cst:
1043  return (_Compare_exchange_seq_cst_2(_Tgt, _Exp, _Value));
1044 
1045  default:
1046  _INVALID_MEMORY_ORDER;
1047  return (0);
1048  }
1049  }
1050 
1051 inline int _Atomic_compare_exchange_weak_2(
1052  volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value,
1053  memory_order _Order1, memory_order _Order2)
1054  { /* compare and exchange values atomically */
1055  /* No weak compare-exchange is currently available,
1056  even for ARM, so fall back to strong */
1057  return (_Atomic_compare_exchange_strong_2(_Tgt, _Exp, _Value,
1058  _Order1, _Order2));
1059  }
1060 
1061  /* _Atomic_fetch_add_2, _Atomic_fetch_sub_2 */
1062 inline _Uint2_t _Fetch_add_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1063  { /* add _Value to *_Tgt atomically with
1064  sequentially consistent memory order */
1065 
1066  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1067  }
1068 
1069 inline _Uint2_t _Fetch_add_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1070  { /* add _Value to *_Tgt atomically with
1071  relaxed memory order */
1072 
1073  return (_INTRIN_RELAXED(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1074  }
1075 
1076 inline _Uint2_t _Fetch_add_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1077  { /* add _Value to *_Tgt atomically with
1078  acquire memory order */
1079 
1080  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1081  }
1082 
1083 inline _Uint2_t _Fetch_add_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1084  { /* add _Value to *_Tgt atomically with
1085  release memory order */
1086 
1087  return (_INTRIN_RELEASE(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1088  }
1089 
1090 inline _Uint2_t _Atomic_fetch_add_2(
1091  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
1092  { /* add _Value to *_Tgt atomically */
1093  switch (_Order)
1094  {
1095  case memory_order_relaxed:
1096  return (_Fetch_add_relaxed_2(_Tgt, _Value));
1097 
1098  case memory_order_consume:
1099  case memory_order_acquire:
1100  return (_Fetch_add_acquire_2(_Tgt, _Value));
1101 
1102  case memory_order_release:
1103  return (_Fetch_add_release_2(_Tgt, _Value));
1104 
1105  case memory_order_acq_rel:
1106  case memory_order_seq_cst:
1107  return (_Fetch_add_seq_cst_2(_Tgt, _Value));
1108 
1109  default:
1110  _INVALID_MEMORY_ORDER;
1111  return (0);
1112  }
1113  }
1114 
1115 inline _Uint2_t _Atomic_fetch_sub_2(
1116  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
1117  { /* subtract _Value from *_Tgt atomically */
1118  return (_Atomic_fetch_add_2(_Tgt, 0 - _Value, _Order));
1119  }
1120 
1121  /* _Atomic_fetch_and_2 */
1122 inline _Uint2_t _Fetch_and_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1123  { /* and _Value with *_Tgt atomically with
1124  sequentially consistent memory order */
1125 
1126  return (_INTRIN_SEQ_CST(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1127  }
1128 
1129 inline _Uint2_t _Fetch_and_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1130  { /* and _Value with *_Tgt atomically with
1131  relaxed memory order */
1132 
1133  return (_INTRIN_RELAXED(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1134  }
1135 
1136 inline _Uint2_t _Fetch_and_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1137  { /* and _Value with *_Tgt atomically with
1138  acquire memory order */
1139 
1140  return (_INTRIN_ACQUIRE(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1141  }
1142 
1143 inline _Uint2_t _Fetch_and_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1144  { /* and _Value with *_Tgt atomically with
1145  release memory order */
1146 
1147  return (_INTRIN_RELEASE(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1148  }
1149 
1150 inline _Uint2_t _Atomic_fetch_and_2(
1151  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
1152  { /* and _Value with *_Tgt atomically */
1153  switch (_Order)
1154  {
1155  case memory_order_relaxed:
1156  return (_Fetch_and_relaxed_2(_Tgt, _Value));
1157 
1158  case memory_order_consume:
1159  case memory_order_acquire:
1160  return (_Fetch_and_acquire_2(_Tgt, _Value));
1161 
1162  case memory_order_release:
1163  return (_Fetch_and_release_2(_Tgt, _Value));
1164 
1165  case memory_order_acq_rel:
1166  case memory_order_seq_cst:
1167  return (_Fetch_and_seq_cst_2(_Tgt, _Value));
1168 
1169  default:
1170  _INVALID_MEMORY_ORDER;
1171  return (0);
1172  }
1173  }
1174 
1175  /* _Atomic_fetch_or_2 */
1176 inline _Uint2_t _Fetch_or_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1177  { /* or _Value with *_Tgt atomically with
1178  sequentially consistent memory order */
1179 
1180  return (_INTRIN_SEQ_CST(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1181  }
1182 
1183 inline _Uint2_t _Fetch_or_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1184  { /* or _Value with *_Tgt atomically with
1185  relaxed memory order */
1186 
1187  return (_INTRIN_RELAXED(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1188  }
1189 
1190 inline _Uint2_t _Fetch_or_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1191  { /* or _Value with *_Tgt atomically with
1192  acquire memory order */
1193 
1194  return (_INTRIN_ACQUIRE(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1195  }
1196 
1197 inline _Uint2_t _Fetch_or_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1198  { /* or _Value with *_Tgt atomically with
1199  release memory order */
1200 
1201  return (_INTRIN_RELEASE(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1202  }
1203 
1204 inline _Uint2_t _Atomic_fetch_or_2(
1205  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
1206  { /* or _Value with *_Tgt atomically */
1207  switch (_Order)
1208  {
1209  case memory_order_relaxed:
1210  return (_Fetch_or_relaxed_2(_Tgt, _Value));
1211 
1212  case memory_order_consume:
1213  case memory_order_acquire:
1214  return (_Fetch_or_acquire_2(_Tgt, _Value));
1215 
1216  case memory_order_release:
1217  return (_Fetch_or_release_2(_Tgt, _Value));
1218 
1219  case memory_order_acq_rel:
1220  case memory_order_seq_cst:
1221  return (_Fetch_or_seq_cst_2(_Tgt, _Value));
1222 
1223  default:
1224  _INVALID_MEMORY_ORDER;
1225  return (0);
1226  }
1227  }
1228 
1229  /* _Atomic_fetch_xor_2 */
1230 inline _Uint2_t _Fetch_xor_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1231  { /* xor _Value with *_Tgt atomically with
1232  sequentially consistent memory order */
1233 
1234  return (_INTRIN_SEQ_CST(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1235  }
1236 
1237 inline _Uint2_t _Fetch_xor_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1238  { /* xor _Value with *_Tgt atomically with
1239  relaxed memory order */
1240 
1241  return (_INTRIN_RELAXED(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1242  }
1243 
1244 inline _Uint2_t _Fetch_xor_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1245  { /* xor _Value with *_Tgt atomically with
1246  acquire memory order */
1247 
1248  return (_INTRIN_ACQUIRE(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1249  }
1250 
1251 inline _Uint2_t _Fetch_xor_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1252  { /* xor _Value with *_Tgt atomically with
1253  release memory order */
1254 
1255  return (_INTRIN_RELEASE(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1256  }
1257 
1258 inline _Uint2_t _Atomic_fetch_xor_2(
1259  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
1260  { /* xor _Value with *_Tgt atomically */
1261  switch (_Order)
1262  {
1263  case memory_order_relaxed:
1264  return (_Fetch_xor_relaxed_2(_Tgt, _Value));
1265 
1266  case memory_order_consume:
1267  case memory_order_acquire:
1268  return (_Fetch_xor_acquire_2(_Tgt, _Value));
1269 
1270  case memory_order_release:
1271  return (_Fetch_xor_release_2(_Tgt, _Value));
1272 
1273  case memory_order_acq_rel:
1274  case memory_order_seq_cst:
1275  return (_Fetch_xor_seq_cst_2(_Tgt, _Value));
1276 
1277  default:
1278  _INVALID_MEMORY_ORDER;
1279  return (0);
1280  }
1281  }
1282 
1283  /* _Atomic_store_4 */
1284 inline void _Store_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1285  { /* store _Value atomically with relaxed memory order */
1286 
1287  #if defined(_M_ARM) || defined(_M_ARM64)
1288  __iso_volatile_store32((volatile int *)_Tgt, _Value);
1289 
1290  #else
1291  *_Tgt = _Value;
1292  #endif
1293  }
1294 
1295 inline void _Store_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1296  { /* store _Value atomically with release memory order */
1297 
1298  #if defined(_M_ARM) || defined(_M_ARM64)
1299  _Memory_barrier();
1300  __iso_volatile_store32((volatile int *)_Tgt, _Value);
1301 
1302  #else
1303  _Compiler_barrier();
1304  *_Tgt = _Value;
1305  #endif
1306  }
1307 
1308 inline void _Store_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1309  { /* store _Value atomically with
1310  sequentially consistent memory order */
1311 
1312  #if defined(_M_ARM) || defined(_M_ARM64)
1313  _Memory_barrier();
1314  __iso_volatile_store32((volatile int *)_Tgt, _Value);
1315  _Memory_barrier();
1316 
1317  #else
1318  _INTRIN_SEQ_CST(_InterlockedExchange)((volatile long *)_Tgt, _Value);
1319  #endif
1320  }
1321 
1322 inline void _Atomic_store_4(
1323  volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1324  { /* store _Value atomically */
1325  switch (_Order)
1326  {
1327  case memory_order_relaxed:
1328  _Store_relaxed_4(_Tgt, _Value);
1329  break;
1330 
1331  case memory_order_release:
1332  _Store_release_4(_Tgt, _Value);
1333  break;
1334 
1335  case memory_order_seq_cst:
1336  _Store_seq_cst_4(_Tgt, _Value);
1337  break;
1338 
1339  default:
1340  _INVALID_MEMORY_ORDER;
1341  break;
1342  }
1343  }
1344 
1345  /* _Atomic_load_4 */
1346 inline _Uint4_t _Load_seq_cst_4(volatile _Uint4_t *_Tgt)
1347  { /* load from *_Tgt atomically with
1348  sequentially consistent memory order */
1349  _Uint4_t _Value;
1350 
1351  #if defined(_M_ARM) || defined(_M_ARM64)
1352  _Value = __iso_volatile_load32((volatile int *)_Tgt);
1353  _Memory_barrier();
1354 
1355  #else
1356  _Value = *_Tgt;
1357  _Compiler_barrier();
1358  #endif
1359 
1360  return (_Value);
1361  }
1362 
1363 inline _Uint4_t _Load_relaxed_4(volatile _Uint4_t *_Tgt)
1364  { /* load from *_Tgt atomically with
1365  relaxed memory order */
1366  _Uint4_t _Value;
1367 
1368  #if defined(_M_ARM) || defined(_M_ARM64)
1369  _Value = __iso_volatile_load32((volatile int *)_Tgt);
1370 
1371  #else
1372  _Value = *_Tgt;
1373  #endif
1374 
1375  return (_Value);
1376  }
1377 
1378 inline _Uint4_t _Load_acquire_4(volatile _Uint4_t *_Tgt)
1379  { /* load from *_Tgt atomically with
1380  acquire memory order */
1381 
1382  return (_Load_seq_cst_4(_Tgt));
1383  }
1384 
1385 inline _Uint4_t _Atomic_load_4(
1386  volatile _Uint4_t *_Tgt, memory_order _Order)
1387  { /* load from *_Tgt atomically */
1388  switch (_Order)
1389  {
1390  case memory_order_relaxed:
1391  return (_Load_relaxed_4(_Tgt));
1392 
1393  case memory_order_consume:
1394  case memory_order_acquire:
1395  return (_Load_acquire_4(_Tgt));
1396 
1397  case memory_order_seq_cst:
1398  return (_Load_seq_cst_4(_Tgt));
1399 
1400  default:
1401  _INVALID_MEMORY_ORDER;
1402  return (0);
1403  }
1404  }
1405 
1406  /* _Atomic_exchange_4 */
1407 inline _Uint4_t _Exchange_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1408  { /* exchange _Value and *_Tgt atomically with
1409  sequentially consistent memory order */
1410 
1411  return (_INTRIN_SEQ_CST(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1412  }
1413 
1414 inline _Uint4_t _Exchange_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1415  { /* exchange _Value and *_Tgt atomically with
1416  relaxed memory order */
1417 
1418  return (_INTRIN_RELAXED(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1419  }
1420 
1421 inline _Uint4_t _Exchange_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1422  { /* exchange _Value and *_Tgt atomically with
1423  acquire memory order */
1424 
1425  return (_INTRIN_ACQUIRE(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1426  }
1427 
1428 inline _Uint4_t _Exchange_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1429  { /* exchange _Value and *_Tgt atomically with
1430  release memory order */
1431 
1432  return (_INTRIN_RELEASE(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1433  }
1434 
1435 inline _Uint4_t _Atomic_exchange_4(
1436  volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1437  { /* exchange _Value and *_Tgt atomically */
1438  switch (_Order)
1439  {
1440  case memory_order_relaxed:
1441  return (_Exchange_relaxed_4(_Tgt, _Value));
1442 
1443  case memory_order_consume:
1444  case memory_order_acquire:
1445  return (_Exchange_acquire_4(_Tgt, _Value));
1446 
1447  case memory_order_release:
1448  return (_Exchange_release_4(_Tgt, _Value));
1449 
1450  case memory_order_acq_rel:
1451  case memory_order_seq_cst:
1452  return (_Exchange_seq_cst_4(_Tgt, _Value));
1453 
1454  default:
1455  _INVALID_MEMORY_ORDER;
1456  return (0);
1457  }
1458  }
1459 
1460  /* _Atomic_compare_exchange_weak_4, _Atomic_compare_exchange_strong_4 */
1461 inline int _Compare_exchange_seq_cst_4(volatile _Uint4_t *_Tgt,
1462  _Uint4_t *_Exp, _Uint4_t _Value)
1463  { /* compare and exchange values atomically with
1464  sequentially consistent memory order */
1465  _Uint4_t _Old_exp = *_Exp; /* read before atomic operation */
1466 
1467  _Uint4_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange)((volatile long *)_Tgt,
1468  _Value, _Old_exp);
1469 
1470  if (_Prev == _Old_exp)
1471  return (1);
1472  else
1473  { /* copy old value */
1474  *_Exp = _Prev;
1475  return (0);
1476  }
1477  }
1478 
1479 inline int _Compare_exchange_relaxed_4(volatile _Uint4_t *_Tgt,
1480  _Uint4_t *_Exp, _Uint4_t _Value)
1481  { /* compare and exchange values atomically with
1482  relaxed memory order */
1483  _Uint4_t _Old_exp = *_Exp; /* read before atomic operation */
1484 
1485  _Uint4_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange)((volatile long *)_Tgt,
1486  _Value, _Old_exp);
1487 
1488  if (_Prev == _Old_exp)
1489  return (1);
1490  else
1491  { /* copy old value */
1492  *_Exp = _Prev;
1493  return (0);
1494  }
1495  }
1496 
1497 inline int _Compare_exchange_acquire_4(volatile _Uint4_t *_Tgt,
1498  _Uint4_t *_Exp, _Uint4_t _Value)
1499  { /* compare and exchange values atomically with
1500  acquire memory order */
1501  _Uint4_t _Old_exp = *_Exp; /* read before atomic operation */
1502 
1503  _Uint4_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange)((volatile long *)_Tgt,
1504  _Value, _Old_exp);
1505 
1506  if (_Prev == _Old_exp)
1507  return (1);
1508  else
1509  { /* copy old value */
1510  *_Exp = _Prev;
1511  return (0);
1512  }
1513  }
1514 
1515 inline int _Compare_exchange_release_4(volatile _Uint4_t *_Tgt,
1516  _Uint4_t *_Exp, _Uint4_t _Value)
1517  { /* compare and exchange values atomically with
1518  release memory order */
1519  _Uint4_t _Old_exp = *_Exp; /* read before atomic operation */
1520 
1521  _Uint4_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange)((volatile long *)_Tgt,
1522  _Value, _Old_exp);
1523 
1524  if (_Prev == _Old_exp)
1525  return (1);
1526  else
1527  { /* copy old value */
1528  *_Exp = _Prev;
1529  return (0);
1530  }
1531  }
1532 
1533 inline int _Atomic_compare_exchange_strong_4(
1534  volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value,
1535  memory_order _Order1, memory_order _Order2)
1536  { /* compare and exchange values atomically */
1537  _Validate_compare_exchange_memory_order(_Order1, _Order2);
1538 
1539  switch (_Memory_order_upper_bound(_Order1, _Order2))
1540  {
1541  case memory_order_relaxed:
1542  return (_Compare_exchange_relaxed_4(_Tgt, _Exp, _Value));
1543 
1544  case memory_order_consume:
1545  case memory_order_acquire:
1546  return (_Compare_exchange_acquire_4(_Tgt, _Exp, _Value));
1547 
1548  case memory_order_release:
1549  return (_Compare_exchange_release_4(_Tgt, _Exp, _Value));
1550 
1551  case memory_order_acq_rel:
1552  case memory_order_seq_cst:
1553  return (_Compare_exchange_seq_cst_4(_Tgt, _Exp, _Value));
1554 
1555  default:
1556  _INVALID_MEMORY_ORDER;
1557  return (0);
1558  }
1559  }
1560 
1561 inline int _Atomic_compare_exchange_weak_4(
1562  volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value,
1563  memory_order _Order1, memory_order _Order2)
1564  { /* compare and exchange values atomically */
1565  /* No weak compare-exchange is currently available,
1566  even for ARM, so fall back to strong */
1567  return (_Atomic_compare_exchange_strong_4(_Tgt, _Exp, _Value,
1568  _Order1, _Order2));
1569  }
1570 
1571  /* _Atomic_fetch_add_4, _Atomic_fetch_sub_4 */
1572 inline _Uint4_t _Fetch_add_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1573  { /* add _Value to *_Tgt atomically with
1574  sequentially consistent memory order */
1575 
1576  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1577  }
1578 
1579 inline _Uint4_t _Fetch_add_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1580  { /* add _Value to *_Tgt atomically with
1581  relaxed memory order */
1582 
1583  return (_INTRIN_RELAXED(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1584  }
1585 
1586 inline _Uint4_t _Fetch_add_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1587  { /* add _Value to *_Tgt atomically with
1588  acquire memory order */
1589 
1590  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1591  }
1592 
1593 inline _Uint4_t _Fetch_add_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1594  { /* add _Value to *_Tgt atomically with
1595  release memory order */
1596 
1597  return (_INTRIN_RELEASE(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1598  }
1599 
1600 inline _Uint4_t _Atomic_fetch_add_4(
1601  volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1602  { /* add _Value to *_Tgt atomically */
1603  switch (_Order)
1604  {
1605  case memory_order_relaxed:
1606  return (_Fetch_add_relaxed_4(_Tgt, _Value));
1607 
1608  case memory_order_consume:
1609  case memory_order_acquire:
1610  return (_Fetch_add_acquire_4(_Tgt, _Value));
1611 
1612  case memory_order_release:
1613  return (_Fetch_add_release_4(_Tgt, _Value));
1614 
1615  case memory_order_acq_rel:
1616  case memory_order_seq_cst:
1617  return (_Fetch_add_seq_cst_4(_Tgt, _Value));
1618 
1619  default:
1620  _INVALID_MEMORY_ORDER;
1621  return (0);
1622  }
1623  }
1624 
1625 inline _Uint4_t _Atomic_fetch_sub_4(
1626  volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1627  { /* subtract _Value from *_Tgt atomically */
1628  return (_Atomic_fetch_add_4(_Tgt, 0 - _Value, _Order));
1629  }
1630 
1631  /* _Atomic_fetch_and_4 */
1632 inline _Uint4_t _Fetch_and_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1633  { /* and _Value with *_Tgt atomically with
1634  sequentially consistent memory order */
1635 
1636  return (_INTRIN_SEQ_CST(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1637  }
1638 
1639 inline _Uint4_t _Fetch_and_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1640  { /* and _Value with *_Tgt atomically with
1641  relaxed memory order */
1642 
1643  return (_INTRIN_RELAXED(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1644  }
1645 
1646 inline _Uint4_t _Fetch_and_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1647  { /* and _Value with *_Tgt atomically with
1648  acquire memory order */
1649 
1650  return (_INTRIN_ACQUIRE(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1651  }
1652 
1653 inline _Uint4_t _Fetch_and_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1654  { /* and _Value with *_Tgt atomically with
1655  release memory order */
1656 
1657  return (_INTRIN_RELEASE(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1658  }
1659 
1660 inline _Uint4_t _Atomic_fetch_and_4(
1661  volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1662  { /* and _Value with *_Tgt atomically */
1663  switch (_Order)
1664  {
1665  case memory_order_relaxed:
1666  return (_Fetch_and_relaxed_4(_Tgt, _Value));
1667 
1668  case memory_order_consume:
1669  case memory_order_acquire:
1670  return (_Fetch_and_acquire_4(_Tgt, _Value));
1671 
1672  case memory_order_release:
1673  return (_Fetch_and_release_4(_Tgt, _Value));
1674 
1675  case memory_order_acq_rel:
1676  case memory_order_seq_cst:
1677  return (_Fetch_and_seq_cst_4(_Tgt, _Value));
1678 
1679  default:
1680  _INVALID_MEMORY_ORDER;
1681  return (0);
1682  }
1683  }
1684 
1685  /* _Atomic_fetch_or_4 */
1686 inline _Uint4_t _Fetch_or_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1687  { /* or _Value with *_Tgt atomically with
1688  sequentially consistent memory order */
1689 
1690  return (_INTRIN_SEQ_CST(_InterlockedOr)((volatile long *)_Tgt, _Value));
1691  }
1692 
1693 inline _Uint4_t _Fetch_or_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1694  { /* or _Value with *_Tgt atomically with
1695  relaxed memory order */
1696 
1697  return (_INTRIN_RELAXED(_InterlockedOr)((volatile long *)_Tgt, _Value));
1698  }
1699 
1700 inline _Uint4_t _Fetch_or_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1701  { /* or _Value with *_Tgt atomically with
1702  acquire memory order */
1703 
1704  return (_INTRIN_ACQUIRE(_InterlockedOr)((volatile long *)_Tgt, _Value));
1705  }
1706 
1707 inline _Uint4_t _Fetch_or_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1708  { /* or _Value with *_Tgt atomically with
1709  release memory order */
1710 
1711  return (_INTRIN_RELEASE(_InterlockedOr)((volatile long *)_Tgt, _Value));
1712  }
1713 
1714 inline _Uint4_t _Atomic_fetch_or_4(
1715  volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1716  { /* or _Value with *_Tgt atomically */
1717  switch (_Order)
1718  {
1719  case memory_order_relaxed:
1720  return (_Fetch_or_relaxed_4(_Tgt, _Value));
1721 
1722  case memory_order_consume:
1723  case memory_order_acquire:
1724  return (_Fetch_or_acquire_4(_Tgt, _Value));
1725 
1726  case memory_order_release:
1727  return (_Fetch_or_release_4(_Tgt, _Value));
1728 
1729  case memory_order_acq_rel:
1730  case memory_order_seq_cst:
1731  return (_Fetch_or_seq_cst_4(_Tgt, _Value));
1732 
1733  default:
1734  _INVALID_MEMORY_ORDER;
1735  return (0);
1736  }
1737  }
1738 
1739  /* _Atomic_fetch_xor_4 */
1740 inline _Uint4_t _Fetch_xor_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1741  { /* xor _Value with *_Tgt atomically with
1742  sequentially consistent memory order */
1743 
1744  return (_INTRIN_SEQ_CST(_InterlockedXor)((volatile long *)_Tgt, _Value));
1745  }
1746 
1747 inline _Uint4_t _Fetch_xor_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1748  { /* xor _Value with *_Tgt atomically with
1749  relaxed memory order */
1750 
1751  return (_INTRIN_RELAXED(_InterlockedXor)((volatile long *)_Tgt, _Value));
1752  }
1753 
1754 inline _Uint4_t _Fetch_xor_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1755  { /* xor _Value with *_Tgt atomically with
1756  acquire memory order */
1757 
1758  return (_INTRIN_ACQUIRE(_InterlockedXor)((volatile long *)_Tgt, _Value));
1759  }
1760 
1761 inline _Uint4_t _Fetch_xor_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1762  { /* xor _Value with *_Tgt atomically with
1763  release memory order */
1764 
1765  return (_INTRIN_RELEASE(_InterlockedXor)((volatile long *)_Tgt, _Value));
1766  }
1767 
1768 inline _Uint4_t _Atomic_fetch_xor_4(
1769  volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1770  { /* xor _Value with *_Tgt atomically */
1771  switch (_Order)
1772  {
1773  case memory_order_relaxed:
1774  return (_Fetch_xor_relaxed_4(_Tgt, _Value));
1775 
1776  case memory_order_consume:
1777  case memory_order_acquire:
1778  return (_Fetch_xor_acquire_4(_Tgt, _Value));
1779 
1780  case memory_order_release:
1781  return (_Fetch_xor_release_4(_Tgt, _Value));
1782 
1783  case memory_order_acq_rel:
1784  case memory_order_seq_cst:
1785  return (_Fetch_xor_seq_cst_4(_Tgt, _Value));
1786 
1787  default:
1788  _INVALID_MEMORY_ORDER;
1789  return (0);
1790  }
1791  }
1792 
1793  /* _Atomic_store_8 */
1794 inline void _Store_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1795  { /* store _Value atomically with relaxed memory order */
1796 
1797  #if defined(_M_X64)
1798  *_Tgt = _Value;
1799 
1800  #elif defined(_M_ARM64)
1801  __iso_volatile_store64((volatile _LONGLONG *)_Tgt, _Value);
1802 
1803  #else
1804  _INTRIN_RELAXED(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value);
1805  #endif
1806  }
1807 
1808 inline void _Store_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1809  { /* store _Value atomically with release memory order */
1810 
1811  #if defined(_M_X64)
1812  _Compiler_barrier();
1813  *_Tgt = _Value;
1814 
1815  #elif defined(_M_ARM64)
1816  _Memory_barrier();
1817  __iso_volatile_store64((volatile _LONGLONG *)_Tgt, _Value);
1818 
1819  #else
1820  _INTRIN_RELEASE(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value);
1821  #endif
1822  }
1823 
1824 inline void _Store_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1825  { /* store _Value atomically with
1826  sequentially consistent memory order */
1827 
1828  #if defined(_M_ARM64)
1829  _Memory_barrier();
1830  __iso_volatile_store64((volatile _LONGLONG *)_Tgt, _Value);
1831  _Memory_barrier();
1832 
1833  #else
1834  _INTRIN_SEQ_CST(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value);
1835  #endif
1836  }
1837 
1838 inline void _Atomic_store_8(
1839  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
1840  { /* store _Value atomically */
1841  switch (_Order)
1842  {
1843  case memory_order_relaxed:
1844  _Store_relaxed_8(_Tgt, _Value);
1845  break;
1846 
1847  case memory_order_release:
1848  _Store_release_8(_Tgt, _Value);
1849  break;
1850 
1851  case memory_order_seq_cst:
1852  _Store_seq_cst_8(_Tgt, _Value);
1853  break;
1854 
1855  default:
1856  _INVALID_MEMORY_ORDER;
1857  break;
1858  }
1859  }
1860 
1861  /* _Atomic_load_8 */
1862 inline _Uint8_t _Load_seq_cst_8(volatile _Uint8_t *_Tgt)
1863  { /* load from *_Tgt atomically with
1864  sequentially consistent memory order */
1865  _Uint8_t _Value;
1866 
1867  #if defined(_M_X64)
1868  _Value = *_Tgt;
1869  _Compiler_barrier();
1870 
1871  #elif defined(_M_ARM)
1872  _Value = __ldrexd((volatile _LONGLONG *)_Tgt);
1873  _Memory_barrier();
1874 
1875  #elif defined(_M_ARM64)
1876  _Value = __iso_volatile_load64((volatile _LONGLONG *)_Tgt);
1877  _Memory_barrier();
1878 
1879  #else
1880  _Value = _InterlockedOr64((volatile _LONGLONG *)_Tgt, 0);
1881  #endif
1882 
1883  return (_Value);
1884  }
1885 
1886 inline _Uint8_t _Load_relaxed_8(volatile _Uint8_t *_Tgt)
1887  { /* load from *_Tgt atomically with
1888  relaxed memory order */
1889  _Uint8_t _Value;
1890 
1891  #if defined(_M_X64)
1892  _Value = *_Tgt;
1893 
1894  #elif defined(_M_ARM)
1895  _Value = __ldrexd((volatile _LONGLONG *)_Tgt);
1896 
1897  #elif defined(_M_ARM64)
1898  _Value = __iso_volatile_load64((volatile _LONGLONG *)_Tgt);
1899 
1900  #else
1901  _Value = _InterlockedOr64((volatile _LONGLONG *)_Tgt, 0);
1902  #endif
1903 
1904  return (_Value);
1905  }
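/* Illustrative note (not from the original source): the 8-byte loads above pick
   a different strategy per target: a plain 64-bit read on _M_X64 (naturally
   atomic when aligned), __ldrexd on 32-bit ARM, __iso_volatile_load64 on ARM64,
   and on 32-bit x86 the fallback

        _Value = _InterlockedOr64((volatile _LONGLONG *)_Tgt, 0);

   which or's in 0 through the CAS-based emulation defined earlier, yielding an
   atomic read of the full 64-bit value. */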
1906 
1907 inline _Uint8_t _Load_acquire_8(volatile _Uint8_t *_Tgt)
1908  { /* load from *_Tgt atomically with
1909  acquire memory order */
1910 
1911  return (_Load_seq_cst_8(_Tgt));
1912  }
1913 
1914 inline _Uint8_t _Atomic_load_8(
1915  volatile _Uint8_t *_Tgt, memory_order _Order)
1916  { /* load from *_Tgt atomically */
1917  switch (_Order)
1918  {
1919  case memory_order_relaxed:
1920  return (_Load_relaxed_8(_Tgt));
1921 
1922  case memory_order_consume:
1923  case memory_order_acquire:
1924  return (_Load_acquire_8(_Tgt));
1925 
1926  case memory_order_seq_cst:
1927  return (_Load_seq_cst_8(_Tgt));
1928 
1929  default:
1930  _INVALID_MEMORY_ORDER;
1931  return (0);
1932  }
1933  }
1934 
1935  /* _Atomic_exchange_8 */
1936 inline _Uint8_t _Exchange_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1937  { /* exchange _Value and *_Tgt atomically with
1938  sequentially consistent memory order */
1939 
1940  return (_INTRIN_SEQ_CST(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value));
1941  }
1942 
1943 inline _Uint8_t _Exchange_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1944  { /* exchange _Value and *_Tgt atomically with
1945  relaxed memory order */
1946 
1947  return (_INTRIN_RELAXED(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value));
1948  }
1949 
1950 inline _Uint8_t _Exchange_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1951  { /* exchange _Value and *_Tgt atomically with
1952  acquire memory order */
1953 
1954  return (_INTRIN_ACQUIRE(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value));
1955  }
1956 
1957 inline _Uint8_t _Exchange_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1958  { /* exchange _Value and *_Tgt atomically with
1959  release memory order */
1960 
1961  return (_INTRIN_RELEASE(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value));
1962  }
1963 
1964 inline _Uint8_t _Atomic_exchange_8(
1965  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
1966  { /* exchange _Value and *_Tgt atomically */
1967  switch (_Order)
1968  {
1969  case memory_order_relaxed:
1970  return (_Exchange_relaxed_8(_Tgt, _Value));
1971 
1972  case memory_order_consume:
1973  case memory_order_acquire:
1974  return (_Exchange_acquire_8(_Tgt, _Value));
1975 
1976  case memory_order_release:
1977  return (_Exchange_release_8(_Tgt, _Value));
1978 
1979  case memory_order_acq_rel:
1980  case memory_order_seq_cst:
1981  return (_Exchange_seq_cst_8(_Tgt, _Value));
1982 
1983  default:
1984  _INVALID_MEMORY_ORDER;
1985  return (0);
1986  }
1987  }
1988 
1989  /* _Atomic_compare_exchange_weak_8, _Atomic_compare_exchange_strong_8 */
1990 inline int _Compare_exchange_seq_cst_8(volatile _Uint8_t *_Tgt,
1991  _Uint8_t *_Exp, _Uint8_t _Value)
1992  { /* compare and exchange values atomically with
1993  sequentially consistent memory order */
1994  _Uint8_t _Old_exp = *_Exp; /* read before atomic operation */
1995 
1996  _Uint8_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange64)((volatile _LONGLONG *)_Tgt,
1997  _Value, _Old_exp);
1998 
1999  if (_Prev == _Old_exp)
2000  return (1);
2001  else
2002  { /* copy old value */
2003  *_Exp = _Prev;
2004  return (0);
2005  }
2006  }
2007 
2008 inline int _Compare_exchange_relaxed_8(volatile _Uint8_t *_Tgt,
2009  _Uint8_t *_Exp, _Uint8_t _Value)
2010  { /* compare and exchange values atomically with
2011  relaxed memory order */
2012  _Uint8_t _Old_exp = *_Exp; /* read before atomic operation */
2013 
2014  _Uint8_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange64)((volatile _LONGLONG *)_Tgt,
2015  _Value, _Old_exp);
2016 
2017  if (_Prev == _Old_exp)
2018  return (1);
2019  else
2020  { /* copy old value */
2021  *_Exp = _Prev;
2022  return (0);
2023  }
2024  }
2025 
2026 inline int _Compare_exchange_acquire_8(volatile _Uint8_t *_Tgt,
2027  _Uint8_t *_Exp, _Uint8_t _Value)
2028  { /* compare and exchange values atomically with
2029  acquire memory order */
2030  _Uint8_t _Old_exp = *_Exp; /* read before atomic operation */
2031 
2032  _Uint8_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange64)((volatile _LONGLONG *)_Tgt,
2033  _Value, _Old_exp);
2034 
2035  if (_Prev == _Old_exp)
2036  return (1);
2037  else
2038  { /* copy old value */
2039  *_Exp = _Prev;
2040  return (0);
2041  }
2042  }
2043 
2044 inline int _Compare_exchange_release_8(volatile _Uint8_t *_Tgt,
2045  _Uint8_t *_Exp, _Uint8_t _Value)
2046  { /* compare and exchange values atomically with
2047  release memory order */
2048  _Uint8_t _Old_exp = *_Exp; /* read before atomic operation */
2049 
2050  _Uint8_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange64)((volatile _LONGLONG *)_Tgt,
2051  _Value, _Old_exp);
2052 
2053  if (_Prev == _Old_exp)
2054  return (1);
2055  else
2056  { /* copy old value */
2057  *_Exp = _Prev;
2058  return (0);
2059  }
2060  }
2061 
2062 inline int _Atomic_compare_exchange_strong_8(
2063  volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value,
2064  memory_order _Order1, memory_order _Order2)
2065  { /* compare and exchange values atomically */
2066  _Validate_compare_exchange_memory_order(_Order1, _Order2);
2067 
2068  switch (_Memory_order_upper_bound(_Order1, _Order2))
2069  {
2070  case memory_order_relaxed:
2071  return (_Compare_exchange_relaxed_8(_Tgt, _Exp, _Value));
2072 
2073  case memory_order_consume:
2074  case memory_order_acquire:
2075  return (_Compare_exchange_acquire_8(_Tgt, _Exp, _Value));
2076 
2077  case memory_order_release:
2078  return (_Compare_exchange_release_8(_Tgt, _Exp, _Value));
2079 
2080  case memory_order_acq_rel:
2081  case memory_order_seq_cst:
2082  return (_Compare_exchange_seq_cst_8(_Tgt, _Exp, _Value));
2083 
2084  default:
2085  _INVALID_MEMORY_ORDER;
2086  return (0);
2087  }
2088  }
2089 
2090 inline int _Atomic_compare_exchange_weak_8(
2091  volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value,
2092  memory_order _Order1, memory_order _Order2)
2093  { /* compare and exchange values atomically */
2094  /* No weak compare-exchange is currently available,
2095  even for ARM, so fall back to strong */
2096  return (_Atomic_compare_exchange_strong_8(_Tgt, _Exp, _Value,
2097  _Order1, _Order2));
2098  }
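/* Illustrative sketch, not part of this header: the retry loop callers of
   the weak form are expected to write anyway. A strong compare-exchange
   never fails spuriously, so substituting it for the weak form (as above)
   can only remove iterations, never change results. The helper name and the
   increment it performs are hypothetical. */
inline _Uint8_t _Sketch_fetch_increment_8(volatile _Uint8_t *_Tgt)
    {
    _Uint8_t _Expected = *_Tgt;
    while (!_Atomic_compare_exchange_weak_8(_Tgt, &_Expected, _Expected + 1,
        memory_order_seq_cst, memory_order_seq_cst))
        ;   /* on failure _Expected was refreshed from *_Tgt; try again */
    return (_Expected);
    }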
2099 
2100  /* _Atomic_fetch_add_8, _Atomic_fetch_sub_8 */
2101 inline _Uint8_t _Fetch_add_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2102  { /* add _Value to *_Tgt atomically with
2103  sequentially consistent memory order */
2104 
2105  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd64)((volatile _LONGLONG *)_Tgt, _Value));
2106  }
2107 
2108 inline _Uint8_t _Fetch_add_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2109  { /* add _Value to *_Tgt atomically with
2110  relaxed memory order */
2111 
2112  return (_INTRIN_RELAXED(_InterlockedExchangeAdd64)((volatile _LONGLONG *)_Tgt, _Value));
2113  }
2114 
2115 inline _Uint8_t _Fetch_add_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2116  { /* add _Value to *_Tgt atomically with
2117  acquire memory order */
2118 
2119  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd64)((volatile _LONGLONG *)_Tgt, _Value));
2120  }
2121 
2122 inline _Uint8_t _Fetch_add_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2123  { /* add _Value to *_Tgt atomically with
2124  release memory order */
2125 
2126  return (_INTRIN_RELEASE(_InterlockedExchangeAdd64)((volatile _LONGLONG *)_Tgt, _Value));
2127  }
2128 
2129 inline _Uint8_t _Atomic_fetch_add_8(
2130  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
2131  { /* add _Value to *_Tgt atomically */
2132  switch (_Order)
2133  {
2134  case memory_order_relaxed:
2135  return (_Fetch_add_relaxed_8(_Tgt, _Value));
2136 
2137  case memory_order_consume:
2138  case memory_order_acquire:
2139  return (_Fetch_add_acquire_8(_Tgt, _Value));
2140 
2141  case memory_order_release:
2142  return (_Fetch_add_release_8(_Tgt, _Value));
2143 
2144  case memory_order_acq_rel:
2145  case memory_order_seq_cst:
2146  return (_Fetch_add_seq_cst_8(_Tgt, _Value));
2147 
2148  default:
2149  _INVALID_MEMORY_ORDER;
2150  return (0);
2151  }
2152  }
2153 
2154 inline _Uint8_t _Atomic_fetch_sub_8(
2155  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
2156  { /* subtract _Value from *_Tgt atomically */
2157  return (_Atomic_fetch_add_8(_Tgt, 0 - _Value, _Order));
2158  }
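/* Worked example, not part of this header: _Uint8_t is unsigned, so
   0 - _Value wraps modulo 2^64, and adding the wrapped value equals
   subtracting _Value. For _Value == 3: 0 - 3 == 0xFFFFFFFFFFFFFFFD, and
   10 + 0xFFFFFFFFFFFFFFFD == 7 (mod 2^64), i.e. 10 - 3. */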
2159 
2160  /* _Atomic_fetch_and_8 */
2161 inline _Uint8_t _Fetch_and_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2162  { /* and _Value with *_Tgt atomically with
2163  sequentially consistent memory order */
2164 
2165  return (_INTRIN_SEQ_CST(_InterlockedAnd64)((volatile _LONGLONG *)_Tgt, _Value));
2166  }
2167 
2168 inline _Uint8_t _Fetch_and_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2169  { /* and _Value with *_Tgt atomically with
2170  relaxed memory order */
2171 
2172  return (_INTRIN_RELAXED(_InterlockedAnd64)((volatile _LONGLONG *)_Tgt, _Value));
2173  }
2174 
2175 inline _Uint8_t _Fetch_and_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2176  { /* and _Value with *_Tgt atomically with
2177  acquire memory order */
2178 
2179  return (_INTRIN_ACQUIRE(_InterlockedAnd64)((volatile _LONGLONG *)_Tgt, _Value));
2180  }
2181 
2182 inline _Uint8_t _Fetch_and_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2183  { /* and _Value with *_Tgt atomically with
2184  release memory order */
2185 
2186  return (_INTRIN_RELEASE(_InterlockedAnd64)((volatile _LONGLONG *)_Tgt, _Value));
2187  }
2188 
2189 inline _Uint8_t _Atomic_fetch_and_8(
2190  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
2191  { /* and _Value with *_Tgt atomically */
2192  switch (_Order)
2193  {
2194  case memory_order_relaxed:
2195  return (_Fetch_and_relaxed_8(_Tgt, _Value));
2196 
2197  case memory_order_consume:
2198  case memory_order_acquire:
2199  return (_Fetch_and_acquire_8(_Tgt, _Value));
2200 
2201  case memory_order_release:
2202  return (_Fetch_and_release_8(_Tgt, _Value));
2203 
2204  case memory_order_acq_rel:
2205  case memory_order_seq_cst:
2206  return (_Fetch_and_seq_cst_8(_Tgt, _Value));
2207 
2208  default:
2209  _INVALID_MEMORY_ORDER;
2210  return (0);
2211  }
2212  }
2213 
2214  /* _Atomic_fetch_or_8 */
2215 inline _Uint8_t _Fetch_or_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2216  { /* or _Value with *_Tgt atomically with
2217  sequentially consistent memory order */
2218 
2219  return (_INTRIN_SEQ_CST(_InterlockedOr64)((volatile _LONGLONG *)_Tgt, _Value));
2220  }
2221 
2222 inline _Uint8_t _Fetch_or_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2223  { /* or _Value with *_Tgt atomically with
2224  relaxed memory order */
2225 
2226  return (_INTRIN_RELAXED(_InterlockedOr64)((volatile _LONGLONG *)_Tgt, _Value));
2227  }
2228 
2229 inline _Uint8_t _Fetch_or_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2230  { /* or _Value with *_Tgt atomically with
2231  acquire memory order */
2232 
2233  return (_INTRIN_ACQUIRE(_InterlockedOr64)((volatile _LONGLONG *)_Tgt, _Value));
2234  }
2235 
2236 inline _Uint8_t _Fetch_or_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2237  { /* or _Value with *_Tgt atomically with
2238  release memory order */
2239 
2240  return (_INTRIN_RELEASE(_InterlockedOr64)((volatile _LONGLONG *)_Tgt, _Value));
2241  }
2242 
2243 inline _Uint8_t _Atomic_fetch_or_8(
2244  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
2245  { /* or _Value with *_Tgt atomically */
2246  switch (_Order)
2247  {
2248  case memory_order_relaxed:
2249  return (_Fetch_or_relaxed_8(_Tgt, _Value));
2250 
2251  case memory_order_consume:
2252  case memory_order_acquire:
2253  return (_Fetch_or_acquire_8(_Tgt, _Value));
2254 
2255  case memory_order_release:
2256  return (_Fetch_or_release_8(_Tgt, _Value));
2257 
2258  case memory_order_acq_rel:
2259  case memory_order_seq_cst:
2260  return (_Fetch_or_seq_cst_8(_Tgt, _Value));
2261 
2262  default:
2263  _INVALID_MEMORY_ORDER;
2264  return (0);
2265  }
2266  }
2267 
2268  /* _Atomic_fetch_xor_8 */
2269 inline _Uint8_t _Fetch_xor_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2270  { /* xor _Value with *_Tgt atomically with
2271  sequentially consistent memory order */
2272 
2273  return (_INTRIN_SEQ_CST(_InterlockedXor64)((volatile _LONGLONG *)_Tgt, _Value));
2274  }
2275 
2276 inline _Uint8_t _Fetch_xor_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2277  { /* xor _Value with *_Tgt atomically with
2278  relaxed memory order */
2279 
2280  return (_INTRIN_RELAXED(_InterlockedXor64)((volatile _LONGLONG *)_Tgt, _Value));
2281  }
2282 
2283 inline _Uint8_t _Fetch_xor_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2284  { /* xor _Value with *_Tgt atomically with
2285  acquire memory order */
2286 
2287  return (_INTRIN_ACQUIRE(_InterlockedXor64)((volatile _LONGLONG *)_Tgt, _Value));
2288  }
2289 
2290 inline _Uint8_t _Fetch_xor_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2291  { /* xor _Value with *_Tgt atomically with
2292  release memory order */
2293 
2294  return (_INTRIN_RELEASE(_InterlockedXor64)((volatile _LONGLONG *)_Tgt, _Value));
2295  }
2296 
2297 inline _Uint8_t _Atomic_fetch_xor_8(
2298  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
2299  { /* xor _Value with *_Tgt atomically */
2300  switch (_Order)
2301  {
2302  case memory_order_relaxed:
2303  return (_Fetch_xor_relaxed_8(_Tgt, _Value));
2304 
2305  case memory_order_consume:
2306  case memory_order_acquire:
2307  return (_Fetch_xor_acquire_8(_Tgt, _Value));
2308 
2309  case memory_order_release:
2310  return (_Fetch_xor_release_8(_Tgt, _Value));
2311 
2312  case memory_order_acq_rel:
2313  case memory_order_seq_cst:
2314  return (_Fetch_xor_seq_cst_8(_Tgt, _Value));
2315 
2316  default:
2317  _INVALID_MEMORY_ORDER;
2318  return (0);
2319  }
2320  }
2321 
2322 inline int _Atomic_flag_test_and_set(volatile _Atomic_flag_t *_Flag,
2323  memory_order _Order)
2324  { /* atomically test flag and set to true */
2325  switch (_Order)
2326  {
2327  case memory_order_relaxed:
2328  return (_INTRIN_RELAXED(_interlockedbittestandset)(_Flag, 0));
2329 
2330  case memory_order_consume:
2331  case memory_order_acquire:
2332  return (_INTRIN_ACQUIRE(_interlockedbittestandset)(_Flag, 0));
2333 
2334  case memory_order_release:
2335  return (_INTRIN_RELEASE(_interlockedbittestandset)(_Flag, 0));
2336 
2337  case memory_order_acq_rel:
2338  case memory_order_seq_cst:
2339  return (_INTRIN_SEQ_CST(_interlockedbittestandset)(_Flag, 0));
2340 
2341  default:
2342  _INVALID_MEMORY_ORDER;
2343  return (0);
2344  }
2345  }
2346 
2347 inline void _Atomic_flag_clear(volatile _Atomic_flag_t *_Flag,
2348  memory_order _Order)
2349  { /* atomically clear flag */
2350  static_assert(sizeof(_Atomic_flag_t) == sizeof(_Uint4_t),
2351  "Unexpected _Atomic_flag_t size");
2352 
2353  switch (_Order)
2354  {
2355  case memory_order_relaxed:
2356  case memory_order_release:
2357  case memory_order_seq_cst:
2358  _Atomic_store_4((volatile _Uint4_t *)_Flag, 0, _Order);
2359  break;
2360 
2361  default:
2362  _INVALID_MEMORY_ORDER;
2363  break;
2364  }
2365  }
2366 
2367 inline void _Atomic_thread_fence(memory_order _Order)
2368  { /* force memory visibility and inhibit compiler reordering */
2369  #if defined(_M_ARM) || defined(_M_ARM64)
2370  if (_Order != memory_order_relaxed)
2371  {
2372  _Memory_barrier();
2373  }
2374 
2375  #else
2376  _Compiler_barrier();
2377  if (_Order == memory_order_seq_cst)
2378  { /* force visibility */
2379  static _Uint4_t _Guard;
2380  _Atomic_exchange_4(&_Guard, 0, memory_order_seq_cst);
2381  _Compiler_barrier();
2382  }
2383  #endif
2384  }
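/* Illustrative sketch, not part of this header: the store-then-load
   handshake that needs the full fence above. On x86/x64 the exchange on
   _Guard supplies the required locked instruction; the compiler barriers
   alone only stop compiler reordering. All names below are hypothetical,
   and the symmetric code for the other thread is omitted. */
static _Uint4_t _Sketch_x, _Sketch_y;

inline _Uint4_t _Sketch_thread_a(void)
    {   /* without a seq_cst fence in both threads, both could read 0 */
    _Atomic_store_4(&_Sketch_x, 1, memory_order_relaxed);
    _Atomic_thread_fence(memory_order_seq_cst);
    return (_Atomic_load_4(&_Sketch_y, memory_order_relaxed));
    }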
2385 
2386 inline void _Atomic_signal_fence(memory_order _Order)
2387  { /* inhibit compiler reordering */
2388  _Compiler_barrier();
2389  }
2390 
2391  #if defined(_M_ARM) || defined(_M_ARM64)
2392  #define _YIELD_PROCESSOR __yield()
2393 
2394  #else
2395  #define _YIELD_PROCESSOR
2396  #endif
2397 
2398  /* SPIN LOCK FOR LOCKING VERSIONS OF OPERATIONS */
2399  /* Use acquire semantics on lock and release on unlock. Given our
2400  current atomic_flag implementation, this ensures not just
2401  atomicity but also sequential consistency. */
2402 
2403 inline void _Lock_spin_lock(
2404  volatile _Atomic_flag_t *_Flag)
2405  { /* spin until _Flag successfully set */
2406  while (_ATOMIC_FLAG_TEST_AND_SET(_Flag, memory_order_acquire))
2407  _YIELD_PROCESSOR;
2408  }
2409 
2410 inline void _Unlock_spin_lock(
2411  volatile _Atomic_flag_t *_Flag)
2412  { /* release previously obtained lock */
2413  _ATOMIC_FLAG_CLEAR(_Flag, memory_order_release);
2414  }
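/* Illustrative sketch, not part of this header: the shape of a locking
   read-modify-write built on the pair above. The flag and the 16-byte
   payload are hypothetical; acquire on lock and release on unlock keep the
   whole update inside the critical section, as the comment above notes. */
static _Atomic_flag_t _Sketch_lock;
static unsigned long long _Sketch_payload[2];

inline void _Sketch_locked_add(unsigned long long _Lo, unsigned long long _Hi)
    {
    _Lock_spin_lock(&_Sketch_lock);
    _Sketch_payload[0] += _Lo;   /* ordinary code, made atomic by the lock */
    _Sketch_payload[1] += _Hi;
    _Unlock_spin_lock(&_Sketch_lock);
    }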
2415 
2416  /* ATOMIC OPERATIONS FOR OBJECTS WITH SIZES THAT
2417  DON'T MATCH THE SIZE OF ANY INTEGRAL TYPE */
2418 inline void _Atomic_copy(
2419  volatile _Atomic_flag_t *_Flag, size_t _Size,
2420  volatile void *_Tgt, volatile const void *_Src,
2421  memory_order _Order)
2422  { /* atomically copy *_Src to *_Tgt with memory ordering */
2423  _Lock_spin_lock(_Flag);
2424  _CSTD memcpy((void *)_Tgt, (void *)_Src, _Size);
2425  _Unlock_spin_lock(_Flag);
2426  }
2427 
2428 inline void _Atomic_exchange(
2429  volatile _Atomic_flag_t *_Flag, size_t _Size,
2430  volatile void *_Tgt, volatile void *_Src,
2431  memory_order _Order)
2432  { /* atomically swap *_Src and *_Tgt with memory ordering */
2433  unsigned char *_Left = (unsigned char *)_Tgt;
2434  unsigned char *_Right = (unsigned char *)_Src;
2435 
2436  _Lock_spin_lock(_Flag);
2437  for (; 0 < _Size; --_Size)
2438  { /* copy bytes */
2439  unsigned char _Tmp = *_Left;
2440  *_Left++ = *_Right;
2441  *_Right++ = _Tmp;
2442  }
2443  _Unlock_spin_lock(_Flag);
2444  }
2445 
2446 inline int _Atomic_compare_exchange_weak(
2447  volatile _Atomic_flag_t *_Flag, size_t _Size,
2448  volatile void *_Tgt, volatile void *_Exp, const volatile void *_Src,
2449  memory_order _Order1, memory_order _Order2)
2450  { /* atomically compare and exchange with memory ordering */
2451  int _Result;
2452 
2453  _Lock_spin_lock(_Flag);
2454  _Result = _CSTD memcmp((const void *)_Tgt, (const void *)_Exp, _Size) == 0;
2455  if (_Result != 0)
2456  _CSTD memcpy((void *)_Tgt, (void *)_Src, _Size);
2457  else
2458  _CSTD memcpy((void *)_Exp, (void *)_Tgt, _Size);
2459  _Unlock_spin_lock(_Flag);
2460  return (_Result);
2461  }
2462 
2463 inline int _Atomic_compare_exchange_strong(
2464  volatile _Atomic_flag_t *_Flag, size_t _Size,
2465  volatile void *_Tgt, volatile void *_Exp, const volatile void *_Src,
2466  memory_order _Order1, memory_order _Order2)
2467  { /* atomically compare and exchange with memory ordering */
2468  return (_Atomic_compare_exchange_weak(_Flag, _Size, _Tgt, _Exp, _Src,
2469  _Order1, _Order2));
2470  }
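/* Illustrative sketch, not part of this header: how these type-erased entry
   points would be driven for a 12-byte payload that has no lock-free path.
   The struct, its flag member, and the helper are hypothetical, not the
   library's actual atomic<T> layout. */
struct _Sketch_big
    {   /* payload plus the spin-lock flag that guards it */
    unsigned int _Vals[3];
    _Atomic_flag_t _My_flag;
    };

inline void _Sketch_big_store(volatile _Sketch_big *_Obj,
    const _Sketch_big *_Desired)
    {   /* route the store through the spin-locked byte copy */
    _Atomic_copy(&_Obj->_My_flag, sizeof(_Desired->_Vals),
        &_Obj->_Vals, &_Desired->_Vals, memory_order_seq_cst);
    }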
2471 
2472  /* LOCK-FREE PROPERTY FOR INTEGRAL TYPES */
2473 inline int _Atomic_is_lock_free_1(void)
2474  { /* return true if 1-byte atomic values are lock-free */
2475  return (1 <= _ATOMIC_MAXBYTES_LOCK_FREE);
2476  }
2477 
2478 inline int _Atomic_is_lock_free_2(void)
2479  { /* return true if 2-byte atomic values are lock-free */
2480  return (2 <= _ATOMIC_MAXBYTES_LOCK_FREE);
2481  }
2482 
2483 inline int _Atomic_is_lock_free_4(void)
2484  { /* return true if 4-byte atomic values are lock-free */
2485  return (4 <= _ATOMIC_MAXBYTES_LOCK_FREE);
2486  }
2487 
2488 inline int _Atomic_is_lock_free_8(void)
2489  { /* return true if 8-byte atomic values are lock-free */
2490  return (8 <= _ATOMIC_MAXBYTES_LOCK_FREE);
2491  }
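/* Illustrative sketch, not part of this header: roughly how a caller would
   choose between the fixed-width entry points above and the spin-locked
   family, given only an object's size in bytes. The helper is hypothetical. */
inline int _Sketch_uses_lock_free_path(size_t _Size)
    {
    return (_Size <= _ATOMIC_MAXBYTES_LOCK_FREE
        && (_Size == 1 || _Size == 2 || _Size == 4 || _Size == 8));
    }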
2492 
2493  #if _USE_INTERLOCKED_REFCOUNTING == 0
2494  /* ATOMIC REFERENCE COUNTING */
2495 inline _Atomic_integral_t _Inc_atomic_counter_explicit(
2496  _Atomic_counter_t& _Counter, memory_order _Order)
2497  { // atomically increment counter and return result
2498  return (_Atomic_fetch_add_4(&_Counter, 1, _Order) + 1);
2499  }
2500 
2501 inline _Atomic_integral_t _Inc_atomic_counter(_Atomic_counter_t& _Counter)
2502  { // atomically increment counter and return result
2503  return (_Inc_atomic_counter_explicit(_Counter, memory_order_seq_cst));
2504  }
2505 
2506 inline _Atomic_integral_t _Dec_atomic_counter_explicit(
2507  _Atomic_counter_t& _Counter, memory_order _Order)
2508  { // atomically decrement counter and return result
2509  return (_Atomic_fetch_sub_4(&_Counter, 1, _Order) - 1);
2510  }
2511 
2512 inline _Atomic_integral_t _Dec_atomic_counter(_Atomic_counter_t& _Counter)
2513  { // atomically decrement counter and return result
2514  return (_Dec_atomic_counter_explicit(_Counter, memory_order_seq_cst));
2515  }
2516 
2517 inline _Atomic_integral_t _Load_atomic_counter_explicit(
2518  _Atomic_counter_t& _Counter, memory_order _Order)
2519  { // atomically load counter and return result
2520  return (_Atomic_load_4(&_Counter, _Order));
2521  }
2522 
2523 inline _Atomic_integral_t _Load_atomic_counter(_Atomic_counter_t& _Counter)
2524  { // atomically load counter and return result
2525  return (_Load_atomic_counter_explicit(_Counter, memory_order_seq_cst));
2526  }
2527 
2528 inline _Atomic_integral_t _Compare_increment_atomic_counter_explicit(
2529  _Atomic_counter_t& _Counter,
2530  _Atomic_integral_t _Expected,
2531  memory_order _Order)
2532  { // atomically compare and increment counter and return result
2533  return (_Atomic_compare_exchange_strong_4(
2534  &_Counter, &_Expected, _Expected + 1,
2535  _Order, _Order));
2536  }
2537 
2538 inline _Atomic_integral_t _Compare_increment_atomic_counter(
2539  _Atomic_counter_t& _Counter, _Atomic_integral_t _Expected)
2540  { // atomically compare and increment counter and return result
2541  return (_Compare_increment_atomic_counter_explicit(
2542  _Counter, _Expected, memory_order_seq_cst));
2543  }
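/* Illustrative sketch, not part of this header, assuming the
   _USE_INTERLOCKED_REFCOUNTING == 0 configuration above: a minimal intrusive
   reference count built on these helpers. The struct and functions are
   hypothetical. _Compare_increment_atomic_counter increments only when the
   count is still the value last observed, the pattern used to revive a count
   that may already have reached zero. */
struct _Sketch_counted
    {
    _Atomic_counter_t _Uses;
    };

inline void _Sketch_retain(_Sketch_counted *_Ptr)
    {   /* unconditional increment */
    _Inc_atomic_counter(_Ptr->_Uses);
    }

inline int _Sketch_release(_Sketch_counted *_Ptr)
    {   /* nonzero when the last reference just went away */
    return (_Dec_atomic_counter(_Ptr->_Uses) == 0);
    }

inline int _Sketch_try_retain(_Sketch_counted *_Ptr)
    {   /* succeed only while the count is observed nonzero */
    _Atomic_integral_t _Count = _Load_atomic_counter(_Ptr->_Uses);
    while (_Count != 0)
        {
        if (_Compare_increment_atomic_counter(_Ptr->_Uses, _Count))
            return (1);
        _Count = _Load_atomic_counter(_Ptr->_Uses);
        }
    return (0);
    }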
2544  #endif /* _USE_INTERLOCKED_REFCOUNTING == 0 */
2545 _STD_END
2546 
2547  #if defined(_M_IX86)
2548 #pragma pop_macro("_InterlockedExchange64")
2549 #pragma pop_macro("_InterlockedExchangeAdd64")
2550 #pragma pop_macro("_InterlockedAnd64")
2551 #pragma pop_macro("_InterlockedOr64")
2552 #pragma pop_macro("_InterlockedXor64")
2553  #endif /* defined(_M_IX86) */
2554 
2555  #pragma pop_macro("new")
2556  #pragma warning(pop)
2557  #pragma pack(pop)
2558 #endif /* RC_INVOKED */
2559 #endif /* _XATOMIC_H */
2560 
2561 /*
2562  * Copyright (c) by P.J. Plauger. All rights reserved.
2563  * Consult your license regarding permissions and restrictions.
2564 V6.50:0009 */