xatomic.h
1 /* xatomic.h internal header */
2 #pragma once
3 #ifndef _XATOMIC_H
4 #define _XATOMIC_H
5 #ifndef RC_INVOKED
6 #include <xatomic0.h>
7 #include <stddef.h> // for size_t
8 #include <stdlib.h>
9 #include <string.h>
10 
11 #include <intrin0.h>
12 #include <xutility>
13 
14  #pragma pack(push,_CRT_PACKING)
15  #pragma warning(push,_STL_WARNING_LEVEL)
16  #pragma warning(disable: _STL_DISABLED_WARNINGS)
17  #pragma push_macro("new")
18  #undef new
19 
20  #pragma warning(disable: 4793) // function compiled as native
21 
22  #define _Compiler_barrier() _ReadWriteBarrier()
23 
24  #if defined(_M_ARM)
25  #define _Memory_barrier() __dmb(0xB /* _ARM_BARRIER_ISH */)
26  #endif /* defined(_M_ARM) */
27 
28  #if defined(_M_ARM64)
29  #define _Memory_barrier() __dmb(0xB /* _ARM64_BARRIER_ISH */)
30  #endif /* defined(_M_ARM64) */
31 
32  /* Interlocked intrinsic mapping for _nf/_acq/_rel */
33  #if defined(_M_ARM) || defined(_M_ARM64)
34  #define _INTRIN_RELAXED(x) _CONCAT(x, _nf)
35  #define _INTRIN_ACQUIRE(x) _CONCAT(x, _acq)
36  #define _INTRIN_RELEASE(x) _CONCAT(x, _rel)
37  #define _INTRIN_SEQ_CST(x) x
38  #else /* defined(_M_ARM) || defined(_M_ARM64) */
39  #define _INTRIN_RELAXED(x) x
40  #define _INTRIN_ACQUIRE(x) x
41  #define _INTRIN_RELEASE(x) x
42  #define _INTRIN_SEQ_CST(x) x
43  #endif /* defined(_M_ARM) || defined(_M_ARM64) */
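/* Example (editorial annotation, not part of xatomic.h): on ARM and ARM64 the
   _INTRIN_* wrappers select the suffixed intrinsic variant via _CONCAT, while
   on x86/x64 every variant maps to the plain, full-barrier intrinsic: */
/*   _INTRIN_ACQUIRE(_InterlockedExchange8)(_Tgt, _Value)                     */
/*     ARM/ARM64 -> _InterlockedExchange8_acq(_Tgt, _Value)                   */
/*     x86/x64   -> _InterlockedExchange8(_Tgt, _Value)                       */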
44 
45  #if defined(_M_IX86)
46 #pragma push_macro("_InterlockedExchange64")
47 #pragma push_macro("_InterlockedExchangeAdd64")
48 #pragma push_macro("_InterlockedAnd64")
49 #pragma push_macro("_InterlockedOr64")
50 #pragma push_macro("_InterlockedXor64")
51 
52 #undef _InterlockedExchange64
53 #undef _InterlockedExchangeAdd64
54 #undef _InterlockedAnd64
55 #undef _InterlockedOr64
56 #undef _InterlockedXor64
57 
58 #define _InterlockedExchange64 _InterlockedExchange64_INLINE
59 #define _InterlockedExchangeAdd64 _InterlockedExchangeAdd64_INLINE
60 #define _InterlockedAnd64 _InterlockedAnd64_INLINE
61 #define _InterlockedOr64 _InterlockedOr64_INLINE
62 #define _InterlockedXor64 _InterlockedXor64_INLINE
63 
64 inline long long _InterlockedExchange64(volatile long long *_Tgt, long long _Value)
65 {
66  long long _Oldval;
68  do
69  {
70  _Oldval = *_Tgt;
71  } while (_Oldval != _InterlockedCompareExchange64(_Tgt, _Value, _Oldval));
73 
74  return (_Oldval);
75 }
76 
77 inline long long _InterlockedExchangeAdd64(volatile long long *_Tgt, long long _Value)
78 {
79  long long _Oldval, _Newval;
81  do
82  {
83  _Oldval = *_Tgt;
84  _Newval = _Oldval + _Value;
85  } while (_Oldval != _InterlockedCompareExchange64(_Tgt, _Newval, _Oldval));
87 
88  return (_Oldval);
89 }
90 
91 inline long long _InterlockedAnd64(volatile long long *_Tgt, long long _Value)
92 {
93  long long _Oldval, _Newval;
95  do
96  {
97  _Oldval = *_Tgt;
98  _Newval = _Oldval & _Value;
99  } while (_Oldval != _InterlockedCompareExchange64(_Tgt, _Newval, _Oldval));
101 
102  return (_Oldval);
103 }
104 
105 inline long long _InterlockedOr64(volatile long long *_Tgt, long long _Value)
106 {
107  long long _Oldval, _Newval;
109  do
110  {
111  _Oldval = *_Tgt;
112  _Newval = _Oldval | _Value;
113  } while (_Oldval != _InterlockedCompareExchange64(_Tgt, _Newval, _Oldval));
115 
116  return (_Oldval);
117 }
118 
119 inline long long _InterlockedXor64(volatile long long *_Tgt, long long _Value)
120 {
121  long long _Oldval, _Newval;
123  do
124  {
125  _Oldval = *_Tgt;
126  _Newval = _Oldval ^ _Value;
127  } while (_Oldval != _InterlockedCompareExchange64(_Tgt, _Newval, _Oldval));
129 
130  return (_Oldval);
131 }
132  #endif /* defined(_M_IX86) */
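/* Example (editorial annotation, not part of xatomic.h): each inline helper
   above follows the same compare-exchange loop - read the old value, compute
   the new value, and retry until _InterlockedCompareExchange64 confirms that
   no other thread intervened. Any 64-bit read-modify-write can be emulated on
   32-bit x86 the same way; the helper below is hypothetical and only
   illustrates the pattern. */
inline long long _InterlockedMin64_INLINE(volatile long long *_Tgt, long long _Value)
{
 long long _Oldval, _Newval;

 do
 {
 _Oldval = *_Tgt;
 _Newval = _Value < _Oldval ? _Value : _Oldval; /* candidate minimum */
 } while (_Oldval != _InterlockedCompareExchange64(_Tgt, _Newval, _Oldval));

 return (_Oldval);
}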
133 
135  /* TYPEDEFS FOR INTERNAL ARITHMETIC TYPES */
136 typedef unsigned char _Uint1_t;
137 typedef unsigned short _Uint2_t;
138 // xatomic0.h provides _Uint4_t
139 typedef unsigned long long _Uint8_t;
140 
141  /* TYPEDEF _Atomic_flag_t */
142 typedef long _Atomic_flag_t;
143 
144  #ifndef _INVALID_MEMORY_ORDER
145  #if _ITERATOR_DEBUG_LEVEL == 2
146  #define _INVALID_MEMORY_ORDER _DEBUG_ERROR("Invalid memory_order")
147  #elif _ITERATOR_DEBUG_LEVEL == 1
148  #define _INVALID_MEMORY_ORDER _SCL_SECURE_VALIDATE("Invalid memory_order" && 0)
149  #elif _ITERATOR_DEBUG_LEVEL == 0
150  #define _INVALID_MEMORY_ORDER static_cast<void>(0)
151  #endif /* _ITERATOR_DEBUG_LEVEL */
152  #endif /* _INVALID_MEMORY_ORDER */
153 
154 inline memory_order _Memory_order_upper_bound(memory_order _Order1,
155  memory_order _Order2)
156  { /* find upper bound of two memory orders,
157  based on the following partial order:
158 
159          seq_cst
160             |
161          acq_rel
162         /       \
163     acquire   release
164        |         |
165     consume      |
166         \       /
167          relaxed
168 
169  */
170 
171  static constexpr memory_order _Upper[6][6] = { /* combined upper bounds */
172  { memory_order_relaxed, memory_order_consume, memory_order_acquire,
173  memory_order_release, memory_order_acq_rel, memory_order_seq_cst },
174  { memory_order_consume, memory_order_consume, memory_order_acquire,
175  memory_order_acq_rel, memory_order_acq_rel, memory_order_seq_cst },
176  { memory_order_acquire, memory_order_acquire, memory_order_acquire,
177  memory_order_acq_rel, memory_order_acq_rel, memory_order_seq_cst },
178  { memory_order_release, memory_order_acq_rel, memory_order_acq_rel,
179  memory_order_release, memory_order_acq_rel, memory_order_seq_cst },
180  { memory_order_acq_rel, memory_order_acq_rel, memory_order_acq_rel,
181  memory_order_acq_rel, memory_order_acq_rel, memory_order_seq_cst },
182  { memory_order_seq_cst, memory_order_seq_cst, memory_order_seq_cst,
183  memory_order_seq_cst, memory_order_seq_cst, memory_order_seq_cst
184  }
185  };
186 
187  if ((_Order1 < 0) || (6 <= _Order1)
188  || (_Order2 < 0) || (6 <= _Order2))
189  { /* launder memory order */
190  _INVALID_MEMORY_ORDER;
191  return (memory_order_seq_cst);
192  }
193  return (_Upper[_Order1][_Order2]);
194  }
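/* Example (editorial annotation, not part of xatomic.h): the table implements
   the least upper bound of the two orders under the partial order drawn above.
   For a compare-exchange that succeeds with memory_order_release and fails
   with memory_order_acquire, dispatch uses the combined order: */
/*   _Memory_order_upper_bound(memory_order_release, memory_order_acquire)    */
/*     == memory_order_acq_rel  (weakest order at least as strong as both)    */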
195 
196 inline void _Validate_compare_exchange_memory_order(
197  memory_order _Success, memory_order _Failure)
198  { /* validate success/failure */
199  /* _Failure may not be memory_order_release or memory_order_acq_rel
200  and may not be stronger than _Success */
201  switch (_Failure)
202  {
203  case memory_order_relaxed:
204  break;
205 
206  case memory_order_seq_cst:
207  if (_Success != memory_order_seq_cst)
208  _INVALID_MEMORY_ORDER;
209  break;
210 
211  case memory_order_acquire:
212  if ((_Success == memory_order_consume) ||
213  (_Success == memory_order_relaxed))
214  _INVALID_MEMORY_ORDER;
215  break;
216 
217  case memory_order_consume:
218  if (_Success == memory_order_relaxed)
219  _INVALID_MEMORY_ORDER;
220  break;
221 
222  default:
223  _INVALID_MEMORY_ORDER;
224  break;
225  }
226  }
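/* Example (editorial annotation, not part of xatomic.h): the checks above
   reject a failure order that is a release order or stronger than the success
   order, matching the compare_exchange requirements in the standard: */
/*   _Validate_compare_exchange_memory_order(memory_order_acq_rel, memory_order_acquire);  OK        */
/*   _Validate_compare_exchange_memory_order(memory_order_relaxed, memory_order_acquire);  rejected  */
/*   _Validate_compare_exchange_memory_order(memory_order_seq_cst, memory_order_release);  rejected  */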
227 
228 
229  /* _Atomic_store_1 */
230 inline void _Store_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
231  { /* store _Value atomically with relaxed memory order */
232 
233  #if defined(_M_ARM) || defined(_M_ARM64)
234  __iso_volatile_store8((volatile char *)_Tgt, _Value);
235 
236  #else
237  *_Tgt = _Value;
238  #endif
239  }
240 
241 inline void _Store_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
242  { /* store _Value atomically with release memory order */
243 
244  #if defined(_M_ARM) || defined(_M_ARM64)
245  _Memory_barrier();
246  __iso_volatile_store8((volatile char *)_Tgt, _Value);
247 
248  #else
249  _Compiler_barrier();
250  *_Tgt = _Value;
251  #endif
252  }
253 
254 inline void _Store_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
255  { /* store _Value atomically with
256  sequentially consistent memory order */
257 
258  #if defined(_M_ARM) || defined(_M_ARM64)
259  _Memory_barrier();
260  __iso_volatile_store8((volatile char *)_Tgt, _Value);
261  _Memory_barrier();
262 
263  #else
264  _INTRIN_SEQ_CST(_InterlockedExchange8)((volatile char *)_Tgt, _Value);
265  #endif
266  }
267 
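/* Note (editorial annotation, not part of xatomic.h): on x86/x64 only the
   sequentially consistent store needs hardware ordering, so _Store_seq_cst_1
   issues a locked exchange (a full fence) and discards the old value; relaxed
   and release stores compile to a plain store, with _Compiler_barrier() alone
   preventing compiler reordering in the release case. */
/*   _Atomic_store_1(&_Flag, 1, memory_order_seq_cst);   xchg: store + full fence    */
/*   _Atomic_store_1(&_Flag, 1, memory_order_release);   plain store, compiler barrier only */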
268 inline void _Atomic_store_1(
269  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
270  { /* store _Value atomically */
271  switch (_Order)
272  {
273  case memory_order_relaxed:
274  _Store_relaxed_1(_Tgt, _Value);
275  break;
276 
277  case memory_order_release:
278  _Store_release_1(_Tgt, _Value);
279  break;
280 
281  case memory_order_seq_cst:
282  _Store_seq_cst_1(_Tgt, _Value);
283  break;
284 
285  default:
286  _INVALID_MEMORY_ORDER;
287  break;
288  }
289  }
290 
291  /* _Atomic_load_1 */
292 inline _Uint1_t _Load_seq_cst_1(volatile _Uint1_t *_Tgt)
293  { /* load from *_Tgt atomically with
294  sequentially consistent memory order */
295  _Uint1_t _Value;
296 
297  #if defined(_M_ARM) || defined(_M_ARM64)
298  _Value = __iso_volatile_load8((volatile char *)_Tgt);
299  _Memory_barrier();
300 
301  #else
302  _Value = *_Tgt;
303  _Compiler_barrier();
304  #endif
305 
306  return (_Value);
307  }
308 
309 inline _Uint1_t _Load_relaxed_1(volatile _Uint1_t *_Tgt)
310  { /* load from *_Tgt atomically with
311  relaxed memory order */
312  _Uint1_t _Value;
313 
314  #if defined(_M_ARM) || defined(_M_ARM64)
315  _Value = __iso_volatile_load8((volatile char *)_Tgt);
316 
317  #else
318  _Value = *_Tgt;
319  #endif
320 
321  return (_Value);
322  }
323 
324 inline _Uint1_t _Load_acquire_1(volatile _Uint1_t *_Tgt)
325  { /* load from *_Tgt atomically with
326  acquire memory order */
327 
328  return (_Load_seq_cst_1(_Tgt));
329  }
330 
331 inline _Uint1_t _Atomic_load_1(
332  volatile _Uint1_t *_Tgt, memory_order _Order)
333  { /* load from *_Tgt atomically */
334  switch (_Order)
335  {
336  case memory_order_relaxed:
337  return (_Load_relaxed_1(_Tgt));
338 
339  case memory_order_consume:
340  case memory_order_acquire:
341  return (_Load_acquire_1(_Tgt));
342 
343  case memory_order_seq_cst:
344  return (_Load_seq_cst_1(_Tgt));
345 
346  default:
347  _INVALID_MEMORY_ORDER;
348  return (0);
349  }
350  }
351 
352  /* _Atomic_exchange_1 */
353 inline _Uint1_t _Exchange_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
354  { /* exchange _Value and *_Tgt atomically with
355  sequentially consistent memory order */
356 
357  return (_INTRIN_SEQ_CST(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
358  }
359 
360 inline _Uint1_t _Exchange_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
361  { /* exchange _Value and *_Tgt atomically with
362  relaxed memory order */
363 
364  return (_INTRIN_RELAXED(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
365  }
366 
367 inline _Uint1_t _Exchange_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
368  { /* exchange _Value and *_Tgt atomically with
369  acquire memory order */
370 
371  return (_INTRIN_ACQUIRE(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
372  }
373 
374 inline _Uint1_t _Exchange_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
375  { /* exchange _Value and *_Tgt atomically with
376  release memory order */
377 
378  return (_INTRIN_RELEASE(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
379  }
380 
381 inline _Uint1_t _Atomic_exchange_1(
382  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
383  { /* exchange _Value and *_Tgt atomically */
384  switch (_Order)
385  {
386  case memory_order_relaxed:
387  return (_Exchange_relaxed_1(_Tgt, _Value));
388 
389  case memory_order_consume:
390  case memory_order_acquire:
391  return (_Exchange_acquire_1(_Tgt, _Value));
392 
393  case memory_order_release:
394  return (_Exchange_release_1(_Tgt, _Value));
395 
396  case memory_order_acq_rel:
397  case memory_order_seq_cst:
398  return (_Exchange_seq_cst_1(_Tgt, _Value));
399 
400  default:
401  _INVALID_MEMORY_ORDER;
402  return (0);
403  }
404  }
405 
406  /* _Atomic_compare_exchange_weak_1, _Atomic_compare_exchange_strong_1 */
407 inline int _Compare_exchange_seq_cst_1(volatile _Uint1_t *_Tgt,
408  _Uint1_t *_Exp, _Uint1_t _Value)
409  { /* compare and exchange values atomically with
410  sequentially consistent memory order */
411  _Uint1_t _Old_exp = *_Exp; /* read before atomic operation */
412 
413  _Uint1_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange8)((volatile char *)_Tgt,
414  _Value, _Old_exp);
415 
416  if (_Prev == _Old_exp)
417  return (1);
418  else
419  { /* copy old value */
420  *_Exp = _Prev;
421  return (0);
422  }
423  }
424 
425 inline int _Compare_exchange_relaxed_1(volatile _Uint1_t *_Tgt,
426  _Uint1_t *_Exp, _Uint1_t _Value)
427  { /* compare and exchange values atomically with
428  relaxed memory order */
429  _Uint1_t _Old_exp = *_Exp; /* read before atomic operation */
430 
431  _Uint1_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange8)((volatile char *)_Tgt,
432  _Value, _Old_exp);
433 
434  if (_Prev == _Old_exp)
435  return (1);
436  else
437  { /* copy old value */
438  *_Exp = _Prev;
439  return (0);
440  }
441  }
442 
443 inline int _Compare_exchange_acquire_1(volatile _Uint1_t *_Tgt,
444  _Uint1_t *_Exp, _Uint1_t _Value)
445  { /* compare and exchange values atomically with
446  acquire memory order */
447  _Uint1_t _Old_exp = *_Exp; /* read before atomic operation */
448 
449  _Uint1_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange8)((volatile char *)_Tgt,
450  _Value, _Old_exp);
451 
452  if (_Prev == _Old_exp)
453  return (1);
454  else
455  { /* copy old value */
456  *_Exp = _Prev;
457  return (0);
458  }
459  }
460 
461 inline int _Compare_exchange_release_1(volatile _Uint1_t *_Tgt,
462  _Uint1_t *_Exp, _Uint1_t _Value)
463  { /* compare and exchange values atomically with
464  release memory order */
465  _Uint1_t _Old_exp = *_Exp; /* read before atomic operation */
466 
467  _Uint1_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange8)((volatile char *)_Tgt,
468  _Value, _Old_exp);
469 
470  if (_Prev == _Old_exp)
471  return (1);
472  else
473  { /* copy old value */
474  *_Exp = _Prev;
475  return (0);
476  }
477  }
478 
479 inline int _Atomic_compare_exchange_strong_1(
480  volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value,
481  memory_order _Order1, memory_order _Order2)
482  { /* compare and exchange values atomically */
483  _Validate_compare_exchange_memory_order(_Order1, _Order2);
484 
485  switch (_Memory_order_upper_bound(_Order1, _Order2))
486  {
487  case memory_order_relaxed:
488  return (_Compare_exchange_relaxed_1(_Tgt, _Exp, _Value));
489 
490  case memory_order_consume:
491  case memory_order_acquire:
492  return (_Compare_exchange_acquire_1(_Tgt, _Exp, _Value));
493 
494  case memory_order_release:
495  return (_Compare_exchange_release_1(_Tgt, _Exp, _Value));
496 
497  case memory_order_acq_rel:
498  case memory_order_seq_cst:
499  return (_Compare_exchange_seq_cst_1(_Tgt, _Exp, _Value));
500 
501  default:
502  _INVALID_MEMORY_ORDER;
503  return (0);
504  }
505  }
506 
507 inline int _Atomic_compare_exchange_weak_1(
508  volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value,
509  memory_order _Order1, memory_order _Order2)
510  { /* compare and exchange values atomically */
511  /* No weak compare-exchange is currently available,
512  even for ARM, so fall back to strong */
513  return (_Atomic_compare_exchange_strong_1(_Tgt, _Exp, _Value,
514  _Order1, _Order2));
515  }
516 
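/* Example (editorial annotation, not part of xatomic.h): since the weak form
   forwards to the strong form it never fails spuriously here, but callers
   should still follow the general retry pattern, reusing the updated expected
   value on failure: */
/*
 _Uint1_t _Expected = _Atomic_load_1(&_Val, memory_order_relaxed);
 while (!_Atomic_compare_exchange_weak_1(&_Val, &_Expected, (_Uint1_t)(_Expected + 1),
  memory_order_seq_cst, memory_order_relaxed))
  ;  _Expected now holds the value observed in _Val; recompute and retry
*/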
517  /* _Atomic_fetch_add_1, _Atomic_fetch_sub_1 */
518 inline _Uint1_t _Fetch_add_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
519  { /* add _Value to *_Tgt atomically with
520  sequentially consistent memory order */
521 
522  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
523  }
524 
525 inline _Uint1_t _Fetch_add_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
526  { /* add _Value to *_Tgt atomically with
527  relaxed memory order */
528 
529  return (_INTRIN_RELAXED(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
530  }
531 
532 inline _Uint1_t _Fetch_add_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
533  { /* add _Value to *_Tgt atomically with
534  acquire memory order */
535 
536  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
537  }
538 
539 inline _Uint1_t _Fetch_add_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
540  { /* add _Value to *_Tgt atomically with
541  release memory order */
542 
543  return (_INTRIN_RELEASE(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
544  }
545 
546 inline _Uint1_t _Atomic_fetch_add_1(
547  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
548  { /* add _Value to *_Tgt atomically */
549  switch (_Order)
550  {
551  case memory_order_relaxed:
552  return (_Fetch_add_relaxed_1(_Tgt, _Value));
553 
554  case memory_order_consume:
555  case memory_order_acquire:
556  return (_Fetch_add_acquire_1(_Tgt, _Value));
557 
558  case memory_order_release:
559  return (_Fetch_add_release_1(_Tgt, _Value));
560 
561  case memory_order_acq_rel:
562  case memory_order_seq_cst:
563  return (_Fetch_add_seq_cst_1(_Tgt, _Value));
564 
565  default:
566  _INVALID_MEMORY_ORDER;
567  return (0);
568  }
569  }
570 
571 inline _Uint1_t _Atomic_fetch_sub_1(
572  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
573  { /* subtract _Value from *_Tgt atomically */
574  return (_Atomic_fetch_add_1(_Tgt, 0 - _Value, _Order));
575  }
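/* Note (editorial annotation, not part of xatomic.h): subtraction is expressed
   as an addition of 0 - _Value. _Uint1_t is unsigned, so the result wraps
   modulo 2^8 and adding it has the same effect as subtracting _Value: */
/*   _Atomic_fetch_sub_1(&_Val, 1, memory_order_seq_cst);                      */
/*     same as _Atomic_fetch_add_1(&_Val, 0xFF, ...): adding 255 mod 256       */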
576 
577  /* _Atomic_fetch_and_1 */
578 inline _Uint1_t _Fetch_and_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
579  { /* and _Value with *_Tgt atomically with
580  sequentially consistent memory order */
581 
582  return (_INTRIN_SEQ_CST(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
583  }
584 
585 inline _Uint1_t _Fetch_and_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
586  { /* and _Value with *_Tgt atomically with
587  relaxed memory order */
588 
589  return (_INTRIN_RELAXED(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
590  }
591 
592 inline _Uint1_t _Fetch_and_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
593  { /* and _Value with *_Tgt atomically with
594  acquire memory order */
595 
596  return (_INTRIN_ACQUIRE(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
597  }
598 
599 inline _Uint1_t _Fetch_and_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
600  { /* and _Value with *_Tgt atomically with
601  release memory order */
602 
603  return (_INTRIN_RELEASE(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
604  }
605 
606 inline _Uint1_t _Atomic_fetch_and_1(
607  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
608  { /* and _Value with *_Tgt atomically */
609  switch (_Order)
610  {
611  case memory_order_relaxed:
612  return (_Fetch_and_relaxed_1(_Tgt, _Value));
613 
614  case memory_order_consume:
615  case memory_order_acquire:
616  return (_Fetch_and_acquire_1(_Tgt, _Value));
617 
618  case memory_order_release:
619  return (_Fetch_and_release_1(_Tgt, _Value));
620 
621  case memory_order_acq_rel:
622  case memory_order_seq_cst:
623  return (_Fetch_and_seq_cst_1(_Tgt, _Value));
624 
625  default:
626  _INVALID_MEMORY_ORDER;
627  return (0);
628  }
629  }
630 
631  /* _Atomic_fetch_or_1 */
632 inline _Uint1_t _Fetch_or_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
633  { /* or _Value with *_Tgt atomically with
634  sequentially consistent memory order */
635 
636  return (_INTRIN_SEQ_CST(_InterlockedOr8)((volatile char *)_Tgt, _Value));
637  }
638 
639 inline _Uint1_t _Fetch_or_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
640  { /* or _Value with *_Tgt atomically with
641  relaxed memory order */
642 
643  return (_INTRIN_RELAXED(_InterlockedOr8)((volatile char *)_Tgt, _Value));
644  }
645 
646 inline _Uint1_t _Fetch_or_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
647  { /* or _Value with *_Tgt atomically with
648  acquire memory order */
649 
650  return (_INTRIN_ACQUIRE(_InterlockedOr8)((volatile char *)_Tgt, _Value));
651  }
652 
653 inline _Uint1_t _Fetch_or_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
654  { /* or _Value with *_Tgt atomically with
655  release memory order */
656 
657  return (_INTRIN_RELEASE(_InterlockedOr8)((volatile char *)_Tgt, _Value));
658  }
659 
660 inline _Uint1_t _Atomic_fetch_or_1(
661  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
662  { /* or _Value with *_Tgt atomically */
663  switch (_Order)
664  {
665  case memory_order_relaxed:
666  return (_Fetch_or_relaxed_1(_Tgt, _Value));
667 
668  case memory_order_consume:
669  case memory_order_acquire:
670  return (_Fetch_or_acquire_1(_Tgt, _Value));
671 
672  case memory_order_release:
673  return (_Fetch_or_release_1(_Tgt, _Value));
674 
675  case memory_order_acq_rel:
676  case memory_order_seq_cst:
677  return (_Fetch_or_seq_cst_1(_Tgt, _Value));
678 
679  default:
680  _INVALID_MEMORY_ORDER;
681  return (0);
682  }
683  }
684 
685  /* _Atomic_fetch_xor_1 */
686 inline _Uint1_t _Fetch_xor_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
687  { /* xor _Value with *_Tgt atomically with
688  sequentially consistent memory order */
689 
690  return (_INTRIN_SEQ_CST(_InterlockedXor8)((volatile char *)_Tgt, _Value));
691  }
692 
693 inline _Uint1_t _Fetch_xor_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
694  { /* xor _Value with *_Tgt atomically with
695  relaxed memory order */
696 
697  return (_INTRIN_RELAXED(_InterlockedXor8)((volatile char *)_Tgt, _Value));
698  }
699 
700 inline _Uint1_t _Fetch_xor_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
701  { /* xor _Value with *_Tgt atomically with
702  acquire memory order */
703 
704  return (_INTRIN_ACQUIRE(_InterlockedXor8)((volatile char *)_Tgt, _Value));
705  }
706 
707 inline _Uint1_t _Fetch_xor_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
708  { /* xor _Value with *_Tgt atomically with
709  release memory order */
710 
711  return (_INTRIN_RELEASE(_InterlockedXor8)((volatile char *)_Tgt, _Value));
712  }
713 
714 inline _Uint1_t _Atomic_fetch_xor_1(
715  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
716  { /* xor _Value with *_Tgt atomically */
717  switch (_Order)
718  {
719  case memory_order_relaxed:
720  return (_Fetch_xor_relaxed_1(_Tgt, _Value));
721 
722  case memory_order_consume:
723  case memory_order_acquire:
724  return (_Fetch_xor_acquire_1(_Tgt, _Value));
725 
726  case memory_order_release:
727  return (_Fetch_xor_release_1(_Tgt, _Value));
728 
729  case memory_order_acq_rel:
730  case memory_order_seq_cst:
731  return (_Fetch_xor_seq_cst_1(_Tgt, _Value));
732 
733  default:
734  _INVALID_MEMORY_ORDER;
735  return (0);
736  }
737  }
738 
739  /* _Atomic_store_2 */
740 inline void _Store_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
741  { /* store _Value atomically with relaxed memory order */
742 
743  #if defined(_M_ARM) || defined(_M_ARM64)
744  __iso_volatile_store16((volatile short *)_Tgt, _Value);
745 
746  #else
747  *_Tgt = _Value;
748  #endif
749  }
750 
751 inline void _Store_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
752  { /* store _Value atomically with release memory order */
753 
754  #if defined(_M_ARM) || defined(_M_ARM64)
755  _Memory_barrier();
756  __iso_volatile_store16((volatile short *)_Tgt, _Value);
757 
758  #else
759  _Compiler_barrier();
760  *_Tgt = _Value;
761  #endif
762  }
763 
764 inline void _Store_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
765  { /* store _Value atomically with
766  sequentially consistent memory order */
767 
768  #if defined(_M_ARM) || defined(_M_ARM64)
769  _Memory_barrier();
770  __iso_volatile_store16((volatile short *)_Tgt, _Value);
771  _Memory_barrier();
772 
773  #else
774  _INTRIN_SEQ_CST(_InterlockedExchange16)((volatile short *)_Tgt, _Value);
775  #endif
776  }
777 
778 inline void _Atomic_store_2(
779  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
780  { /* store _Value atomically */
781  switch (_Order)
782  {
783  case memory_order_relaxed:
784  _Store_relaxed_2(_Tgt, _Value);
785  break;
786 
787  case memory_order_release:
788  _Store_release_2(_Tgt, _Value);
789  break;
790 
791  case memory_order_seq_cst:
792  _Store_seq_cst_2(_Tgt, _Value);
793  break;
794 
795  default:
796  _INVALID_MEMORY_ORDER;
797  break;
798  }
799  }
800 
801  /* _Atomic_load_2 */
802 inline _Uint2_t _Load_seq_cst_2(volatile _Uint2_t *_Tgt)
803  { /* load from *_Tgt atomically with
804  sequentially consistent memory order */
805  _Uint2_t _Value;
806 
807  #if defined(_M_ARM) || defined(_M_ARM64)
808  _Value = __iso_volatile_load16((volatile short *)_Tgt);
809  _Memory_barrier();
810 
811  #else
812  _Value = *_Tgt;
813  _Compiler_barrier();
814  #endif
815 
816  return (_Value);
817  }
818 
819 inline _Uint2_t _Load_relaxed_2(volatile _Uint2_t *_Tgt)
820  { /* load from *_Tgt atomically with
821  relaxed memory order */
822  _Uint2_t _Value;
823 
824  #if defined(_M_ARM) || defined(_M_ARM64)
825  _Value = __iso_volatile_load16((volatile short *)_Tgt);
826 
827  #else
828  _Value = *_Tgt;
829  #endif
830 
831  return (_Value);
832  }
833 
834 inline _Uint2_t _Load_acquire_2(volatile _Uint2_t *_Tgt)
835  { /* load from *_Tgt atomically with
836  acquire memory order */
837 
838  return (_Load_seq_cst_2(_Tgt));
839  }
840 
841 inline _Uint2_t _Atomic_load_2(
842  volatile _Uint2_t *_Tgt, memory_order _Order)
843  { /* load from *_Tgt atomically */
844  switch (_Order)
845  {
846  case memory_order_relaxed:
847  return (_Load_relaxed_2(_Tgt));
848 
849  case memory_order_consume:
850  case memory_order_acquire:
851  return (_Load_acquire_2(_Tgt));
852 
853  case memory_order_seq_cst:
854  return (_Load_seq_cst_2(_Tgt));
855 
856  default:
857  _INVALID_MEMORY_ORDER;
858  return (0);
859  }
860  }
861 
862  /* _Atomic_exchange_2 */
863 inline _Uint2_t _Exchange_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
864  { /* exchange _Value and *_Tgt atomically with
865  sequentially consistent memory order */
866 
867  return (_INTRIN_SEQ_CST(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
868  }
869 
870 inline _Uint2_t _Exchange_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
871  { /* exchange _Value and *_Tgt atomically with
872  relaxed memory order */
873 
874  return (_INTRIN_RELAXED(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
875  }
876 
877 inline _Uint2_t _Exchange_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
878  { /* exchange _Value and *_Tgt atomically with
879  acquire memory order */
880 
881  return (_INTRIN_ACQUIRE(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
882  }
883 
884 inline _Uint2_t _Exchange_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
885  { /* exchange _Value and *_Tgt atomically with
886  release memory order */
887 
888  return (_INTRIN_RELEASE(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
889  }
890 
891 inline _Uint2_t _Atomic_exchange_2(
892  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
893  { /* exchange _Value and *_Tgt atomically */
894  switch (_Order)
895  {
896  case memory_order_relaxed:
897  return (_Exchange_relaxed_2(_Tgt, _Value));
898 
899  case memory_order_consume:
900  case memory_order_acquire:
901  return (_Exchange_acquire_2(_Tgt, _Value));
902 
903  case memory_order_release:
904  return (_Exchange_release_2(_Tgt, _Value));
905 
906  case memory_order_acq_rel:
907  case memory_order_seq_cst:
908  return (_Exchange_seq_cst_2(_Tgt, _Value));
909 
910  default:
911  _INVALID_MEMORY_ORDER;
912  return (0);
913  }
914  }
915 
916  /* _Atomic_compare_exchange_weak_2, _Atomic_compare_exchange_strong_2 */
917 inline int _Compare_exchange_seq_cst_2(volatile _Uint2_t *_Tgt,
918  _Uint2_t *_Exp, _Uint2_t _Value)
919  { /* compare and exchange values atomically with
920  sequentially consistent memory order */
921  _Uint2_t _Old_exp = *_Exp; /* read before atomic operation */
922 
923  _Uint2_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange16)((volatile short *)_Tgt,
924  _Value, _Old_exp);
925 
926  if (_Prev == _Old_exp)
927  return (1);
928  else
929  { /* copy old value */
930  *_Exp = _Prev;
931  return (0);
932  }
933  }
934 
935 inline int _Compare_exchange_relaxed_2(volatile _Uint2_t *_Tgt,
936  _Uint2_t *_Exp, _Uint2_t _Value)
937  { /* compare and exchange values atomically with
938  relaxed memory order */
939  _Uint2_t _Old_exp = *_Exp; /* read before atomic operation */
940 
941  _Uint2_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange16)((volatile short *)_Tgt,
942  _Value, _Old_exp);
943 
944  if (_Prev == _Old_exp)
945  return (1);
946  else
947  { /* copy old value */
948  *_Exp = _Prev;
949  return (0);
950  }
951  }
952 
953 inline int _Compare_exchange_acquire_2(volatile _Uint2_t *_Tgt,
954  _Uint2_t *_Exp, _Uint2_t _Value)
955  { /* compare and exchange values atomically with
956  acquire memory order */
957  _Uint2_t _Old_exp = *_Exp; /* read before atomic operation */
958 
959  _Uint2_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange16)((volatile short *)_Tgt,
960  _Value, _Old_exp);
961 
962  if (_Prev == _Old_exp)
963  return (1);
964  else
965  { /* copy old value */
966  *_Exp = _Prev;
967  return (0);
968  }
969  }
970 
971 inline int _Compare_exchange_release_2(volatile _Uint2_t *_Tgt,
972  _Uint2_t *_Exp, _Uint2_t _Value)
973  { /* compare and exchange values atomically with
974  release memory order */
975  _Uint2_t _Old_exp = *_Exp; /* read before atomic operation */
976 
977  _Uint2_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange16)((volatile short *)_Tgt,
978  _Value, _Old_exp);
979 
980  if (_Prev == _Old_exp)
981  return (1);
982  else
983  { /* copy old value */
984  *_Exp = _Prev;
985  return (0);
986  }
987  }
988 
989 inline int _Atomic_compare_exchange_strong_2(
990  volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value,
991  memory_order _Order1, memory_order _Order2)
992  { /* compare and exchange values atomically */
993  _Validate_compare_exchange_memory_order(_Order1, _Order2);
994 
995  switch (_Memory_order_upper_bound(_Order1, _Order2))
996  {
997  case memory_order_relaxed:
998  return (_Compare_exchange_relaxed_2(_Tgt, _Exp, _Value));
999 
1000  case memory_order_consume:
1001  case memory_order_acquire:
1002  return (_Compare_exchange_acquire_2(_Tgt, _Exp, _Value));
1003 
1004  case memory_order_release:
1005  return (_Compare_exchange_release_2(_Tgt, _Exp, _Value));
1006 
1007  case memory_order_acq_rel:
1008  case memory_order_seq_cst:
1009  return (_Compare_exchange_seq_cst_2(_Tgt, _Exp, _Value));
1010 
1011  default:
1012  _INVALID_MEMORY_ORDER;
1013  return (0);
1014  }
1015  }
1016 
1017 inline int _Atomic_compare_exchange_weak_2(
1018  volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value,
1019  memory_order _Order1, memory_order _Order2)
1020  { /* compare and exchange values atomically */
1021  /* No weak compare-exchange is currently available,
1022  even for ARM, so fall back to strong */
1023  return (_Atomic_compare_exchange_strong_2(_Tgt, _Exp, _Value,
1024  _Order1, _Order2));
1025  }
1026 
1027  /* _Atomic_fetch_add_2, _Atomic_fetch_sub_2 */
1028 inline _Uint2_t _Fetch_add_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1029  { /* add _Value to *_Tgt atomically with
1030  sequentially consistent memory order */
1031 
1032  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1033  }
1034 
1035 inline _Uint2_t _Fetch_add_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1036  { /* add _Value to *_Tgt atomically with
1037  relaxed memory order */
1038 
1039  return (_INTRIN_RELAXED(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1040  }
1041 
1042 inline _Uint2_t _Fetch_add_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1043  { /* add _Value to *_Tgt atomically with
1044  acquire memory order */
1045 
1046  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1047  }
1048 
1049 inline _Uint2_t _Fetch_add_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1050  { /* add _Value to *_Tgt atomically with
1051  release memory order */
1052 
1053  return (_INTRIN_RELEASE(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1054  }
1055 
1056 inline _Uint2_t _Atomic_fetch_add_2(
1057  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
1058  { /* add _Value to *_Tgt atomically */
1059  switch (_Order)
1060  {
1061  case memory_order_relaxed:
1062  return (_Fetch_add_relaxed_2(_Tgt, _Value));
1063 
1064  case memory_order_consume:
1065  case memory_order_acquire:
1066  return (_Fetch_add_acquire_2(_Tgt, _Value));
1067 
1068  case memory_order_release:
1069  return (_Fetch_add_release_2(_Tgt, _Value));
1070 
1071  case memory_order_acq_rel:
1072  case memory_order_seq_cst:
1073  return (_Fetch_add_seq_cst_2(_Tgt, _Value));
1074 
1075  default:
1076  _INVALID_MEMORY_ORDER;
1077  return (0);
1078  }
1079  }
1080 
1081 inline _Uint2_t _Atomic_fetch_sub_2(
1082  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
1083  { /* subtract _Value from *_Tgt atomically */
1084  return (_Atomic_fetch_add_2(_Tgt, 0 - _Value, _Order));
1085  }
1086 
1087  /* _Atomic_fetch_and_2 */
1088 inline _Uint2_t _Fetch_and_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1089  { /* and _Value with *_Tgt atomically with
1090  sequentially consistent memory order */
1091 
1092  return (_INTRIN_SEQ_CST(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1093  }
1094 
1095 inline _Uint2_t _Fetch_and_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1096  { /* and _Value with *_Tgt atomically with
1097  relaxed memory order */
1098 
1099  return (_INTRIN_RELAXED(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1100  }
1101 
1102 inline _Uint2_t _Fetch_and_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1103  { /* and _Value with *_Tgt atomically with
1104  acquire memory order */
1105 
1106  return (_INTRIN_ACQUIRE(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1107  }
1108 
1109 inline _Uint2_t _Fetch_and_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1110  { /* and _Value with *_Tgt atomically with
1111  release memory order */
1112 
1113  return (_INTRIN_RELEASE(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1114  }
1115 
1116 inline _Uint2_t _Atomic_fetch_and_2(
1117  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
1118  { /* and _Value with *_Tgt atomically */
1119  switch (_Order)
1120  {
1121  case memory_order_relaxed:
1122  return (_Fetch_and_relaxed_2(_Tgt, _Value));
1123 
1124  case memory_order_consume:
1125  case memory_order_acquire:
1126  return (_Fetch_and_acquire_2(_Tgt, _Value));
1127 
1128  case memory_order_release:
1129  return (_Fetch_and_release_2(_Tgt, _Value));
1130 
1131  case memory_order_acq_rel:
1132  case memory_order_seq_cst:
1133  return (_Fetch_and_seq_cst_2(_Tgt, _Value));
1134 
1135  default:
1136  _INVALID_MEMORY_ORDER;
1137  return (0);
1138  }
1139  }
1140 
1141  /* _Atomic_fetch_or_2 */
1142 inline _Uint2_t _Fetch_or_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1143  { /* or _Value with *_Tgt atomically with
1144  sequentially consistent memory order */
1145 
1146  return (_INTRIN_SEQ_CST(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1147  }
1148 
1149 inline _Uint2_t _Fetch_or_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1150  { /* or _Value with *_Tgt atomically with
1151  relaxed memory order */
1152 
1153  return (_INTRIN_RELAXED(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1154  }
1155 
1156 inline _Uint2_t _Fetch_or_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1157  { /* or _Value with *_Tgt atomically with
1158  acquire memory order */
1159 
1160  return (_INTRIN_ACQUIRE(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1161  }
1162 
1163 inline _Uint2_t _Fetch_or_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1164  { /* or _Value with *_Tgt atomically with
1165  release memory order */
1166 
1167  return (_INTRIN_RELEASE(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1168  }
1169 
1170 inline _Uint2_t _Atomic_fetch_or_2(
1171  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
1172  { /* or _Value with *_Tgt atomically */
1173  switch (_Order)
1174  {
1175  case memory_order_relaxed:
1176  return (_Fetch_or_relaxed_2(_Tgt, _Value));
1177 
1178  case memory_order_consume:
1179  case memory_order_acquire:
1180  return (_Fetch_or_acquire_2(_Tgt, _Value));
1181 
1182  case memory_order_release:
1183  return (_Fetch_or_release_2(_Tgt, _Value));
1184 
1185  case memory_order_acq_rel:
1186  case memory_order_seq_cst:
1187  return (_Fetch_or_seq_cst_2(_Tgt, _Value));
1188 
1189  default:
1190  _INVALID_MEMORY_ORDER;
1191  return (0);
1192  }
1193  }
1194 
1195  /* _Atomic_fetch_xor_2 */
1196 inline _Uint2_t _Fetch_xor_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1197  { /* xor _Value with *_Tgt atomically with
1198  sequentially consistent memory order */
1199 
1200  return (_INTRIN_SEQ_CST(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1201  }
1202 
1203 inline _Uint2_t _Fetch_xor_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1204  { /* xor _Value with *_Tgt atomically with
1205  relaxed memory order */
1206 
1207  return (_INTRIN_RELAXED(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1208  }
1209 
1210 inline _Uint2_t _Fetch_xor_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1211  { /* xor _Value with *_Tgt atomically with
1212  acquire memory order */
1213 
1214  return (_INTRIN_ACQUIRE(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1215  }
1216 
1217 inline _Uint2_t _Fetch_xor_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1218  { /* xor _Value with *_Tgt atomically with
1219  release memory order */
1220 
1221  return (_INTRIN_RELEASE(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1222  }
1223 
1224 inline _Uint2_t _Atomic_fetch_xor_2(
1225  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
1226  { /* xor _Value with *_Tgt atomically */
1227  switch (_Order)
1228  {
1229  case memory_order_relaxed:
1230  return (_Fetch_xor_relaxed_2(_Tgt, _Value));
1231 
1232  case memory_order_consume:
1233  case memory_order_acquire:
1234  return (_Fetch_xor_acquire_2(_Tgt, _Value));
1235 
1236  case memory_order_release:
1237  return (_Fetch_xor_release_2(_Tgt, _Value));
1238 
1239  case memory_order_acq_rel:
1240  case memory_order_seq_cst:
1241  return (_Fetch_xor_seq_cst_2(_Tgt, _Value));
1242 
1243  default:
1244  _INVALID_MEMORY_ORDER;
1245  return (0);
1246  }
1247  }
1248 
1249  /* _Atomic_store_4 */
1250 inline void _Store_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1251  { /* store _Value atomically with relaxed memory order */
1252 
1253  #if defined(_M_ARM) || defined(_M_ARM64)
1254  __iso_volatile_store32((volatile int *)_Tgt, _Value);
1255 
1256  #else
1257  *_Tgt = _Value;
1258  #endif
1259  }
1260 
1261 inline void _Store_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1262  { /* store _Value atomically with release memory order */
1263 
1264  #if defined(_M_ARM) || defined(_M_ARM64)
1265  _Memory_barrier();
1266  __iso_volatile_store32((volatile int *)_Tgt, _Value);
1267 
1268  #else
1269  _Compiler_barrier();
1270  *_Tgt = _Value;
1271  #endif
1272  }
1273 
1274 inline void _Store_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1275  { /* store _Value atomically with
1276  sequentially consistent memory order */
1277 
1278  #if defined(_M_ARM) || defined(_M_ARM64)
1279  _Memory_barrier();
1280  __iso_volatile_store32((volatile int *)_Tgt, _Value);
1281  _Memory_barrier();
1282 
1283  #else
1284  _INTRIN_SEQ_CST(_InterlockedExchange)((volatile long *)_Tgt, _Value);
1285  #endif
1286  }
1287 
1288 inline void _Atomic_store_4(
1289  volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1290  { /* store _Value atomically */
1291  switch (_Order)
1292  {
1293  case memory_order_relaxed:
1294  _Store_relaxed_4(_Tgt, _Value);
1295  break;
1296 
1297  case memory_order_release:
1298  _Store_release_4(_Tgt, _Value);
1299  break;
1300 
1301  case memory_order_seq_cst:
1302  _Store_seq_cst_4(_Tgt, _Value);
1303  break;
1304 
1305  default:
1306  _INVALID_MEMORY_ORDER;
1307  break;
1308  }
1309  }
1310 
1311  /* _Atomic_load_4 */
1312 inline _Uint4_t _Load_seq_cst_4(volatile _Uint4_t *_Tgt)
1313  { /* load from *_Tgt atomically with
1314  sequentially consistent memory order */
1315  _Uint4_t _Value;
1316 
1317  #if defined(_M_ARM) || defined(_M_ARM64)
1318  _Value = __iso_volatile_load32((volatile int *)_Tgt);
1319  _Memory_barrier();
1320 
1321  #else
1322  _Value = *_Tgt;
1323  _Compiler_barrier();
1324  #endif
1325 
1326  return (_Value);
1327  }
1328 
1329 inline _Uint4_t _Load_relaxed_4(volatile _Uint4_t *_Tgt)
1330  { /* load from *_Tgt atomically with
1331  relaxed memory order */
1332  _Uint4_t _Value;
1333 
1334  #if defined(_M_ARM) || defined(_M_ARM64)
1335  _Value = __iso_volatile_load32((volatile int *)_Tgt);
1336 
1337  #else
1338  _Value = *_Tgt;
1339  #endif
1340 
1341  return (_Value);
1342  }
1343 
1344 inline _Uint4_t _Load_acquire_4(volatile _Uint4_t *_Tgt)
1345  { /* load from *_Tgt atomically with
1346  acquire memory order */
1347 
1348  return (_Load_seq_cst_4(_Tgt));
1349  }
1350 
1351 inline _Uint4_t _Atomic_load_4(
1352  volatile _Uint4_t *_Tgt, memory_order _Order)
1353  { /* load from *_Tgt atomically */
1354  switch (_Order)
1355  {
1356  case memory_order_relaxed:
1357  return (_Load_relaxed_4(_Tgt));
1358 
1359  case memory_order_consume:
1360  case memory_order_acquire:
1361  return (_Load_acquire_4(_Tgt));
1362 
1363  case memory_order_seq_cst:
1364  return (_Load_seq_cst_4(_Tgt));
1365 
1366  default:
1367  _INVALID_MEMORY_ORDER;
1368  return (0);
1369  }
1370  }
1371 
1372  /* _Atomic_exchange_4 */
1373 inline _Uint4_t _Exchange_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1374  { /* exchange _Value and *_Tgt atomically with
1375  sequentially consistent memory order */
1376 
1377  return (_INTRIN_SEQ_CST(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1378  }
1379 
1380 inline _Uint4_t _Exchange_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1381  { /* exchange _Value and *_Tgt atomically with
1382  relaxed memory order */
1383 
1384  return (_INTRIN_RELAXED(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1385  }
1386 
1387 inline _Uint4_t _Exchange_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1388  { /* exchange _Value and *_Tgt atomically with
1389  acquire memory order */
1390 
1391  return (_INTRIN_ACQUIRE(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1392  }
1393 
1394 inline _Uint4_t _Exchange_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1395  { /* exchange _Value and *_Tgt atomically with
1396  release memory order */
1397 
1398  return (_INTRIN_RELEASE(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1399  }
1400 
1401 inline _Uint4_t _Atomic_exchange_4(
1402  volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1403  { /* exchange _Value and *_Tgt atomically */
1404  switch (_Order)
1405  {
1406  case memory_order_relaxed:
1407  return (_Exchange_relaxed_4(_Tgt, _Value));
1408 
1409  case memory_order_consume:
1410  case memory_order_acquire:
1411  return (_Exchange_acquire_4(_Tgt, _Value));
1412 
1413  case memory_order_release:
1414  return (_Exchange_release_4(_Tgt, _Value));
1415 
1416  case memory_order_acq_rel:
1417  case memory_order_seq_cst:
1418  return (_Exchange_seq_cst_4(_Tgt, _Value));
1419 
1420  default:
1421  _INVALID_MEMORY_ORDER;
1422  return (0);
1423  }
1424  }
1425 
1426  /* _Atomic_compare_exchange_weak_4, _Atomic_compare_exchange_strong_4 */
1427 inline int _Compare_exchange_seq_cst_4(volatile _Uint4_t *_Tgt,
1428  _Uint4_t *_Exp, _Uint4_t _Value)
1429  { /* compare and exchange values atomically with
1430  sequentially consistent memory order */
1431  _Uint4_t _Old_exp = *_Exp; /* read before atomic operation */
1432 
1433  _Uint4_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange)((volatile long *)_Tgt,
1434  _Value, _Old_exp);
1435 
1436  if (_Prev == _Old_exp)
1437  return (1);
1438  else
1439  { /* copy old value */
1440  *_Exp = _Prev;
1441  return (0);
1442  }
1443  }
1444 
1445 inline int _Compare_exchange_relaxed_4(volatile _Uint4_t *_Tgt,
1446  _Uint4_t *_Exp, _Uint4_t _Value)
1447  { /* compare and exchange values atomically with
1448  relaxed memory order */
1449  _Uint4_t _Old_exp = *_Exp; /* read before atomic operation */
1450 
1451  _Uint4_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange)((volatile long *)_Tgt,
1452  _Value, _Old_exp);
1453 
1454  if (_Prev == _Old_exp)
1455  return (1);
1456  else
1457  { /* copy old value */
1458  *_Exp = _Prev;
1459  return (0);
1460  }
1461  }
1462 
1463 inline int _Compare_exchange_acquire_4(volatile _Uint4_t *_Tgt,
1464  _Uint4_t *_Exp, _Uint4_t _Value)
1465  { /* compare and exchange values atomically with
1466  acquire memory order */
1467  _Uint4_t _Old_exp = *_Exp; /* read before atomic operation */
1468 
1469  _Uint4_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange)((volatile long *)_Tgt,
1470  _Value, _Old_exp);
1471 
1472  if (_Prev == _Old_exp)
1473  return (1);
1474  else
1475  { /* copy old value */
1476  *_Exp = _Prev;
1477  return (0);
1478  }
1479  }
1480 
1481 inline int _Compare_exchange_release_4(volatile _Uint4_t *_Tgt,
1482  _Uint4_t *_Exp, _Uint4_t _Value)
1483  { /* compare and exchange values atomically with
1484  release memory order */
1485  _Uint4_t _Old_exp = *_Exp; /* read before atomic operation */
1486 
1487  _Uint4_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange)((volatile long *)_Tgt,
1488  _Value, _Old_exp);
1489 
1490  if (_Prev == _Old_exp)
1491  return (1);
1492  else
1493  { /* copy old value */
1494  *_Exp = _Prev;
1495  return (0);
1496  }
1497  }
1498 
1499 inline int _Atomic_compare_exchange_strong_4(
1500  volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value,
1501  memory_order _Order1, memory_order _Order2)
1502  { /* compare and exchange values atomically */
1503  _Validate_compare_exchange_memory_order(_Order1, _Order2);
1504 
1505  switch (_Memory_order_upper_bound(_Order1, _Order2))
1506  {
1507  case memory_order_relaxed:
1508  return (_Compare_exchange_relaxed_4(_Tgt, _Exp, _Value));
1509 
1510  case memory_order_consume:
1511  case memory_order_acquire:
1512  return (_Compare_exchange_acquire_4(_Tgt, _Exp, _Value));
1513 
1514  case memory_order_release:
1515  return (_Compare_exchange_release_4(_Tgt, _Exp, _Value));
1516 
1517  case memory_order_acq_rel:
1518  case memory_order_seq_cst:
1519  return (_Compare_exchange_seq_cst_4(_Tgt, _Exp, _Value));
1520 
1521  default:
1522  _INVALID_MEMORY_ORDER;
1523  return (0);
1524  }
1525  }
1526 
1527 inline int _Atomic_compare_exchange_weak_4(
1528  volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value,
1529  memory_order _Order1, memory_order _Order2)
1530  { /* compare and exchange values atomically */
1531  /* No weak compare-exchange is currently available,
1532  even for ARM, so fall back to strong */
1533  return (_Atomic_compare_exchange_strong_4(_Tgt, _Exp, _Value,
1534  _Order1, _Order2));
1535  }
1536 
1537  /* _Atomic_fetch_add_4, _Atomic_fetch_sub_4 */
1538 inline _Uint4_t _Fetch_add_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1539  { /* add _Value to *_Tgt atomically with
1540  sequentially consistent memory order */
1541 
1542  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1543  }
1544 
1545 inline _Uint4_t _Fetch_add_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1546  { /* add _Value to *_Tgt atomically with
1547  relaxed memory order */
1548 
1549  return (_INTRIN_RELAXED(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1550  }
1551 
1552 inline _Uint4_t _Fetch_add_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1553  { /* add _Value to *_Tgt atomically with
1554  acquire memory order */
1555 
1556  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1557  }
1558 
1559 inline _Uint4_t _Fetch_add_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1560  { /* add _Value to *_Tgt atomically with
1561  release memory order */
1562 
1563  return (_INTRIN_RELEASE(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1564  }
1565 
1566 inline _Uint4_t _Atomic_fetch_add_4(
1567  volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1568  { /* add _Value to *_Tgt atomically */
1569  switch (_Order)
1570  {
1571  case memory_order_relaxed:
1572  return (_Fetch_add_relaxed_4(_Tgt, _Value));
1573 
1574  case memory_order_consume:
1575  case memory_order_acquire:
1576  return (_Fetch_add_acquire_4(_Tgt, _Value));
1577 
1578  case memory_order_release:
1579  return (_Fetch_add_release_4(_Tgt, _Value));
1580 
1581  case memory_order_acq_rel:
1582  case memory_order_seq_cst:
1583  return (_Fetch_add_seq_cst_4(_Tgt, _Value));
1584 
1585  default:
1586  _INVALID_MEMORY_ORDER;
1587  return (0);
1588  }
1589  }
1590 
1591 inline _Uint4_t _Atomic_fetch_sub_4(
1592  volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1593  { /* subtract _Value from *_Tgt atomically */
1594  return (_Atomic_fetch_add_4(_Tgt, 0 - _Value, _Order));
1595  }
1596 
1597  /* _Atomic_fetch_and_4 */
1598 inline _Uint4_t _Fetch_and_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1599  { /* and _Value with *_Tgt atomically with
1600  sequentially consistent memory order */
1601 
1602  return (_INTRIN_SEQ_CST(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1603  }
1604 
1605 inline _Uint4_t _Fetch_and_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1606  { /* and _Value with *_Tgt atomically with
1607  relaxed memory order */
1608 
1609  return (_INTRIN_RELAXED(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1610  }
1611 
1612 inline _Uint4_t _Fetch_and_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1613  { /* and _Value with *_Tgt atomically with
1614  acquire memory order */
1615 
1616  return (_INTRIN_ACQUIRE(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1617  }
1618 
1619 inline _Uint4_t _Fetch_and_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1620  { /* and _Value with *_Tgt atomically with
1621  release memory order */
1622 
1623  return (_INTRIN_RELEASE(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1624  }
1625 
1626 inline _Uint4_t _Atomic_fetch_and_4(
1627  volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1628  { /* and _Value with *_Tgt atomically */
1629  switch (_Order)
1630  {
1631  case memory_order_relaxed:
1632  return (_Fetch_and_relaxed_4(_Tgt, _Value));
1633 
1634  case memory_order_consume:
1635  case memory_order_acquire:
1636  return (_Fetch_and_acquire_4(_Tgt, _Value));
1637 
1638  case memory_order_release:
1639  return (_Fetch_and_release_4(_Tgt, _Value));
1640 
1641  case memory_order_acq_rel:
1642  case memory_order_seq_cst:
1643  return (_Fetch_and_seq_cst_4(_Tgt, _Value));
1644 
1645  default:
1646  _INVALID_MEMORY_ORDER;
1647  return (0);
1648  }
1649  }
1650 
1651  /* _Atomic_fetch_or_4 */
1652 inline _Uint4_t _Fetch_or_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1653  { /* or _Value with *_Tgt atomically with
1654  sequentially consistent memory order */
1655 
1656  return (_INTRIN_SEQ_CST(_InterlockedOr)((volatile long *)_Tgt, _Value));
1657  }
1658 
1659 inline _Uint4_t _Fetch_or_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1660  { /* or _Value with *_Tgt atomically with
1661  relaxed memory order */
1662 
1663  return (_INTRIN_RELAXED(_InterlockedOr)((volatile long *)_Tgt, _Value));
1664  }
1665 
1666 inline _Uint4_t _Fetch_or_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1667  { /* or _Value with *_Tgt atomically with
1668  acquire memory order */
1669 
1670  return (_INTRIN_ACQUIRE(_InterlockedOr)((volatile long *)_Tgt, _Value));
1671  }
1672 
1673 inline _Uint4_t _Fetch_or_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1674  { /* or _Value with *_Tgt atomically with
1675  release memory order */
1676 
1677  return (_INTRIN_RELEASE(_InterlockedOr)((volatile long *)_Tgt, _Value));
1678  }
1679 
1680 inline _Uint4_t _Atomic_fetch_or_4(
1681  volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1682  { /* or _Value with *_Tgt atomically */
1683  switch (_Order)
1684  {
1685  case memory_order_relaxed:
1686  return (_Fetch_or_relaxed_4(_Tgt, _Value));
1687 
1688  case memory_order_consume:
1689  case memory_order_acquire:
1690  return (_Fetch_or_acquire_4(_Tgt, _Value));
1691 
1692  case memory_order_release:
1693  return (_Fetch_or_release_4(_Tgt, _Value));
1694 
1695  case memory_order_acq_rel:
1696  case memory_order_seq_cst:
1697  return (_Fetch_or_seq_cst_4(_Tgt, _Value));
1698 
1699  default:
1700  _INVALID_MEMORY_ORDER;
1701  return (0);
1702  }
1703  }
1704 
1705  /* _Atomic_fetch_xor_4 */
1706 inline _Uint4_t _Fetch_xor_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1707  { /* xor _Value with *_Tgt atomically with
1708  sequentially consistent memory order */
1709 
1710  return (_INTRIN_SEQ_CST(_InterlockedXor)((volatile long *)_Tgt, _Value));
1711  }
1712 
1713 inline _Uint4_t _Fetch_xor_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1714  { /* xor _Value with *_Tgt atomically with
1715  relaxed memory order */
1716 
1717  return (_INTRIN_RELAXED(_InterlockedXor)((volatile long *)_Tgt, _Value));
1718  }
1719 
1720 inline _Uint4_t _Fetch_xor_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1721  { /* xor _Value with *_Tgt atomically with
1722  acquire memory order */
1723 
1724  return (_INTRIN_ACQUIRE(_InterlockedXor)((volatile long *)_Tgt, _Value));
1725  }
1726 
1727 inline _Uint4_t _Fetch_xor_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1728  { /* xor _Value with *_Tgt atomically with
1729  release memory order */
1730 
1731  return (_INTRIN_RELEASE(_InterlockedXor)((volatile long *)_Tgt, _Value));
1732  }
1733 
1734 inline _Uint4_t _Atomic_fetch_xor_4(
1735  volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1736  { /* xor _Value with *_Tgt atomically */
1737  switch (_Order)
1738  {
1739  case memory_order_relaxed:
1740  return (_Fetch_xor_relaxed_4(_Tgt, _Value));
1741 
1742  case memory_order_consume:
1743  case memory_order_acquire:
1744  return (_Fetch_xor_acquire_4(_Tgt, _Value));
1745 
1746  case memory_order_release:
1747  return (_Fetch_xor_release_4(_Tgt, _Value));
1748 
1749  case memory_order_acq_rel:
1750  case memory_order_seq_cst:
1751  return (_Fetch_xor_seq_cst_4(_Tgt, _Value));
1752 
1753  default:
1754  _INVALID_MEMORY_ORDER;
1755  return (0);
1756  }
1757  }
1758 
1759  /* _Atomic_store_8 */
1760 inline void _Store_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1761  { /* store _Value atomically with relaxed memory order */
1762 
1763  #if defined(_M_X64)
1764  *_Tgt = _Value;
1765 
1766  #elif defined(_M_ARM64)
1767  __iso_volatile_store64((volatile long long *)_Tgt, _Value);
1768 
1769  #else
1770  _INTRIN_RELAXED(_InterlockedExchange64)((volatile long long *)_Tgt, _Value);
1771  #endif
1772  }
1773 
1774 inline void _Store_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1775  { /* store _Value atomically with release memory order */
1776 
1777  #if defined(_M_X64)
1778  _Compiler_barrier();
1779  *_Tgt = _Value;
1780 
1781  #elif defined(_M_ARM64)
1782  _Memory_barrier();
1783  __iso_volatile_store64((volatile long long *)_Tgt, _Value);
1784 
1785  #else
1786  _INTRIN_RELEASE(_InterlockedExchange64)((volatile long long *)_Tgt, _Value);
1787  #endif
1788  }
1789 
1790 inline void _Store_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1791  { /* store _Value atomically with
1792  sequentially consistent memory order */
1793 
1794  #if defined(_M_ARM64)
1795  _Memory_barrier();
1796  __iso_volatile_store64((volatile long long *)_Tgt, _Value);
1797  _Memory_barrier();
1798 
1799  #else
1800  _INTRIN_SEQ_CST(_InterlockedExchange64)((volatile long long *)_Tgt, _Value);
1801  #endif
1802  }
1803 
1804 inline void _Atomic_store_8(
1805  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
1806  { /* store _Value atomically */
1807  switch (_Order)
1808  {
1809  case memory_order_relaxed:
1810  _Store_relaxed_8(_Tgt, _Value);
1811  break;
1812 
1813  case memory_order_release:
1814  _Store_release_8(_Tgt, _Value);
1815  break;
1816 
1817  case memory_order_seq_cst:
1818  _Store_seq_cst_8(_Tgt, _Value);
1819  break;
1820 
1821  default:
1822  _INVALID_MEMORY_ORDER;
1823  break;
1824  }
1825  }
1826 
1827  /* _Atomic_load_8 */
1828 inline _Uint8_t _Load_seq_cst_8(volatile _Uint8_t *_Tgt)
1829  { /* load from *_Tgt atomically with
1830  sequentially consistent memory order */
1831  _Uint8_t _Value;
1832 
1833  #if defined(_M_X64)
1834  _Value = *_Tgt;
1835  _Compiler_barrier();
1836 
1837  #elif defined(_M_ARM)
1838  _Value = __ldrexd((volatile long long *)_Tgt);
1839  _Memory_barrier();
1840 
1841  #elif defined(_M_ARM64)
1842  _Value = __iso_volatile_load64((volatile long long *)_Tgt);
1843  _Memory_barrier();
1844 
1845  #else
1846  _Value = _InterlockedOr64((volatile long long *)_Tgt, 0);
1847  #endif
1848 
1849  return (_Value);
1850  }
1851 
1852 inline _Uint8_t _Load_relaxed_8(volatile _Uint8_t *_Tgt)
1853  { /* load from *_Tgt atomically with
1854  relaxed memory order */
1855  _Uint8_t _Value;
1856 
1857  #if defined(_M_X64)
1858  _Value = *_Tgt;
1859 
1860  #elif defined(_M_ARM)
1861  _Value = __ldrexd((volatile long long *)_Tgt);
1862 
1863  #elif defined(_M_ARM64)
1864  _Value = __iso_volatile_load64((volatile long long *)_Tgt);
1865 
1866  #else
1867  _Value = _InterlockedOr64((volatile long long *)_Tgt, 0);
1868  #endif
1869 
1870  return (_Value);
1871  }
1872 
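/* Note (editorial annotation, not part of xatomic.h): on 32-bit x86 a plain
   64-bit read is not guaranteed to be atomic, so the fallback branch of
   _Load_seq_cst_8 and _Load_relaxed_8 uses _InterlockedOr64(_Tgt, 0): OR-ing
   with zero leaves the value unchanged but performs the read as a single
   atomic read-modify-write (via the compare-exchange emulation defined for
   _M_IX86 at the top of this header). */
/*   _Uint8_t _Snapshot = _Load_relaxed_8(&_Counter);   atomic even on x86    */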
1873 inline _Uint8_t _Load_acquire_8(volatile _Uint8_t *_Tgt)
1874  { /* load from *_Tgt atomically with
1875  acquire memory order */
1876 
1877  return (_Load_seq_cst_8(_Tgt));
1878  }
1879 
1880 inline _Uint8_t _Atomic_load_8(
1881  volatile _Uint8_t *_Tgt, memory_order _Order)
1882  { /* load from *_Tgt atomically */
1883  switch (_Order)
1884  {
1885  case memory_order_relaxed:
1886  return (_Load_relaxed_8(_Tgt));
1887 
1888  case memory_order_consume:
1889  case memory_order_acquire:
1890  return (_Load_acquire_8(_Tgt));
1891 
1892  case memory_order_seq_cst:
1893  return (_Load_seq_cst_8(_Tgt));
1894 
1895  default:
1896  _INVALID_MEMORY_ORDER;
1897  return (0);
1898  }
1899  }
1900 
1901  /* _Atomic_exchange_8 */
1902 inline _Uint8_t _Exchange_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1903  { /* exchange _Value and *_Tgt atomically with
1904  sequentially consistent memory order */
1905 
1906  return (_INTRIN_SEQ_CST(_InterlockedExchange64)((volatile long long *)_Tgt, _Value));
1907  }
1908 
1909 inline _Uint8_t _Exchange_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1910  { /* exchange _Value and *_Tgt atomically with
1911  relaxed memory order */
1912 
1913  return (_INTRIN_RELAXED(_InterlockedExchange64)((volatile long long *)_Tgt, _Value));
1914  }
1915 
1916 inline _Uint8_t _Exchange_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1917  { /* exchange _Value and *_Tgt atomically with
1918  acquire memory order */
1919 
1920  return (_INTRIN_ACQUIRE(_InterlockedExchange64)((volatile long long *)_Tgt, _Value));
1921  }
1922 
1923 inline _Uint8_t _Exchange_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1924  { /* exchange _Value and *_Tgt atomically with
1925  release memory order */
1926 
1927  return (_INTRIN_RELEASE(_InterlockedExchange64)((volatile long long *)_Tgt, _Value));
1928  }
1929 
1930 inline _Uint8_t _Atomic_exchange_8(
1931  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
1932  { /* exchange _Value and *_Tgt atomically */
1933  switch (_Order)
1934  {
1935  case memory_order_relaxed:
1936  return (_Exchange_relaxed_8(_Tgt, _Value));
1937 
1938  case memory_order_consume:
1939  case memory_order_acquire:
1940  return (_Exchange_acquire_8(_Tgt, _Value));
1941 
1942  case memory_order_release:
1943  return (_Exchange_release_8(_Tgt, _Value));
1944 
1945  case memory_order_acq_rel:
1946  case memory_order_seq_cst:
1947  return (_Exchange_seq_cst_8(_Tgt, _Value));
1948 
1949  default:
1950  _INVALID_MEMORY_ORDER;
1951  return (0);
1952  }
1953  }
1954 
1955  /* _Atomic_compare_exchange_weak_8, _Atomic_compare_exchange_strong_8 */
1956 inline int _Compare_exchange_seq_cst_8(volatile _Uint8_t *_Tgt,
1957  _Uint8_t *_Exp, _Uint8_t _Value)
1958  { /* compare and exchange values atomically with
1959  sequentially consistent memory order */
1960  _Uint8_t _Old_exp = *_Exp; /* read before atomic operation */
1961 
1962  _Uint8_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange64)((volatile long long *)_Tgt,
1963  _Value, _Old_exp);
1964 
1965  if (_Prev == _Old_exp)
1966  return (1);
1967  else
1968  { /* copy old value */
1969  *_Exp = _Prev;
1970  return (0);
1971  }
1972  }
1973 
1974 inline int _Compare_exchange_relaxed_8(volatile _Uint8_t *_Tgt,
1975  _Uint8_t *_Exp, _Uint8_t _Value)
1976  { /* compare and exchange values atomically with
1977  relaxed memory order */
1978  _Uint8_t _Old_exp = *_Exp; /* read before atomic operation */
1979 
1980  _Uint8_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange64)((volatile long long *)_Tgt,
1981  _Value, _Old_exp);
1982 
1983  if (_Prev == _Old_exp)
1984  return (1);
1985  else
1986  { /* copy old value */
1987  *_Exp = _Prev;
1988  return (0);
1989  }
1990  }
1991 
1992 inline int _Compare_exchange_acquire_8(volatile _Uint8_t *_Tgt,
1993  _Uint8_t *_Exp, _Uint8_t _Value)
1994  { /* compare and exchange values atomically with
1995  acquire memory order */
1996  _Uint8_t _Old_exp = *_Exp; /* read before atomic operation */
1997 
1998  _Uint8_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange64)((volatile long long *)_Tgt,
1999  _Value, _Old_exp);
2000 
2001  if (_Prev == _Old_exp)
2002  return (1);
2003  else
2004  { /* copy old value */
2005  *_Exp = _Prev;
2006  return (0);
2007  }
2008  }
2009 
2010 inline int _Compare_exchange_release_8(volatile _Uint8_t *_Tgt,
2011  _Uint8_t *_Exp, _Uint8_t _Value)
2012  { /* compare and exchange values atomically with
2013  release memory order */
2014  _Uint8_t _Old_exp = *_Exp; /* read before atomic operation */
2015 
2016  _Uint8_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange64)((volatile long long *)_Tgt,
2017  _Value, _Old_exp);
2018 
2019  if (_Prev == _Old_exp)
2020  return (1);
2021  else
2022  { /* copy old value */
2023  *_Exp = _Prev;
2024  return (0);
2025  }
2026  }
2027 
2028 inline int _Atomic_compare_exchange_strong_8(
2029  volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value,
2030  memory_order _Order1, memory_order _Order2)
2031  { /* compare and exchange values atomically */
2032  _Validate_compare_exchange_memory_order(_Order1, _Order2);
2033 
2034  switch (_Memory_order_upper_bound(_Order1, _Order2))
2035  {
2036  case memory_order_relaxed:
2037  return (_Compare_exchange_relaxed_8(_Tgt, _Exp, _Value));
2038 
2039  case memory_order_consume:
2040  case memory_order_acquire:
2041  return (_Compare_exchange_acquire_8(_Tgt, _Exp, _Value));
2042 
2043  case memory_order_release:
2044  return (_Compare_exchange_release_8(_Tgt, _Exp, _Value));
2045 
2046  case memory_order_acq_rel:
2047  case memory_order_seq_cst:
2048  return (_Compare_exchange_seq_cst_8(_Tgt, _Exp, _Value));
2049 
2050  default:
2051  _INVALID_MEMORY_ORDER;
2052  return (0);
2053  }
2054  }
2055 
2056 inline int _Atomic_compare_exchange_weak_8(
2057  volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value,
2058  memory_order _Order1, memory_order _Order2)
2059  { /* compare and exchange values atomically */
2060  /* No weak compare-exchange is currently available,
2061  even for ARM, so fall back to strong */
2062  return (_Atomic_compare_exchange_strong_8(_Tgt, _Exp, _Value,
2063  _Order1, _Order2));
2064  }
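
 /* For illustration only, a hypothetical caller-side sketch (not part of
    xatomic.h) of the retry loop the compare-exchange helpers above are meant
    to drive; on failure *_Exp is refreshed with the observed value, so the
    loop can simply retry: */

inline _Uint8_t _Fetch_min_8_sketch(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
 { /* hypothetical: atomically store the minimum of *_Tgt and _Value */
 _Uint8_t _Expected = _Atomic_load_8(_Tgt, memory_order_relaxed);
 while (_Value < _Expected
  && !_Atomic_compare_exchange_weak_8(_Tgt, &_Expected, _Value,
   memory_order_seq_cst, memory_order_relaxed))
  ; /* _Expected now holds the latest value; try again */
 return (_Expected);
 }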
2065 
2066  /* _Atomic_fetch_add_8, _Atomic_fetch_sub_8 */
2067 inline _Uint8_t _Fetch_add_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2068  { /* add _Value to *_Tgt atomically with
2069  sequentially consistent memory order */
2070 
2071  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd64)((volatile long long *)_Tgt, _Value));
2072  }
2073 
2074 inline _Uint8_t _Fetch_add_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2075  { /* add _Value to *_Tgt atomically with
2076  relaxed memory order */
2077 
2078  return (_INTRIN_RELAXED(_InterlockedExchangeAdd64)((volatile long long *)_Tgt, _Value));
2079  }
2080 
2081 inline _Uint8_t _Fetch_add_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2082  { /* add _Value to *_Tgt atomically with
2083  acquire memory order */
2084 
2085  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd64)((volatile long long *)_Tgt, _Value));
2086  }
2087 
2088 inline _Uint8_t _Fetch_add_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2089  { /* add _Value to *_Tgt atomically with
2090  release memory order */
2091 
2092  return (_INTRIN_RELEASE(_InterlockedExchangeAdd64)((volatile long long *)_Tgt, _Value));
2093  }
2094 
2095 inline _Uint8_t _Atomic_fetch_add_8(
2096  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
2097  { /* add _Value to *_Tgt atomically */
2098  switch (_Order)
2099  {
2100  case memory_order_relaxed:
2101  return (_Fetch_add_relaxed_8(_Tgt, _Value));
2102 
2103  case memory_order_consume:
2104  case memory_order_acquire:
2105  return (_Fetch_add_acquire_8(_Tgt, _Value));
2106 
2107  case memory_order_release:
2108  return (_Fetch_add_release_8(_Tgt, _Value));
2109 
2110  case memory_order_acq_rel:
2111  case memory_order_seq_cst:
2112  return (_Fetch_add_seq_cst_8(_Tgt, _Value));
2113 
2114  default:
2115  _INVALID_MEMORY_ORDER;
2116  return (0);
2117  }
2118  }
2119 
2120 inline _Uint8_t _Atomic_fetch_sub_8(
2121  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
2122  { /* subtract _Value from *_Tgt atomically */
2123  return (_Atomic_fetch_add_8(_Tgt, 0 - _Value, _Order));
2124  }
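
 /* The subtraction above relies on unsigned wraparound: 0 - _Value is the
    two's complement of _Value, so adding it is equivalent to subtracting.
    A hypothetical sketch (not part of this header) making that explicit: */

inline void _Fetch_sub_sketch()
 { /* hypothetical demonstration of the wraparound trick */
 _Uint8_t _Cnt = 10;
 _Atomic_fetch_sub_8(&_Cnt, 3, memory_order_seq_cst); /* adds 0 - 3 */
 /* 0 - 3 wraps to 0xFFFFFFFFFFFFFFFD, so _Cnt is now 7 */
 }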
2125 
2126  /* _Atomic_fetch_and_8 */
2127 inline _Uint8_t _Fetch_and_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2128  { /* and _Value with *_Tgt atomically with
2129  sequentially consistent memory order */
2130 
2131  return (_INTRIN_SEQ_CST(_InterlockedAnd64)((volatile long long *)_Tgt, _Value));
2132  }
2133 
2134 inline _Uint8_t _Fetch_and_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2135  { /* and _Value with *_Tgt atomically with
2136  relaxed memory order */
2137 
2138  return (_INTRIN_RELAXED(_InterlockedAnd64)((volatile long long *)_Tgt, _Value));
2139  }
2140 
2141 inline _Uint8_t _Fetch_and_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2142  { /* and _Value with *_Tgt atomically with
2143  acquire memory order */
2144 
2145  return (_INTRIN_ACQUIRE(_InterlockedAnd64)((volatile long long *)_Tgt, _Value));
2146  }
2147 
2148 inline _Uint8_t _Fetch_and_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2149  { /* and _Value with *_Tgt atomically with
2150  release memory order */
2151 
2152  return (_INTRIN_RELEASE(_InterlockedAnd64)((volatile long long *)_Tgt, _Value));
2153  }
2154 
2155 inline _Uint8_t _Atomic_fetch_and_8(
2156  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
2157  { /* and _Value with *_Tgt atomically */
2158  switch (_Order)
2159  {
2160  case memory_order_relaxed:
2161  return (_Fetch_and_relaxed_8(_Tgt, _Value));
2162 
2163  case memory_order_consume:
2164  case memory_order_acquire:
2165  return (_Fetch_and_acquire_8(_Tgt, _Value));
2166 
2167  case memory_order_release:
2168  return (_Fetch_and_release_8(_Tgt, _Value));
2169 
2170  case memory_order_acq_rel:
2171  case memory_order_seq_cst:
2172  return (_Fetch_and_seq_cst_8(_Tgt, _Value));
2173 
2174  default:
2175  _INVALID_MEMORY_ORDER;
2176  return (0);
2177  }
2178  }
2179 
2180  /* _Atomic_fetch_or_8 */
2181 inline _Uint8_t _Fetch_or_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2182  { /* or _Value with *_Tgt atomically with
2183  sequentially consistent memory order */
2184 
2185  return (_INTRIN_SEQ_CST(_InterlockedOr64)((volatile long long *)_Tgt, _Value));
2186  }
2187 
2188 inline _Uint8_t _Fetch_or_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2189  { /* or _Value with *_Tgt atomically with
2190  relaxed memory order */
2191 
2192  return (_INTRIN_RELAXED(_InterlockedOr64)((volatile long long *)_Tgt, _Value));
2193  }
2194 
2195 inline _Uint8_t _Fetch_or_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2196  { /* or _Value with *_Tgt atomically with
2197  acquire memory order */
2198 
2199  return (_INTRIN_ACQUIRE(_InterlockedOr64)((volatile long long *)_Tgt, _Value));
2200  }
2201 
2202 inline _Uint8_t _Fetch_or_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2203  { /* or _Value with *_Tgt atomically with
2204  release memory order */
2205 
2206  return (_INTRIN_RELEASE(_InterlockedOr64)((volatile long long *)_Tgt, _Value));
2207  }
2208 
2209 inline _Uint8_t _Atomic_fetch_or_8(
2210  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
2211  { /* or _Value with *_Tgt atomically */
2212  switch (_Order)
2213  {
2214  case memory_order_relaxed:
2215  return (_Fetch_or_relaxed_8(_Tgt, _Value));
2216 
2217  case memory_order_consume:
2218  case memory_order_acquire:
2219  return (_Fetch_or_acquire_8(_Tgt, _Value));
2220 
2221  case memory_order_release:
2222  return (_Fetch_or_release_8(_Tgt, _Value));
2223 
2224  case memory_order_acq_rel:
2225  case memory_order_seq_cst:
2226  return (_Fetch_or_seq_cst_8(_Tgt, _Value));
2227 
2228  default:
2229  _INVALID_MEMORY_ORDER;
2230  return (0);
2231  }
2232  }
2233 
2234  /* _Atomic_fetch_xor_8 */
2235 inline _Uint8_t _Fetch_xor_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2236  { /* xor _Value with *_Tgt atomically with
2237  sequentially consistent memory order */
2238 
2239  return (_INTRIN_SEQ_CST(_InterlockedXor64)((volatile long long *)_Tgt, _Value));
2240  }
2241 
2242 inline _Uint8_t _Fetch_xor_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2243  { /* xor _Value with *_Tgt atomically with
2244  relaxed memory order */
2245 
2246  return (_INTRIN_RELAXED(_InterlockedXor64)((volatile long long *)_Tgt, _Value));
2247  }
2248 
2249 inline _Uint8_t _Fetch_xor_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2250  { /* xor _Value with *_Tgt atomically with
2251  acquire memory order */
2252 
2253  return (_INTRIN_ACQUIRE(_InterlockedXor64)((volatile long long *)_Tgt, _Value));
2254  }
2255 
2256 inline _Uint8_t _Fetch_xor_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2257  { /* xor _Value with *_Tgt atomically with
2258  release memory order */
2259 
2260  return (_INTRIN_RELEASE(_InterlockedXor64)((volatile long long *)_Tgt, _Value));
2261  }
2262 
2263 inline _Uint8_t _Atomic_fetch_xor_8(
2264  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
2265  { /* xor _Value with *_Tgt atomically */
2266  switch (_Order)
2267  {
2268  case memory_order_relaxed:
2269  return (_Fetch_xor_relaxed_8(_Tgt, _Value));
2270 
2271  case memory_order_consume:
2272  case memory_order_acquire:
2273  return (_Fetch_xor_acquire_8(_Tgt, _Value));
2274 
2275  case memory_order_release:
2276  return (_Fetch_xor_release_8(_Tgt, _Value));
2277 
2278  case memory_order_acq_rel:
2279  case memory_order_seq_cst:
2280  return (_Fetch_xor_seq_cst_8(_Tgt, _Value));
2281 
2282  default:
2283  _INVALID_MEMORY_ORDER;
2284  return (0);
2285  }
2286  }
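
 /* For illustration, a hypothetical helper (not part of this header) that
    uses the xor dispatcher above to flip selected bits and return the
    previous value: */

inline _Uint8_t _Toggle_bits_sketch(volatile _Uint8_t *_Flags, _Uint8_t _Mask)
 { /* hypothetical: flip the bits selected by _Mask */
 return (_Atomic_fetch_xor_8(_Flags, _Mask, memory_order_acq_rel));
 }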
2287 
2288 inline int _Atomic_flag_test_and_set(volatile _Atomic_flag_t *_Flag,
2289  memory_order _Order)
2290  { /* atomically test flag and set to true */
2291  switch (_Order)
2292  {
2293  case memory_order_relaxed:
2294  return (_INTRIN_RELAXED(_interlockedbittestandset)(_Flag, 0));
2295 
2296  case memory_order_consume:
2297  case memory_order_acquire:
2298  return (_INTRIN_ACQUIRE(_interlockedbittestandset)(_Flag, 0));
2299 
2300  case memory_order_release:
2301  return (_INTRIN_RELEASE(_interlockedbittestandset)(_Flag, 0));
2302 
2303  case memory_order_acq_rel:
2304  case memory_order_seq_cst:
2305  return (_INTRIN_SEQ_CST(_interlockedbittestandset)(_Flag, 0));
2306 
2307  default:
2308  _INVALID_MEMORY_ORDER;
2309  return (0);
2310  }
2311  }
2312 
2313 inline void _Atomic_flag_clear(volatile _Atomic_flag_t *_Flag,
2314  memory_order _Order)
2315  { /* atomically clear flag */
2316  static_assert(sizeof(_Atomic_flag_t) == sizeof(_Uint4_t),
2317  "Unexpected _Atomic_flag_t size");
2318 
2319  switch (_Order)
2320  {
2321  case memory_order_relaxed:
2322  case memory_order_release:
2323  case memory_order_seq_cst:
2324  _Atomic_store_4((volatile _Uint4_t *)_Flag, 0, _Order);
2325  break;
2326 
2327  default:
2328  _INVALID_MEMORY_ORDER;
2329  break;
2330  }
2331  }
2332 
2333 inline void _Atomic_thread_fence(memory_order _Order)
2334  { /* force memory visibility and inhibit compiler reordering */
2335  #if defined(_M_ARM) || defined(_M_ARM64)
2336  if (_Order != memory_order_relaxed)
2337  {
2338  _Memory_barrier();
2339  }
2340 
2341  #else
2342  _Compiler_barrier();
2343  if (_Order == memory_order_seq_cst)
2344  { /* force visibility */
2345  static _Uint4_t _Guard;
2346  _Atomic_exchange_4(&_Guard, 0, memory_order_seq_cst);
2347  _Compiler_barrier();
2348  }
2349  #endif
2350  }
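
 /* A hypothetical publication sketch (not part of this header) showing how
    the fence can turn relaxed stores into a release-style hand-off: */

inline void _Publish_sketch(volatile _Uint4_t *_Payload, volatile _Uint4_t *_Ready)
 { /* hypothetical writer side of a flag-based hand-off */
 _Atomic_store_4(_Payload, 42, memory_order_relaxed);
 _Atomic_thread_fence(memory_order_release); /* order the payload store */
 _Atomic_store_4(_Ready, 1, memory_order_relaxed);
 }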
2351 
2352 inline void _Atomic_signal_fence(memory_order)
2353  { /* inhibit compiler reordering */
2354  _Compiler_barrier();
2355  }
2356 
2357  #if defined(_M_ARM) || defined(_M_ARM64)
2358  #define _YIELD_PROCESSOR __yield()
2359 
2360  #else
2361  #define _YIELD_PROCESSOR
2362  #endif
2363 
2364  /* SPIN LOCK FOR LOCKING VERSIONS OF OPERATIONS */
2365  /* Use acquire semantics on lock and release on unlock. Given our
2366  current atomic_flag implementation, this ensures not just
2367  atomicity but also sequential consistency. */
2368 
2369 inline void _Lock_spin_lock(
2370  volatile _Atomic_flag_t *_Flag)
2371  { /* spin until _Flag successfully set */
2372  while (_Atomic_flag_test_and_set(_Flag, memory_order_acquire))
2373  _YIELD_PROCESSOR;
2374  }
2375 
2376 inline void _Unlock_spin_lock(
2377  volatile _Atomic_flag_t *_Flag)
2378  { /* release previously obtained lock */
2379  _Atomic_flag_clear(_Flag, memory_order_release);
2380  }
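
 /* For illustration, a hypothetical critical section (not part of this
    header) built from the pair above; the acquire on lock and the release
    on unlock keep the guarded accesses inside the section: */

inline void _Guarded_increment_sketch(volatile _Atomic_flag_t *_Flag,
 int *_Counter)
 { /* hypothetical: non-atomic increment made safe by the spin lock */
 _Lock_spin_lock(_Flag);
 ++*_Counter;
 _Unlock_spin_lock(_Flag);
 }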
2381 
2382  /* ATOMIC OPERATIONS FOR OBJECTS WITH SIZES THAT
2383  DON'T MATCH THE SIZE OF ANY INTEGRAL TYPE */
2384 inline void _Atomic_copy(
2385  volatile _Atomic_flag_t *_Flag, size_t _Size,
2386  volatile void *_Tgt, volatile const void *_Src,
2387  memory_order)
2388  { /* atomically copy *_Src to *_Tgt with memory ordering */
2389  _Lock_spin_lock(_Flag);
2390  _CSTD memcpy((void *)_Tgt, (void *)_Src, _Size);
2391  _Unlock_spin_lock(_Flag);
2392  }
2393 
2394 inline void _Atomic_exchange(
2395  volatile _Atomic_flag_t *_Flag, size_t _Size,
2396  volatile void *_Tgt, volatile void *_Src,
2397  memory_order)
2398  { /* atomically swap *_Src and *_Tgt with memory ordering */
2399  unsigned char *_Left = (unsigned char *)_Tgt;
2400  unsigned char *_Right = (unsigned char *)_Src;
2401 
2402  _Lock_spin_lock(_Flag);
2403  for (; 0 < _Size; --_Size)
2404  { /* copy bytes */
2405  unsigned char _Tmp = *_Left;
2406  *_Left++ = *_Right;
2407  *_Right++ = _Tmp;
2408  }
2409  _Unlock_spin_lock(_Flag);
2410  }
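
 /* For illustration, a hypothetical caller (not part of this header) that
    routes a struct whose size matches no integral type through the
    lock-based copy helper above: */

struct _Triple_sketch
 { /* hypothetical 12-byte payload */
 int _A, _B, _C;
 };

inline void _Copy_triple_sketch(volatile _Atomic_flag_t *_Flag,
 volatile _Triple_sketch *_Dst, const volatile _Triple_sketch *_Src)
 { /* hypothetical: copy under the spin lock guarding _Dst */
 _Atomic_copy(_Flag, sizeof(_Triple_sketch), _Dst, _Src,
  memory_order_seq_cst);
 }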
2411 
2412 inline int _Atomic_compare_exchange_weak(
2413  volatile _Atomic_flag_t *_Flag, size_t _Size,
2414  volatile void *_Tgt, volatile void *_Exp, const volatile void *_Src,
2415  memory_order, memory_order)
2416  { /* atomically compare and exchange with memory ordering */
2417  int _Result;
2418 
2419  _Lock_spin_lock(_Flag);
2420  _Result = _CSTD memcmp((const void *)_Tgt, (const void *)_Exp, _Size) == 0;
2421  if (_Result != 0)
2422  _CSTD memcpy((void *)_Tgt, (void *)_Src, _Size);
2423  else
2424  _CSTD memcpy((void *)_Exp, (void *)_Tgt, _Size);
2425  _Unlock_spin_lock(_Flag);
2426  return (_Result);
2427  }
2428 
2429 inline int _Atomic_compare_exchange_strong(
2430  volatile _Atomic_flag_t *_Flag, size_t _Size,
2431  volatile void *_Tgt, volatile void *_Exp, const volatile void *_Src,
2432  memory_order _Order1, memory_order _Order2)
2433  { /* atomically compare and exchange with memory ordering */
2434  return (_Atomic_compare_exchange_weak(_Flag, _Size, _Tgt, _Exp, _Src,
2435  _Order1, _Order2));
2436  }
2437 
2438  #if _USE_INTERLOCKED_REFCOUNTING == 0
2439  /* ATOMIC REFERENCE COUNTING */
2440 inline _Atomic_integral_t _Inc_atomic_counter_explicit(
2441  _Atomic_counter_t& _Counter, memory_order _Order)
2442  { // atomically increment counter and return result
2443  return (_Atomic_fetch_add_4(&_Counter, 1, _Order) + 1);
2444  }
2445 
2446 inline _Atomic_integral_t _Inc_atomic_counter(_Atomic_counter_t& _Counter)
2447  { // atomically increment counter and return result
2448  return (_Inc_atomic_counter_explicit(_Counter, memory_order_seq_cst));
2449  }
2450 
2451 inline _Atomic_integral_t _Dec_atomic_counter_explicit(
2452  _Atomic_counter_t& _Counter, memory_order _Order)
2453  { // atomically decrement counter and return result
2454  return (_Atomic_fetch_sub_4(&_Counter, 1, _Order) - 1);
2455  }
2456 
2457 inline _Atomic_integral_t _Dec_atomic_counter(_Atomic_counter_t& _Counter)
2458  { // atomically decrement counter and return result
2459  return (_Dec_atomic_counter_explicit(_Counter, memory_order_seq_cst));
2460  }
2461 
2462 inline _Atomic_integral_t _Load_atomic_counter_explicit(
2463  _Atomic_counter_t& _Counter, memory_order _Order)
2464  { // atomically load counter and return result
2465  return (_Atomic_load_4(&_Counter, _Order));
2466  }
2467 
2468 inline _Atomic_integral_t _Load_atomic_counter(_Atomic_counter_t& _Counter)
2469  { // atomically load counter and return result
2470  return (_Load_atomic_counter_explicit(_Counter, memory_order_seq_cst));
2471  }
2472 
2473 inline _Atomic_integral_t _Compare_increment_atomic_counter_explicit(
2474  _Atomic_counter_t& _Counter,
2475  _Atomic_integral_t _Expected,
2476  memory_order _Order)
2477  { // atomically compare and increment counter and return result
2478  return (_Atomic_compare_exchange_strong_4(
2479  &_Counter, &_Expected, _Expected + 1,
2480  _Order, _Order));
2481  }
2482 
2483 inline _Atomic_integral_t _Compare_increment_atomic_counter(
2484  _Atomic_counter_t& _Counter, _Atomic_integral_t _Expected)
2485  { // atomically compare and increment counter and return result
2486  return (_Compare_increment_atomic_counter_explicit(
2487  _Counter, _Expected, memory_order_seq_cst));
2488  }
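
 /* As a hypothetical sketch (not part of this header), the counter helpers
    above compose into a simple reference count of the kind used for shared
    ownership: */

inline void _Release_ref_sketch(_Atomic_counter_t& _Uses)
 { /* hypothetical: drop one reference, act on the last one */
 if (_Dec_atomic_counter(_Uses) == 0)
  { /* last owner: release the managed resource here */
  }
 }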
2489  #endif /* _USE_INTERLOCKED_REFCOUNTING == 0 */
2490 _STD_END
2491 
2492  #if defined(_M_IX86)
2493 #pragma pop_macro("_InterlockedExchange64")
2494 #pragma pop_macro("_InterlockedExchangeAdd64")
2495 #pragma pop_macro("_InterlockedAnd64")
2496 #pragma pop_macro("_InterlockedOr64")
2497 #pragma pop_macro("_InterlockedXor64")
2498  #endif /* defined(_M_IX86) */
2499 
2500  #pragma pop_macro("new")
2501  #pragma warning(pop)
2502  #pragma pack(pop)
2503 #endif /* RC_INVOKED */
2504 #endif /* _XATOMIC_H */
2505 
2506 /*
2507  * Copyright (c) by P.J. Plauger. All rights reserved.
2508  * Consult your license regarding permissions and restrictions.
2509 V6.50:0009 */