xatomic.h
1 /* xatomic.h internal header */
2 #pragma once
3 #ifndef _XATOMIC_H
4 #define _XATOMIC_H
5 #ifndef RC_INVOKED
6 #include <xatomic0.h>
7 #include <stddef.h> // for size_t
8 #include <stdlib.h>
9 #include <string.h>
10 
11 #include <intrin.h>
12 #include <xutility>
13 
14  #pragma pack(push,_CRT_PACKING)
15  #pragma warning(push,3)
16  #pragma push_macro("new")
17  #undef new
18 
19  /* Defend inline assembler from iso646.h's macros. */
20  #pragma push_macro("and")
21  #pragma push_macro("or")
22  #pragma push_macro("xor")
23  #undef and
24  #undef or
25  #undef xor
26 
27  #pragma warning (disable: 4100 4390 4793 6326)
28 
29  #define _Compiler_barrier() _ReadWriteBarrier()
30 
31  #if defined(_M_ARM)
32  #define _Memory_barrier() __dmb(_ARM_BARRIER_ISH)
33  #endif /* defined(_M_ARM) */
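/* Note on the two fences above: _Compiler_barrier() only prevents the
   compiler from reordering memory accesses across it and emits no machine
   code, while _Memory_barrier() (defined for ARM only) emits a DMB ISH
   hardware barrier. On x86/x64 the strongly ordered hardware model makes a
   compiler-only fence sufficient for acquire and release semantics, which
   is why no hardware fence macro is needed for those targets here. */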
34 
35  #ifndef _CONCAT
36  #define _CONCATX(x, y) x ## y
37  #define _CONCAT(x, y) _CONCATX(x, y)
38  #endif /* _CONCAT */
39 
40 #define ATOMIC_BOOL_LOCK_FREE \
41  (1 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
42 #define _ATOMIC_CHAR_LOCK_FREE \
43  (1 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
44 #define _ATOMIC_CHAR16_T_LOCK_FREE \
45  (2 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
46 #define _ATOMIC_CHAR32_T_LOCK_FREE \
47  (2 <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
48 #define _ATOMIC_WCHAR_T_LOCK_FREE \
49  (_WCHAR_T_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
50 #define _ATOMIC_SHORT_LOCK_FREE \
51  (_SHORT_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
52 #define _ATOMIC_INT_LOCK_FREE \
53  (_INT_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
54 #define _ATOMIC_LONG_LOCK_FREE \
55  (_LONG_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
56 #define _ATOMIC_LLONG_LOCK_FREE \
57  (_LONGLONG_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
58 #define ATOMIC_POINTER_LOCK_FREE \
59  (_ADDR_SIZE <= _ATOMIC_MAXBYTES_LOCK_FREE ? 2 : 0)
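/* These macros follow the C++11 *_LOCK_FREE convention: 0 = never
   lock-free, 1 = sometimes lock-free, 2 = always lock-free. Each comparison
   against _ATOMIC_MAXBYTES_LOCK_FREE is a compile-time constant, so every
   macro collapses to 0 or 2; e.g. with _ATOMIC_MAXBYTES_LOCK_FREE == 8 they
   all evaluate to 2. ATOMIC_BOOL_LOCK_FREE and ATOMIC_POINTER_LOCK_FREE
   carry their standard names directly; the underscore-prefixed internal
   macros are presumably mapped back to their standard names by <atomic>. */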
60 
61  /* Interlocked intrinsic mapping for _nf/_acq/_rel */
62 
63  #if defined(_M_ARM)
64  #define _INTRIN_RELAXED(x) _CONCAT(x, _nf)
65  #define _INTRIN_ACQUIRE(x) _CONCAT(x, _acq)
66  #define _INTRIN_RELEASE(x) _CONCAT(x, _rel)
67  #define _INTRIN_SEQ_CST(x) x
68 
69  #else /* defined(_M_ARM) */
70  #define _INTRIN_RELAXED(x) x
71  #define _INTRIN_ACQUIRE(x) x
72  #define _INTRIN_RELEASE(x) x
73  #define _INTRIN_SEQ_CST(x) x
74  #endif /* defined(_M_ARM) */
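/* Example expansion (illustrative): on ARM,
   _INTRIN_ACQUIRE(_InterlockedExchange8) token-pastes via _CONCAT to
   _InterlockedExchange8_acq, the acquire-only form of the intrinsic; on
   x86/x64 it expands to plain _InterlockedExchange8, whose full barrier
   already implies both acquire and release. */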
75 
76  #if defined(_M_IX86)
77 #pragma push_macro("_InterlockedExchange64")
78 #pragma push_macro("_InterlockedExchangeAdd64")
79 #pragma push_macro("_InterlockedAnd64")
80 #pragma push_macro("_InterlockedOr64")
81 #pragma push_macro("_InterlockedXor64")
82 
83 #undef _InterlockedExchange64
84 #undef _InterlockedExchangeAdd64
85 #undef _InterlockedAnd64
86 #undef _InterlockedOr64
87 #undef _InterlockedXor64
88 
89 #define _InterlockedExchange64 _InterlockedExchange64_ASM
90 #define _InterlockedExchangeAdd64 _InterlockedExchangeAdd64_ASM
91 #define _InterlockedAnd64 _InterlockedAnd64_ASM
92 #define _InterlockedOr64 _InterlockedOr64_ASM
93 #define _InterlockedXor64 _InterlockedXor64_ASM
94 
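/* 32-bit x86 lacks 64-bit interlocked intrinsics for exchange, add, and
   the bitwise operations, so the _ASM functions below emulate them with a
   lock cmpxchg8b retry loop. In outline (edx:eax = value observed at *_Tgt,
   ecx:ebx = desired value):

     observed = *_Tgt;                               // edx:eax
     do
         desired = op(observed, _Value);             // ecx:ebx
     while (!cmpxchg8b(_Tgt, &observed, desired));   // retry on contention

   cmpxchg8b refreshes edx:eax with the current contents when it fails, so
   each retry recomputes from fresh data. */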
95 inline _LONGLONG _InterlockedExchange64(volatile _LONGLONG * _Tgt, _LONGLONG _Value)
96 {
97 _Compiler_barrier();
98  __asm
99  {
100  mov esi, _Tgt;
101  mov ecx, dword ptr _Value[4];
102  mov ebx, dword ptr _Value;
103  again:
104  lock cmpxchg8b [esi];
105  jnz again;
106  mov dword ptr _Value[4], edx;
107  mov dword ptr _Value, eax;
108 }
109 _Compiler_barrier();
110 
111  return (_Value);
112 }
113 
114 inline _LONGLONG _InterlockedExchangeAdd64(volatile _LONGLONG * _Tgt, _LONGLONG _Value)
115 {
116 _Compiler_barrier();
117  __asm
118  {
119  mov esi, _Tgt;
120  mov edx, 4[esi];
121  mov eax, [esi];
122  again:
123  mov ecx, edx;
124  mov ebx, eax;
125  add ebx, dword ptr _Value;
126  adc ecx, dword ptr _Value[4];
127  lock cmpxchg8b [esi];
128  jnz again;
129  mov dword ptr _Value, eax;
130  mov dword ptr _Value[4], edx;
131 }
132 _Compiler_barrier();
133 
134  return (_Value);
135 }
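/* The add/adc pair above forms the 64-bit sum in two 32-bit halves: add
   combines the low words and adc combines the high words plus the carry
   produced by the low-word addition. */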
136 
137 inline _LONGLONG _InterlockedAnd64(volatile _LONGLONG *_Tgt, _LONGLONG _Value)
138 {
139 _Compiler_barrier();
140  __asm
141  {
142  mov esi, _Tgt;
143  mov eax, [esi];
144  mov edx, 4[esi];
145  again:
146  mov ecx, edx;
147  mov ebx, eax;
148  and ebx, dword ptr _Value;
149  and ecx, dword ptr _Value[4];
150  lock cmpxchg8b [esi];
151  jnz again;
152  mov dword ptr _Value, eax;
153  mov dword ptr _Value[4], edx;
154 }
155 _Compiler_barrier();
156 
157  return (_Value);
158 }
159 
160 inline _LONGLONG _InterlockedOr64(volatile _LONGLONG *_Tgt, _LONGLONG _Value)
161 {
162 _Compiler_barrier();
163  __asm
164  {
165  mov esi, _Tgt;
166  mov eax, [esi];
167  mov edx, 4[esi];
168  again:
169  mov ecx, edx;
170  mov ebx, eax;
171  or ebx, dword ptr _Value;
172  or ecx, dword ptr _Value[4];
173  lock cmpxchg8b [esi];
174  jnz again;
175  mov dword ptr _Value, eax;
176  mov dword ptr _Value[4], edx;
177 }
178 _Compiler_barrier();
179 
180  return (_Value);
181 }
182 
183 inline _LONGLONG _InterlockedXor64(volatile _LONGLONG *_Tgt, _LONGLONG _Value)
184 {
185 _Compiler_barrier();
186  __asm
187  {
188  mov esi, _Tgt;
189  mov eax, [esi];
190  mov edx, 4[esi];
191  again:
192  mov ecx, edx;
193  mov ebx, eax;
194  xor ebx, dword ptr _Value;
195  xor ecx, dword ptr _Value[4];
196  lock cmpxchg8b [esi];
197  jnz again;
198  mov dword ptr _Value, eax;
199  mov dword ptr _Value[4], edx;
200 }
201 _Compiler_barrier();
202 
203  return (_Value);
204 }
205  #endif /* defined(_M_IX86) */
206 
207 _STD_BEGIN
208 /* TYPEDEFS FOR INTERNAL ARITHMETIC TYPES */
209 typedef unsigned char _Uint1_t;
210 typedef unsigned short _Uint2_t;
211 //typedef _Uint32t _Uint4_t;
212 typedef unsigned _LONGLONG _Uint8_t;
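/* _Uint4_t has no typedef here because <xatomic0.h>, included above,
   evidently defines it already (from _Uint32t); the commented-out line
   records that. */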
213 
214  #define _ATOMIC_FLAG_TEST_AND_SET _Atomic_flag_test_and_set
215  #define _ATOMIC_FLAG_CLEAR _Atomic_flag_clear
216 
217  #define _ATOMIC_THREAD_FENCE _Atomic_thread_fence
218  #define _ATOMIC_SIGNAL_FENCE _Atomic_signal_fence
219 
220  #ifndef _INVALID_MEMORY_ORDER
221 
222  #if _ITERATOR_DEBUG_LEVEL == 2
223  #define _INVALID_MEMORY_ORDER \
224  {_DEBUG_ERROR("Invalid memory_order"); \
225  _SCL_SECURE_INVALID_ARGUMENT}
226 
227  #elif _ITERATOR_DEBUG_LEVEL == 1
228  #define _INVALID_MEMORY_ORDER \
229  _SCL_SECURE_VALIDATE("Invalid memory_order" && 0)
230 
231  #elif _ITERATOR_DEBUG_LEVEL == 0
232  #define _INVALID_MEMORY_ORDER
233  #endif /* _ITERATOR_DEBUG_LEVEL */
234  #endif /* _INVALID_MEMORY_ORDER */
235 
236 inline memory_order _Memory_order_upper_bound(memory_order _Order1,
237 memory_order _Order2)
238  { /* find upper bound of two memory orders,
239  based on the following partial order:
240 
241  seq_cst
242  |
243  acq_rel
244  / \
245  acquire release
246  | |
247  consume |
248  \ /
249  relaxed
250 
251  */
252 
253 static const memory_order _Upper[6][6] = { /* combined upper bounds */
254 {memory_order_relaxed, memory_order_consume, memory_order_acquire,
255 memory_order_release, memory_order_acq_rel, memory_order_seq_cst},
256 {memory_order_consume, memory_order_consume, memory_order_acquire,
257 memory_order_acq_rel, memory_order_acq_rel, memory_order_seq_cst},
258 {memory_order_acquire, memory_order_acquire, memory_order_acquire,
259 memory_order_acq_rel, memory_order_acq_rel, memory_order_seq_cst},
260 {memory_order_release, memory_order_acq_rel, memory_order_acq_rel,
261 memory_order_release, memory_order_acq_rel, memory_order_seq_cst},
262 {memory_order_acq_rel, memory_order_acq_rel, memory_order_acq_rel,
263 memory_order_acq_rel, memory_order_acq_rel, memory_order_seq_cst},
264 {memory_order_seq_cst, memory_order_seq_cst, memory_order_seq_cst,
265 memory_order_seq_cst, memory_order_seq_cst, memory_order_seq_cst
266  }
267  };
268 
269  if ((_Order1 < 0) || (6 <= _Order1)
270  || (_Order2 < 0) || (6 <= _Order2))
271 { /* launder memory order */
272 _INVALID_MEMORY_ORDER;
273  return (memory_order_seq_cst);
274  }
275  return (_Upper[_Order1][_Order2]);
276  }
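/* Example (illustrative): a compare-exchange with success order
   memory_order_release and failure order memory_order_acquire must execute
   with the weakest order at least as strong as both, so

     _Memory_order_upper_bound(memory_order_release, memory_order_acquire)

   returns memory_order_acq_rel, the join of the two arguments in the
   lattice drawn above. */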
277 
278 inline void _Validate_compare_exchange_memory_order(
279 memory_order _Success, memory_order _Failure)
280  { /* validate success/failure */
281  /* _Failure may not be memory_order_release or memory_order_acq_rel
282  and may not be stronger than _Success */
283  switch (_Failure)
284 {
285 case memory_order_relaxed:
286 break;
287 
288 case memory_order_seq_cst:
289 if (_Success != memory_order_seq_cst)
290 _INVALID_MEMORY_ORDER;
291 break;
292 
293 case memory_order_acquire:
294 if ((_Success == memory_order_consume) ||
295 (_Success == memory_order_relaxed))
296 _INVALID_MEMORY_ORDER;
297 break;
298 
299 case memory_order_consume:
300 if (_Success == memory_order_relaxed)
301 _INVALID_MEMORY_ORDER;
302 break;
303 
304 default:
305 _INVALID_MEMORY_ORDER;
306 break;
307  }
308  }
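/* Examples (illustrative): (_Success, _Failure) pairs such as
   (memory_order_acq_rel, memory_order_acquire) pass, while
   (memory_order_relaxed, memory_order_seq_cst) fails because the failure
   order is stronger than the success order, and any memory_order_release or
   memory_order_acq_rel failure order hits the default case. Violations
   raise _INVALID_MEMORY_ORDER, which diagnoses at _ITERATOR_DEBUG_LEVEL 1
   and 2 and expands to nothing at level 0. */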
309 
310  /* _Atomic_store_1 */
311 inline void _Store_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
312  { /* store _Value atomically with relaxed memory order */
313  #if defined(_M_ARM)
314  __iso_volatile_store8((volatile char *)_Tgt, _Value);
315 
316  #else
317  *_Tgt = _Value;
318  #endif
319  }
320 
321 inline void _Store_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
322  { /* store _Value atomically with release memory order */
323  #if defined(_M_ARM)
324  _Memory_barrier();
325  __iso_volatile_store8((volatile char *)_Tgt, _Value);
326 
327  #else
328 _Compiler_barrier();
329 *_Tgt = _Value;
330  #endif
331  }
332 
333 inline void _Store_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
334  { /* store _Value atomically with
335  sequentially consistent memory order */
336 
337  #if defined(_M_ARM)
338  _Memory_barrier();
339  __iso_volatile_store8((volatile char *)_Tgt, _Value);
340  _Memory_barrier();
341 
342  #else
343  _INTRIN_SEQ_CST(_InterlockedExchange8)((volatile char *)_Tgt, _Value);
344  #endif
345  }
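/* A sequentially consistent store cannot be a plain store on x86/x64,
   because the hardware may reorder a later load ahead of it; the locked
   _InterlockedExchange8 supplies the required full fence, and the previous
   value it returns is simply discarded. */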
346 
347 inline void _Atomic_store_1(
348  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
349  { /* store _Value atomically */
350  switch (_Order)
351 {
352 case memory_order_relaxed:
353 _Store_relaxed_1(_Tgt, _Value);
354 break;
355 
356 case memory_order_release:
357 _Store_release_1(_Tgt, _Value);
358 break;
359 
360 case memory_order_seq_cst:
361 _Store_seq_cst_1(_Tgt, _Value);
362 break;
363 
364 default:
365 _INVALID_MEMORY_ORDER;
366 break;
367  }
368  }
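/* Only relaxed, release, and seq_cst reach a store here: C++11 forbids
   atomic stores with consume, acquire, or acq_rel ordering, so those orders
   fall through to the default case and _INVALID_MEMORY_ORDER. */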
369 
370  /* _Atomic_load_1 */
371 inline _Uint1_t _Load_seq_cst_1(volatile _Uint1_t *_Tgt)
372  { /* load from *_Tgt atomically with
373  sequentially consistent memory order */
374  _Uint1_t _Value;
375 
376  #if defined(_M_ARM)
377  _Value = __iso_volatile_load8((volatile char *)_Tgt);
378  _Memory_barrier();
379 
380  #else
381 _Value = *_Tgt;
382 _Compiler_barrier();
383  #endif
384 
385  return (_Value);
386  }
387 
388 inline _Uint1_t _Load_relaxed_1(volatile _Uint1_t *_Tgt)
389  { /* load from *_Tgt atomically with
390  relaxed memory order */
391  _Uint1_t _Value;
392 
393  #if defined(_M_ARM)
394  _Value = __iso_volatile_load8((volatile char *)_Tgt);
395 
396  #else
397  _Value = *_Tgt;
398  #endif
399 
400  return (_Value);
401  }
402 
403 inline _Uint1_t _Load_acquire_1(volatile _Uint1_t *_Tgt)
404  { /* load from *_Tgt atomically with
405  acquire memory order */
406 
407  return (_Load_seq_cst_1(_Tgt));
408  }
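/* Acquire and consume loads are deliberately forwarded to the seq_cst
   load: the seq_cst form is at least as strong, so this is conservative but
   correct, and on x86/x64 both reduce to a plain load followed by a
   compiler barrier anyway. */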
409 
410 inline _Uint1_t _Atomic_load_1(
411  volatile _Uint1_t *_Tgt, memory_order _Order)
412  { /* load from *_Tgt atomically */
413  switch (_Order)
414 {
415 case memory_order_relaxed:
416 return (_Load_relaxed_1(_Tgt));
417 
418 case memory_order_consume:
419 case memory_order_acquire:
420 return (_Load_acquire_1(_Tgt));
421 
422 case memory_order_seq_cst:
423 return (_Load_seq_cst_1(_Tgt));
424 
425 default:
426 _INVALID_MEMORY_ORDER;
427 return (0);
428  }
429  }
430 
431  /* _Atomic_exchange_1 */
432 inline _Uint1_t _Exchange_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
433  { /* exchange _Value and *_Tgt atomically with
434  sequentially consistent memory order */
435 
436  return (_INTRIN_SEQ_CST(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
437  }
438 
439 inline _Uint1_t _Exchange_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
440  { /* exchange _Value and *_Tgt atomically with
441  relaxed memory order */
442 
443  return (_INTRIN_RELAXED(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
444  }
445 
446 inline _Uint1_t _Exchange_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
447  { /* exchange _Value and *_Tgt atomically with
448  acquire memory order */
449 
450  return (_INTRIN_ACQUIRE(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
451  }
452 
453 inline _Uint1_t _Exchange_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
454  { /* exchange _Value and *_Tgt atomically with
455  release memory order */
456 
457  return (_INTRIN_RELEASE(_InterlockedExchange8)((volatile char *)_Tgt, _Value));
458  }
459 
460 inline _Uint1_t _Atomic_exchange_1(
461  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
462  { /* exchange _Value and *_Tgt atomically */
463  switch (_Order)
464 {
465 case memory_order_relaxed:
466 return (_Exchange_relaxed_1(_Tgt, _Value));
467 
468 case memory_order_consume:
469 case memory_order_acquire:
470 return (_Exchange_acquire_1(_Tgt, _Value));
471 
472 case memory_order_release:
473 return (_Exchange_release_1(_Tgt, _Value));
474 
475 case memory_order_acq_rel:
476 case memory_order_seq_cst:
477 return (_Exchange_seq_cst_1(_Tgt, _Value));
478 
479 default:
480 _INVALID_MEMORY_ORDER;
481 return (0);
482  }
483  }
484 
485  /* _Atomic_compare_exchange_weak_1, _Atomic_compare_exchange_strong_1 */
486 inline int _Compare_exchange_seq_cst_1(volatile _Uint1_t *_Tgt,
487  _Uint1_t *_Exp, _Uint1_t _Value)
488  { /* compare and exchange values atomically with
489  sequentially consistent memory order */
490 
491  int _Res;
492 
493  _Uint1_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange8)((volatile char *)_Tgt,
494  _Value, *_Exp);
495 
496  if (_Prev == *_Exp)
497  _Res = 1;
498  else
499  { /* copy old value */
500  _Res = 0;
501  *_Exp = _Prev;
502  }
503 
504  return (_Res);
505  }
506 
507 inline int _Compare_exchange_relaxed_1(volatile _Uint1_t *_Tgt,
508  _Uint1_t *_Exp, _Uint1_t _Value)
509  { /* compare and exchange values atomically with
510  relaxed memory order */
511  int _Res;
512 
513  _Uint1_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange8)((volatile char *)_Tgt,
514  _Value, *_Exp);
515 
516  if (_Prev == *_Exp)
517  _Res = 1;
518  else
519  { /* copy old value */
520  _Res = 0;
521  *_Exp = _Prev;
522  }
523 
524  return (_Res);
525  }
526 
527 inline int _Compare_exchange_acquire_1(volatile _Uint1_t *_Tgt,
528  _Uint1_t *_Exp, _Uint1_t _Value)
529  { /* compare and exchange values atomically with
530  acquire memory order */
531  int _Res;
532 
533  _Uint1_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange8)((volatile char *)_Tgt,
534  _Value, *_Exp);
535 
536  if (_Prev == *_Exp)
537  _Res = 1;
538  else
539  { /* copy old value */
540  _Res = 0;
541  *_Exp = _Prev;
542  }
543 
544  return (_Res);
545  }
546 
547 inline int _Compare_exchange_release_1(volatile _Uint1_t *_Tgt,
548  _Uint1_t *_Exp, _Uint1_t _Value)
549  { /* compare and exchange values atomically with
550  release memory order */
551  int _Res;
552 
553  _Uint1_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange8)((volatile char *)_Tgt,
554  _Value, *_Exp);
555 
556  if (_Prev == *_Exp)
557  _Res = 1;
558  else
559  { /* copy old value */
560  _Res = 0;
561  *_Exp = _Prev;
562  }
563 
564  return (_Res);
565  }
566 
567 inline int _Atomic_compare_exchange_strong_1(
568 volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value,
569  memory_order _Order1, memory_order _Order2)
570  { /* compare and exchange values atomically */
571  _Validate_compare_exchange_memory_order(_Order1, _Order2);
572 
573  switch (_Memory_order_upper_bound(_Order1, _Order2))
574 {
575 case memory_order_relaxed:
576 return (_Compare_exchange_relaxed_1(_Tgt, _Exp, _Value));
577 
578 case memory_order_consume:
579 case memory_order_acquire:
580 return (_Compare_exchange_acquire_1(_Tgt, _Exp, _Value));
581 
582 case memory_order_release:
583 return (_Compare_exchange_release_1(_Tgt, _Exp, _Value));
584 
585 case memory_order_acq_rel:
586 case memory_order_seq_cst:
587 return (_Compare_exchange_seq_cst_1(_Tgt, _Exp, _Value));
588 
589 default:
590 _INVALID_MEMORY_ORDER;
591 return (0);
592  }
593  }
594 
595 inline int _Atomic_compare_exchange_weak_1(
596 volatile _Uint1_t *_Tgt, _Uint1_t *_Exp, _Uint1_t _Value,
597  memory_order _Order1, memory_order _Order2)
598  { /* compare and exchange values atomically */
599  /* No weak compare-exchange is currently available,
600  even for ARM, so fall back to strong */
601  return (_Atomic_compare_exchange_strong_1(_Tgt, _Exp, _Value,
602  _Order1, _Order2));
603  }
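/* Typical caller-side retry loop (an illustrative sketch; _Counter is a
   hypothetical _Uint1_t object, not part of this header):

     _Uint1_t _Old = _Atomic_load_1(&_Counter, memory_order_relaxed);
     while (!_Atomic_compare_exchange_weak_1(&_Counter, &_Old,
         (_Uint1_t)(_Old + 1), memory_order_seq_cst, memory_order_relaxed))
         ;   // a failed attempt stored the observed value back into _Old

   The loop works because, on failure, the compare-exchange writes the value
   it actually saw into *_Exp, so the next iteration starts from fresh
   data. */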
604 
605  /* _Atomic_fetch_add_1, _Atomic_fetch_sub_1 */
606 inline _Uint1_t _Fetch_add_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
607  { /* add _Value to *_Tgt atomically with
608  sequentially consistent memory order */
609 
610  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
611  }
612 
613 inline _Uint1_t _Fetch_add_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
614  { /* add _Value to *_Tgt atomically with
615  relaxed memory order */
616 
617  return (_INTRIN_RELAXED(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
618  }
619 
620 inline _Uint1_t _Fetch_add_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
621  { /* add _Value to *_Tgt atomically with
622  acquire memory order */
623 
624  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
625  }
626 
627 inline _Uint1_t _Fetch_add_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
628  { /* add _Value to *_Tgt atomically with
629  release memory order */
630 
631  return (_INTRIN_RELEASE(_InterlockedExchangeAdd8)((volatile char *)_Tgt, _Value));
632  }
633 
634 inline _Uint1_t _Atomic_fetch_add_1(
635  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
636  { /* add _Value to *_Tgt atomically */
637  switch (_Order)
638 {
639 case memory_order_relaxed:
640 return (_Fetch_add_relaxed_1(_Tgt, _Value));
641 
642 case memory_order_consume:
643 case memory_order_acquire:
644 return (_Fetch_add_acquire_1(_Tgt, _Value));
645 
646 case memory_order_release:
647 return (_Fetch_add_release_1(_Tgt, _Value));
648 
649 case memory_order_acq_rel:
650 case memory_order_seq_cst:
651 return (_Fetch_add_seq_cst_1(_Tgt, _Value));
652 
653 default:
654 _INVALID_MEMORY_ORDER;
655 return (0);
656  }
657  }
658 
659 inline _Uint1_t _Atomic_fetch_sub_1(
660  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
661  { /* subtract _Value from *_Tgt atomically */
662  return (_Atomic_fetch_add_1(_Tgt, 0 - _Value, _Order));
663  }
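/* Subtraction is addition of the unsigned negation: 0 - _Value wraps
   modulo 256 for this one-byte type, so _Atomic_fetch_sub_1(_Tgt, 1, _Order)
   behaves exactly like _Atomic_fetch_add_1(_Tgt, 0xFF, _Order). Doing the
   arithmetic on the unsigned _Uint1_t keeps the wraparound well defined. */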
664 
665  /* _Atomic_fetch_and_1 */
666 inline _Uint1_t _Fetch_and_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
667  { /* and _Value with *_Tgt atomically with
668  sequentially consistent memory order */
669 
670  return (_INTRIN_SEQ_CST(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
671  }
672 
673 inline _Uint1_t _Fetch_and_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
674  { /* and _Value with *_Tgt atomically with
675  relaxed memory order */
676 
677  return (_INTRIN_RELAXED(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
678  }
679 
680 inline _Uint1_t _Fetch_and_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
681  { /* and _Value with *_Tgt atomically with
682  acquire memory order */
683 
684  return (_INTRIN_ACQUIRE(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
685  }
686 
687 inline _Uint1_t _Fetch_and_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
688  { /* and _Value with *_Tgt atomically with
689  release memory order */
690 
691  return (_INTRIN_RELEASE(_InterlockedAnd8)((volatile char *)_Tgt, _Value));
692  }
693 
694 inline _Uint1_t _Atomic_fetch_and_1(
695  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
696  { /* and _Value with *_Tgt atomically */
697  switch (_Order)
698 {
699 case memory_order_relaxed:
700 return (_Fetch_and_relaxed_1(_Tgt, _Value));
701 
702 case memory_order_consume:
703 case memory_order_acquire:
704 return (_Fetch_and_acquire_1(_Tgt, _Value));
705 
706 case memory_order_release:
707 return (_Fetch_and_release_1(_Tgt, _Value));
708 
709 case memory_order_acq_rel:
710 case memory_order_seq_cst:
711 return (_Fetch_and_seq_cst_1(_Tgt, _Value));
712 
713 default:
714 _INVALID_MEMORY_ORDER;
715 return (0);
716  }
717  }
718 
719  /* _Atomic_fetch_or_1 */
720 inline _Uint1_t _Fetch_or_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
721  { /* or _Value with *_Tgt atomically with
722  sequentially consistent memory order */
723 
724  return (_INTRIN_SEQ_CST(_InterlockedOr8)((volatile char *)_Tgt, _Value));
725  }
726 
727 inline _Uint1_t _Fetch_or_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
728  { /* or _Value with *_Tgt atomically with
729  relaxed memory order */
730 
731  return (_INTRIN_RELAXED(_InterlockedOr8)((volatile char *)_Tgt, _Value));
732  }
733 
734 inline _Uint1_t _Fetch_or_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
735  { /* or _Value with *_Tgt atomically with
736  acquire memory order */
737 
738  return (_INTRIN_ACQUIRE(_InterlockedOr8)((volatile char *)_Tgt, _Value));
739  }
740 
741 inline _Uint1_t _Fetch_or_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
742  { /* or _Value with *_Tgt atomically with
743  release memory order */
744 
745  return (_INTRIN_RELEASE(_InterlockedOr8)((volatile char *)_Tgt, _Value));
746  }
747 
748 inline _Uint1_t _Atomic_fetch_or_1(
749  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
750  { /* or _Value with *_Tgt atomically */
751  switch (_Order)
752 {
753 case memory_order_relaxed:
754 return (_Fetch_or_relaxed_1(_Tgt, _Value));
755 
756 case memory_order_consume:
757 case memory_order_acquire:
758 return (_Fetch_or_acquire_1(_Tgt, _Value));
759 
760 case memory_order_release:
761 return (_Fetch_or_release_1(_Tgt, _Value));
762 
763 case memory_order_acq_rel:
764 case memory_order_seq_cst:
765 return (_Fetch_or_seq_cst_1(_Tgt, _Value));
766 
767 default:
768 _INVALID_MEMORY_ORDER;
769 return (0);
770  }
771  }
772 
773  /* _Atomic_fetch_xor_1 */
774 inline _Uint1_t _Fetch_xor_seq_cst_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
775  { /* xor _Value with *_Tgt atomically with
776  sequentially consistent memory order */
777 
778  return (_INTRIN_SEQ_CST(_InterlockedXor8)((volatile char *)_Tgt, _Value));
779  }
780 
781 inline _Uint1_t _Fetch_xor_relaxed_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
782  { /* xor _Value with *_Tgt atomically with
783  relaxed memory order */
784 
785  return (_INTRIN_RELAXED(_InterlockedXor8)((volatile char *)_Tgt, _Value));
786  }
787 
788 inline _Uint1_t _Fetch_xor_acquire_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
789  { /* xor _Value with *_Tgt atomically with
790  acquire memory order */
791 
792  return (_INTRIN_ACQUIRE(_InterlockedXor8)((volatile char *)_Tgt, _Value));
793  }
794 
795 inline _Uint1_t _Fetch_xor_release_1(volatile _Uint1_t *_Tgt, _Uint1_t _Value)
796  { /* xor _Value with *_Tgt atomically with
797  release memory order */
798 
799  return (_INTRIN_RELEASE(_InterlockedXor8)((volatile char *)_Tgt, _Value));
800  }
801 
802 inline _Uint1_t _Atomic_fetch_xor_1(
803  volatile _Uint1_t *_Tgt, _Uint1_t _Value, memory_order _Order)
804  { /* xor _Value with *_Tgt atomically */
805  switch (_Order)
806 {
807 case memory_order_relaxed:
808 return (_Fetch_xor_relaxed_1(_Tgt, _Value));
809 
810 case memory_order_consume:
811 case memory_order_acquire:
812 return (_Fetch_xor_acquire_1(_Tgt, _Value));
813 
814 case memory_order_release:
815 return (_Fetch_xor_release_1(_Tgt, _Value));
816 
817 case memory_order_acq_rel:
818 case memory_order_seq_cst:
819 return (_Fetch_xor_seq_cst_1(_Tgt, _Value));
820 
821 default:
822 _INVALID_MEMORY_ORDER;
823 return (0);
824  }
825  }
826 
827  /* _Atomic_store_2 */
828 inline void _Store_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
829  { /* store _Value atomically with relaxed memory order */
830  #if defined(_M_ARM)
831  __iso_volatile_store16((volatile short *)_Tgt, _Value);
832 
833  #else
834  *_Tgt = _Value;
835  #endif
836  }
837 
838 inline void _Store_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
839  { /* store _Value atomically with release memory order */
840  #if defined(_M_ARM)
841  _Memory_barrier();
842  __iso_volatile_store16((volatile short *)_Tgt, _Value);
843 
844  #else
845 _Compiler_barrier();
846 *_Tgt = _Value;
847  #endif
848  }
849 
850 inline void _Store_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
851  { /* store _Value atomically with
852  sequentially consistent memory order */
853 
854  #if defined(_M_ARM)
855  _Memory_barrier();
856  __iso_volatile_store16((volatile short *)_Tgt, _Value);
857  _Memory_barrier();
858 
859  #else
860  _INTRIN_SEQ_CST(_InterlockedExchange16)((volatile short *)_Tgt, _Value);
861  #endif
862  }
863 
864 inline void _Atomic_store_2(
865  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
866  { /* store _Value atomically */
867  switch (_Order)
868 {
869 case memory_order_relaxed:
870 _Store_relaxed_2(_Tgt, _Value);
871 break;
872 
873 case memory_order_release:
874 _Store_release_2(_Tgt, _Value);
875 break;
876 
877 case memory_order_seq_cst:
878 _Store_seq_cst_2(_Tgt, _Value);
879 break;
880 
881 default:
882 _INVALID_MEMORY_ORDER;
883 break;
884  }
885  }
886 
887  /* _Atomic_load_2 */
888 inline _Uint2_t _Load_seq_cst_2(volatile _Uint2_t *_Tgt)
889  { /* load from *_Tgt atomically with
890  sequentially consistent memory order */
891  _Uint2_t _Value;
892 
893  #if defined(_M_ARM)
894  _Value = __iso_volatile_load16((volatile short *)_Tgt);
895  _Memory_barrier();
896 
897  #else
898 _Value = *_Tgt;
899 _Compiler_barrier();
900  #endif
901 
902  return (_Value);
903  }
904 
905 inline _Uint2_t _Load_relaxed_2(volatile _Uint2_t *_Tgt)
906  { /* load from *_Tgt atomically with
907  relaxed memory order */
908  _Uint2_t _Value;
909 
910  #if defined(_M_ARM)
911  _Value = __iso_volatile_load16((volatile short *)_Tgt);
912 
913  #else
914  _Value = *_Tgt;
915  #endif
916 
917  return (_Value);
918  }
919 
920 inline _Uint2_t _Load_acquire_2(volatile _Uint2_t *_Tgt)
921  { /* load from *_Tgt atomically with
922  acquire memory order */
923 
924  return (_Load_seq_cst_2(_Tgt));
925  }
926 
927 inline _Uint2_t _Atomic_load_2(
928  volatile _Uint2_t *_Tgt, memory_order _Order)
929  { /* load from *_Tgt atomically */
930  switch (_Order)
931 {
932 case memory_order_relaxed:
933 return (_Load_relaxed_2(_Tgt));
934 
935 case memory_order_consume:
936 case memory_order_acquire:
937 return (_Load_acquire_2(_Tgt));
938 
939 case memory_order_seq_cst:
940 return (_Load_seq_cst_2(_Tgt));
941 
942 default:
943 _INVALID_MEMORY_ORDER;
944 return (0);
945  }
946  }
947 
948  /* _Atomic_exchange_2 */
949 inline _Uint2_t _Exchange_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
950  { /* exchange _Value and *_Tgt atomically with
951  sequentially consistent memory order */
952 
953  return (_INTRIN_SEQ_CST(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
954  }
955 
956 inline _Uint2_t _Exchange_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
957  { /* exchange _Value and *_Tgt atomically with
958  relaxed memory order */
959 
960  return (_INTRIN_RELAXED(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
961  }
962 
963 inline _Uint2_t _Exchange_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
964  { /* exchange _Value and *_Tgt atomically with
965  acquire memory order */
966 
967  return (_INTRIN_ACQUIRE(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
968  }
969 
970 inline _Uint2_t _Exchange_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
971  { /* exchange _Value and *_Tgt atomically with
972  release memory order */
973 
974  return (_INTRIN_RELEASE(_InterlockedExchange16)((volatile short *)_Tgt, _Value));
975  }
976 
977 inline _Uint2_t _Atomic_exchange_2(
978  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
979  { /* exchange _Value and *_Tgt atomically */
980  switch (_Order)
981 {
982 case memory_order_relaxed:
983 return (_Exchange_relaxed_2(_Tgt, _Value));
984 
985 case memory_order_consume:
986 case memory_order_acquire:
987 return (_Exchange_acquire_2(_Tgt, _Value));
988 
989 case memory_order_release:
990 return (_Exchange_release_2(_Tgt, _Value));
991 
992 case memory_order_acq_rel:
993 case memory_order_seq_cst:
994 return (_Exchange_seq_cst_2(_Tgt, _Value));
995 
996 default:
997 _INVALID_MEMORY_ORDER;
998 return (0);
999  }
1000  }
1001 
1002  /* _Atomic_compare_exchange_weak_2, _Atomic_compare_exchange_strong_2 */
1003 inline int _Compare_exchange_seq_cst_2(volatile _Uint2_t *_Tgt,
1004  _Uint2_t *_Exp, _Uint2_t _Value)
1005  { /* compare and exchange values atomically with
1006  sequentially consistent memory order */
1007 
1008  int _Res;
1009 
1010  _Uint2_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange16)((volatile short *)_Tgt,
1011  _Value, *_Exp);
1012 
1013  if (_Prev == *_Exp)
1014  _Res = 1;
1015  else
1016  { /* copy old value */
1017  _Res = 0;
1018  *_Exp = _Prev;
1019  }
1020 
1021  return (_Res);
1022  }
1023 
1024 inline int _Compare_exchange_relaxed_2(volatile _Uint2_t *_Tgt,
1025  _Uint2_t *_Exp, _Uint2_t _Value)
1026  { /* compare and exchange values atomically with
1027  relaxed memory order */
1028  int _Res;
1029 
1030  _Uint2_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange16)((volatile short *)_Tgt,
1031  _Value, *_Exp);
1032 
1033  if (_Prev == *_Exp)
1034  _Res = 1;
1035  else
1036  { /* copy old value */
1037  _Res = 0;
1038  *_Exp = _Prev;
1039  }
1040 
1041  return (_Res);
1042  }
1043 
1044 inline int _Compare_exchange_acquire_2(volatile _Uint2_t *_Tgt,
1045  _Uint2_t *_Exp, _Uint2_t _Value)
1046  { /* compare and exchange values atomically with
1047  acquire memory order */
1048  int _Res;
1049 
1050  _Uint2_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange16)((volatile short *)_Tgt,
1051  _Value, *_Exp);
1052 
1053  if (_Prev == *_Exp)
1054  _Res = 1;
1055  else
1056  { /* copy old value */
1057  _Res = 0;
1058  *_Exp = _Prev;
1059  }
1060 
1061  return (_Res);
1062  }
1063 
1064 inline int _Compare_exchange_release_2(volatile _Uint2_t *_Tgt,
1065  _Uint2_t *_Exp, _Uint2_t _Value)
1066  { /* compare and exchange values atomically with
1067  release memory order */
1068  int _Res;
1069 
1070  _Uint2_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange16)((volatile short *)_Tgt,
1071  _Value, *_Exp);
1072 
1073  if (_Prev == *_Exp)
1074  _Res = 1;
1075  else
1076  { /* copy old value */
1077  _Res = 0;
1078  *_Exp = _Prev;
1079  }
1080 
1081  return (_Res);
1082  }
1083 
1084 inline int _Atomic_compare_exchange_strong_2(
1085 volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value,
1086  memory_order _Order1, memory_order _Order2)
1087  { /* compare and exchange values atomically */
1088  _Validate_compare_exchange_memory_order(_Order1, _Order2);
1089 
1090  switch (_Memory_order_upper_bound(_Order1, _Order2))
1091  {
1092  case memory_order_relaxed:
1093  return (_Compare_exchange_relaxed_2(_Tgt, _Exp, _Value));
1094 
1095  case memory_order_consume:
1096  case memory_order_acquire:
1097  return (_Compare_exchange_acquire_2(_Tgt, _Exp, _Value));
1098 
1099  case memory_order_release:
1100  return (_Compare_exchange_release_2(_Tgt, _Exp, _Value));
1101 
1102  case memory_order_acq_rel:
1103  case memory_order_seq_cst:
1104  return (_Compare_exchange_seq_cst_2(_Tgt, _Exp, _Value));
1105 
1106 default:
1107 _INVALID_MEMORY_ORDER;
1108  return (0);
1109  }
1110  }
1111 
1112 inline int _Atomic_compare_exchange_weak_2(
1113 volatile _Uint2_t *_Tgt, _Uint2_t *_Exp, _Uint2_t _Value,
1114  memory_order _Order1, memory_order _Order2)
1115  { /* compare and exchange values atomically */
1116  /* No weak compare-exchange is currently available,
1117  even for ARM, so fall back to strong */
1118  return (_Atomic_compare_exchange_strong_2(_Tgt, _Exp, _Value,
1119  _Order1, _Order2));
1120  }
1121 
1122  /* _Atomic_fetch_add_2, _Atomic_fetch_sub_2 */
1123 inline _Uint2_t _Fetch_add_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1124  { /* add _Value to *_Tgt atomically with
1125  sequentially consistent memory order */
1126 
1127  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1128  }
1129 
1130 inline _Uint2_t _Fetch_add_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1131  { /* add _Value to *_Tgt atomically with
1132  relaxed memory order */
1133 
1134  return (_INTRIN_RELAXED(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1135  }
1136 
1137 inline _Uint2_t _Fetch_add_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1138  { /* add _Value to *_Tgt atomically with
1139  acquire memory order */
1140 
1141  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1142  }
1143 
1144 inline _Uint2_t _Fetch_add_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1145  { /* add _Value to *_Tgt atomically with
1146  release memory order */
1147 
1148  return (_INTRIN_RELEASE(_InterlockedExchangeAdd16)((volatile short *)_Tgt, _Value));
1149  }
1150 
1151 inline _Uint2_t _Atomic_fetch_add_2(
1152  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
1153  { /* add _Value to *_Tgt atomically */
1154  switch (_Order)
1155  {
1156  case memory_order_relaxed:
1157  return (_Fetch_add_relaxed_2(_Tgt, _Value));
1158 
1159  case memory_order_consume:
1160  case memory_order_acquire:
1161  return (_Fetch_add_acquire_2(_Tgt, _Value));
1162 
1163  case memory_order_release:
1164  return (_Fetch_add_release_2(_Tgt, _Value));
1165 
1166  case memory_order_acq_rel:
1167  case memory_order_seq_cst:
1168  return (_Fetch_add_seq_cst_2(_Tgt, _Value));
1169 
1170 default:
1171 _INVALID_MEMORY_ORDER;
1172  return (0);
1173  }
1174  }
1175 
1176 inline _Uint2_t _Atomic_fetch_sub_2(
1177  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
1178  { /* subtract _Value from *_Tgt atomically */
1179  return (_Atomic_fetch_add_2(_Tgt, 0 - _Value, _Order));
1180  }
1181 
1182  /* _Atomic_fetch_and_2 */
1183 inline _Uint2_t _Fetch_and_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1184  { /* and _Value with *_Tgt atomically with
1185  sequentially consistent memory order */
1186 
1187  return (_INTRIN_SEQ_CST(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1188  }
1189 
1190 inline _Uint2_t _Fetch_and_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1191  { /* and _Value with *_Tgt atomically with
1192  relaxed memory order */
1193 
1194  return (_INTRIN_RELAXED(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1195  }
1196 
1197 inline _Uint2_t _Fetch_and_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1198  { /* and _Value with *_Tgt atomically with
1199  acquire memory order */
1200 
1201  return (_INTRIN_ACQUIRE(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1202  }
1203 
1204 inline _Uint2_t _Fetch_and_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1205  { /* and _Value with *_Tgt atomically with
1206  release memory order */
1207 
1208  return (_INTRIN_RELEASE(_InterlockedAnd16)((volatile short *)_Tgt, _Value));
1209  }
1210 
1211 inline _Uint2_t _Atomic_fetch_and_2(
1212  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
1213  { /* and _Value with *_Tgt atomically */
1214  switch (_Order)
1215  {
1216  case memory_order_relaxed:
1217  return (_Fetch_and_relaxed_2(_Tgt, _Value));
1218 
1219  case memory_order_consume:
1220  case memory_order_acquire:
1221  return (_Fetch_and_acquire_2(_Tgt, _Value));
1222 
1223  case memory_order_release:
1224  return (_Fetch_and_release_2(_Tgt, _Value));
1225 
1226  case memory_order_acq_rel:
1227  case memory_order_seq_cst:
1228  return (_Fetch_and_seq_cst_2(_Tgt, _Value));
1229 
1230 default:
1231 _INVALID_MEMORY_ORDER;
1232  return (0);
1233  }
1234  }
1235 
1236  /* _Atomic_fetch_or_2 */
1237 inline _Uint2_t _Fetch_or_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1238  { /* or _Value with *_Tgt atomically with
1239  sequentially consistent memory order */
1240 
1241  return (_INTRIN_SEQ_CST(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1242  }
1243 
1244 inline _Uint2_t _Fetch_or_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1245  { /* or _Value with *_Tgt atomically with
1246  relaxed memory order */
1247 
1248  return (_INTRIN_RELAXED(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1249  }
1250 
1251 inline _Uint2_t _Fetch_or_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1252  { /* or _Value with *_Tgt atomically with
1253  acquire memory order */
1254 
1255  return (_INTRIN_ACQUIRE(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1256  }
1257 
1258 inline _Uint2_t _Fetch_or_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1259  { /* or _Value with *_Tgt atomically with
1260  release memory order */
1261 
1262  return (_INTRIN_RELEASE(_InterlockedOr16)((volatile short *)_Tgt, _Value));
1263  }
1264 
1265 inline _Uint2_t _Atomic_fetch_or_2(
1266  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
1267  { /* or _Value with *_Tgt atomically */
1268  switch (_Order)
1269  {
1270  case memory_order_relaxed:
1271  return (_Fetch_or_relaxed_2(_Tgt, _Value));
1272 
1273  case memory_order_consume:
1274  case memory_order_acquire:
1275  return (_Fetch_or_acquire_2(_Tgt, _Value));
1276 
1277  case memory_order_release:
1278  return (_Fetch_or_release_2(_Tgt, _Value));
1279 
1280  case memory_order_acq_rel:
1281  case memory_order_seq_cst:
1282  return (_Fetch_or_seq_cst_2(_Tgt, _Value));
1283 
1284 default:
1285 _INVALID_MEMORY_ORDER;
1286  return (0);
1287  }
1288  }
1289 
1290  /* _Atomic_fetch_xor_2 */
1291 inline _Uint2_t _Fetch_xor_seq_cst_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1292  { /* xor _Value with *_Tgt atomically with
1293  sequentially consistent memory order */
1294 
1295  return (_INTRIN_SEQ_CST(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1296  }
1297 
1298 inline _Uint2_t _Fetch_xor_relaxed_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1299  { /* xor _Value with *_Tgt atomically with
1300  relaxed memory order */
1301 
1302  return (_INTRIN_RELAXED(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1303  }
1304 
1305 inline _Uint2_t _Fetch_xor_acquire_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1306  { /* xor _Value with *_Tgt atomically with
1307  acquire memory order */
1308 
1309  return (_INTRIN_ACQUIRE(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1310  }
1311 
1312 inline _Uint2_t _Fetch_xor_release_2(volatile _Uint2_t *_Tgt, _Uint2_t _Value)
1313  { /* xor _Value with *_Tgt atomically with
1314  release memory order */
1315 
1316  return (_INTRIN_RELEASE(_InterlockedXor16)((volatile short *)_Tgt, _Value));
1317  }
1318 
1319 inline _Uint2_t _Atomic_fetch_xor_2(
1320  volatile _Uint2_t *_Tgt, _Uint2_t _Value, memory_order _Order)
1321  { /* xor _Value with *_Tgt atomically */
1322  switch (_Order)
1323  {
1324  case memory_order_relaxed:
1325  return (_Fetch_xor_relaxed_2(_Tgt, _Value));
1326 
1327  case memory_order_consume:
1328  case memory_order_acquire:
1329  return (_Fetch_xor_acquire_2(_Tgt, _Value));
1330 
1331  case memory_order_release:
1332  return (_Fetch_xor_release_2(_Tgt, _Value));
1333 
1334  case memory_order_acq_rel:
1335  case memory_order_seq_cst:
1336  return (_Fetch_xor_seq_cst_2(_Tgt, _Value));
1337 
1338 default:
1339 _INVALID_MEMORY_ORDER;
1340  return (0);
1341  }
1342  }
1343 
1344  /* _Atomic_store_4 */
1345 inline void _Store_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1346  { /* store _Value atomically with relaxed memory order */
1347  #if defined(_M_ARM)
1348  __iso_volatile_store32((volatile int *)_Tgt, _Value);
1349 
1350  #else
1351  *_Tgt = _Value;
1352  #endif
1353  }
1354 
1355 inline void _Store_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1356  { /* store _Value atomically with release memory order */
1357  #if defined(_M_ARM)
1358  _Memory_barrier();
1359  __iso_volatile_store32((volatile int *)_Tgt, _Value);
1360 
1361  #else
1362 _Compiler_barrier();
1363 *_Tgt = _Value;
1364  #endif
1365  }
1366 
1367 inline void _Store_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1368  { /* store _Value atomically with
1369  sequentially consistent memory order */
1370 
1371  #if defined(_M_ARM)
1372  _Memory_barrier();
1373  __iso_volatile_store32((volatile int *)_Tgt, _Value);
1374  _Memory_barrier();
1375 
1376  #else
1377  _INTRIN_SEQ_CST(_InterlockedExchange)((volatile long *)_Tgt, _Value);
1378  #endif
1379  }
1380 
1381 inline void _Atomic_store_4(
1382  volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1383  { /* store _Value atomically */
1384  switch (_Order)
1385  {
1386  case memory_order_relaxed:
1387  _Store_relaxed_4(_Tgt, _Value);
1388  break;
1389 
1390  case memory_order_release:
1391  _Store_release_4(_Tgt, _Value);
1392  break;
1393 
1394  case memory_order_seq_cst:
1395  _Store_seq_cst_4(_Tgt, _Value);
1396  break;
1397 
1398 default:
1399 _INVALID_MEMORY_ORDER;
1400  break;
1401  }
1402  }
1403 
1404  /* _Atomic_load_4 */
1405 inline _Uint4_t _Load_seq_cst_4(volatile _Uint4_t *_Tgt)
1406  { /* load from *_Tgt atomically with
1407  sequentially consistent memory order */
1408  _Uint4_t _Value;
1409 
1410  #if defined(_M_ARM)
1411  _Value = __iso_volatile_load32((volatile int *)_Tgt);
1412  _Memory_barrier();
1413 
1414  #else
1415 _Value = *_Tgt;
1416 _Compiler_barrier();
1417  #endif
1418 
1419  return (_Value);
1420  }
1421 
1422 inline _Uint4_t _Load_relaxed_4(volatile _Uint4_t *_Tgt)
1423  { /* load from *_Tgt atomically with
1424  relaxed memory order */
1425  _Uint4_t _Value;
1426 
1427  #if defined(_M_ARM)
1428  _Value = __iso_volatile_load32((volatile int *)_Tgt);
1429 
1430  #else
1431  _Value = *_Tgt;
1432  #endif
1433 
1434  return (_Value);
1435  }
1436 
1437 inline _Uint4_t _Load_acquire_4(volatile _Uint4_t *_Tgt)
1438  { /* load from *_Tgt atomically with
1439  acquire memory order */
1440 
1441  return (_Load_seq_cst_4(_Tgt));
1442  }
1443 
1444 inline _Uint4_t _Atomic_load_4(
1445 volatile _Uint4_t *_Tgt, memory_order _Order)
1446  { /* load from *_Tgt atomically */
1447  switch (_Order)
1448  {
1449  case memory_order_relaxed:
1450  return (_Load_relaxed_4(_Tgt));
1451 
1452  case memory_order_consume:
1453  case memory_order_acquire:
1454  return (_Load_acquire_4(_Tgt));
1455 
1456  case memory_order_seq_cst:
1457  return (_Load_seq_cst_4(_Tgt));
1458 
1459 default:
1460 _INVALID_MEMORY_ORDER;
1461  return (0);
1462  }
1463  }
1464 
1465  /* _Atomic_exchange_4 */
1466 inline _Uint4_t _Exchange_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1467  { /* exchange _Value and *_Tgt atomically with
1468  sequentially consistent memory order */
1469 
1470  return (_INTRIN_SEQ_CST(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1471  }
1472 
1473 inline _Uint4_t _Exchange_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1474  { /* exchange _Value and *_Tgt atomically with
1475  relaxed memory order */
1476 
1477  return (_INTRIN_RELAXED(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1478  }
1479 
1480 inline _Uint4_t _Exchange_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1481  { /* exchange _Value and *_Tgt atomically with
1482  acquire memory order */
1483 
1484  return (_INTRIN_ACQUIRE(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1485  }
1486 
1487 inline _Uint4_t _Exchange_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1488  { /* exchange _Value and *_Tgt atomically with
1489  release memory order */
1490 
1491  return (_INTRIN_RELEASE(_InterlockedExchange)((volatile long *)_Tgt, _Value));
1492  }
1493 
1494 inline _Uint4_t _Atomic_exchange_4(
1495 volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1496  { /* exchange _Value and *_Tgt atomically */
1497  switch (_Order)
1498  {
1499  case memory_order_relaxed:
1500  return (_Exchange_relaxed_4(_Tgt, _Value));
1501 
1502  case memory_order_consume:
1503  case memory_order_acquire:
1504  return (_Exchange_acquire_4(_Tgt, _Value));
1505 
1506  case memory_order_release:
1507  return (_Exchange_release_4(_Tgt, _Value));
1508 
1509  case memory_order_acq_rel:
1510  case memory_order_seq_cst:
1511  return (_Exchange_seq_cst_4(_Tgt, _Value));
1512 
1513 default:
1514 _INVALID_MEMORY_ORDER;
1515  return (0);
1516  }
1517  }
1518 
1519  /* _Atomic_compare_exchange_weak_4, _Atomic_compare_exchange_strong_4 */
1520 inline int _Compare_exchange_seq_cst_4(volatile _Uint4_t *_Tgt,
1521  _Uint4_t *_Exp, _Uint4_t _Value)
1522  { /* compare and exchange values atomically with
1523  sequentially consistent memory order */
1524 
1525  int _Res;
1526 
1527  _Uint4_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange)((volatile long *)_Tgt,
1528  _Value, *_Exp);
1529 
1530  if (_Prev == *_Exp)
1531  _Res = 1;
1532  else
1533  { /* copy old value */
1534  _Res = 0;
1535  *_Exp = _Prev;
1536  }
1537 
1538  return (_Res);
1539  }
1540 
1541 inline int _Compare_exchange_relaxed_4(volatile _Uint4_t *_Tgt,
1542  _Uint4_t *_Exp, _Uint4_t _Value)
1543  { /* compare and exchange values atomically with
1544  relaxed memory order */
1545  int _Res;
1546 
1547  _Uint4_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange)((volatile long *)_Tgt,
1548  _Value, *_Exp);
1549 
1550  if (_Prev == *_Exp)
1551  _Res = 1;
1552  else
1553  { /* copy old value */
1554  _Res = 0;
1555  *_Exp = _Prev;
1556  }
1557 
1558  return (_Res);
1559  }
1560 
1561 inline int _Compare_exchange_acquire_4(volatile _Uint4_t *_Tgt,
1562  _Uint4_t *_Exp, _Uint4_t _Value)
1563  { /* compare and exchange values atomically with
1564  acquire memory order */
1565  int _Res;
1566 
1567  _Uint4_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange)((volatile long *)_Tgt,
1568  _Value, *_Exp);
1569 
1570  if (_Prev == *_Exp)
1571  _Res = 1;
1572  else
1573  { /* copy old value */
1574  _Res = 0;
1575  *_Exp = _Prev;
1576  }
1577 
1578  return (_Res);
1579  }
1580 
1581 inline int _Compare_exchange_release_4(volatile _Uint4_t *_Tgt,
1582  _Uint4_t *_Exp, _Uint4_t _Value)
1583  { /* compare and exchange values atomically with
1584  release memory order */
1585  int _Res;
1586 
1587  _Uint4_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange)((volatile long *)_Tgt,
1588  _Value, *_Exp);
1589 
1590  if (_Prev == *_Exp)
1591  _Res = 1;
1592  else
1593  { /* copy old value */
1594  _Res = 0;
1595  *_Exp = _Prev;
1596  }
1597 
1598  return (_Res);
1599  }
1600 
1601 inline int _Atomic_compare_exchange_strong_4(
1602 volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value,
1603  memory_order _Order1, memory_order _Order2)
1604  { /* compare and exchange values atomically */
1605  _Validate_compare_exchange_memory_order(_Order1, _Order2);
1606 
1607  switch (_Memory_order_upper_bound(_Order1, _Order2))
1608  {
1609  case memory_order_relaxed:
1610  return (_Compare_exchange_relaxed_4(_Tgt, _Exp, _Value));
1611 
1612  case memory_order_consume:
1613  case memory_order_acquire:
1614  return (_Compare_exchange_acquire_4(_Tgt, _Exp, _Value));
1615 
1616  case memory_order_release:
1617  return (_Compare_exchange_release_4(_Tgt, _Exp, _Value));
1618 
1619  case memory_order_acq_rel:
1620  case memory_order_seq_cst:
1621  return (_Compare_exchange_seq_cst_4(_Tgt, _Exp, _Value));
1622 
1623 default:
1624 _INVALID_MEMORY_ORDER;
1625  return (0);
1626  }
1627  }
1628 
1629 inline int _Atomic_compare_exchange_weak_4(
1630 volatile _Uint4_t *_Tgt, _Uint4_t *_Exp, _Uint4_t _Value,
1631  memory_order _Order1, memory_order _Order2)
1632  { /* compare and exchange values atomically */
1633  /* No weak compare-exchange is currently available,
1634  even for ARM, so fall back to strong */
1635  return (_Atomic_compare_exchange_strong_4(_Tgt, _Exp, _Value,
1636  _Order1, _Order2));
1637  }
1638 
1639  /* _Atomic_fetch_add_4, _Atomic_fetch_sub_4 */
1640 inline _Uint4_t _Fetch_add_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1641  { /* add _Value to *_Tgt atomically with
1642  sequentially consistent memory order */
1643 
1644  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1645  }
1646 
1647 inline _Uint4_t _Fetch_add_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1648  { /* add _Value to *_Tgt atomically with
1649  relaxed memory order */
1650 
1651  return (_INTRIN_RELAXED(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1652  }
1653 
1654 inline _Uint4_t _Fetch_add_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1655  { /* add _Value to *_Tgt atomically with
1656  acquire memory order */
1657 
1658  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1659  }
1660 
1661 inline _Uint4_t _Fetch_add_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1662  { /* add _Value to *_Tgt atomically with
1663  release memory order */
1664 
1665  return (_INTRIN_RELEASE(_InterlockedExchangeAdd)((volatile long *)_Tgt, _Value));
1666  }
1667 
1668 inline _Uint4_t _Atomic_fetch_add_4(
1669 volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1670  { /* add _Value to *_Tgt atomically */
1671  switch (_Order)
1672  {
1673  case memory_order_relaxed:
1674  return (_Fetch_add_relaxed_4(_Tgt, _Value));
1675 
1676  case memory_order_consume:
1677  case memory_order_acquire:
1678  return (_Fetch_add_acquire_4(_Tgt, _Value));
1679 
1680  case memory_order_release:
1681  return (_Fetch_add_release_4(_Tgt, _Value));
1682 
1683  case memory_order_acq_rel:
1684  case memory_order_seq_cst:
1685  return (_Fetch_add_seq_cst_4(_Tgt, _Value));
1686 
1687 default:
1688 _INVALID_MEMORY_ORDER;
1689  return (0);
1690  }
1691  }
1692 
1693 inline _Uint4_t _Atomic_fetch_sub_4(
1694 volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1695  { /* subtract _Value from *_Tgt atomically */
1696  return (_Atomic_fetch_add_4(_Tgt, 0 - _Value, _Order));
1697  }
1698 
1699  /* _Atomic_fetch_and_4 */
1700 inline _Uint4_t _Fetch_and_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1701  { /* and _Value with *_Tgt atomically with
1702  sequentially consistent memory order */
1703 
1704  return (_INTRIN_SEQ_CST(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1705  }
1706 
1707 inline _Uint4_t _Fetch_and_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1708  { /* and _Value with *_Tgt atomically with
1709  relaxed memory order */
1710 
1711  return (_INTRIN_RELAXED(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1712  }
1713 
1714 inline _Uint4_t _Fetch_and_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1715  { /* and _Value with *_Tgt atomically with
1716  acquire memory order */
1717 
1718  return (_INTRIN_ACQUIRE(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1719  }
1720 
1721 inline _Uint4_t _Fetch_and_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1722  { /* and _Value with *_Tgt atomically with
1723  release memory order */
1724 
1725  return (_INTRIN_RELEASE(_InterlockedAnd)((volatile long *)_Tgt, _Value));
1726  }
1727 
1728 inline _Uint4_t _Atomic_fetch_and_4(
1729 volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1730  { /* and _Value with *_Tgt atomically */
1731  switch (_Order)
1732  {
1733  case memory_order_relaxed:
1734  return (_Fetch_and_relaxed_4(_Tgt, _Value));
1735 
1736  case memory_order_consume:
1737  case memory_order_acquire:
1738  return (_Fetch_and_acquire_4(_Tgt, _Value));
1739 
1740  case memory_order_release:
1741  return (_Fetch_and_release_4(_Tgt, _Value));
1742 
1743  case memory_order_acq_rel:
1744  case memory_order_seq_cst:
1745  return (_Fetch_and_seq_cst_4(_Tgt, _Value));
1746 
1747 default:
1748 _INVALID_MEMORY_ORDER;
1749  return (0);
1750  }
1751  }
1752 
1753  /* _Atomic_fetch_or_4 */
1754 inline _Uint4_t _Fetch_or_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1755  { /* or _Value with *_Tgt atomically with
1756  sequentially consistent memory order */
1757 
1758  return (_INTRIN_SEQ_CST(_InterlockedOr)((volatile long *)_Tgt, _Value));
1759  }
1760 
1761 inline _Uint4_t _Fetch_or_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1762  { /* or _Value with *_Tgt atomically with
1763  relaxed memory order */
1764 
1765  return (_INTRIN_RELAXED(_InterlockedOr)((volatile long *)_Tgt, _Value));
1766  }
1767 
1768 inline _Uint4_t _Fetch_or_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1769  { /* or _Value with *_Tgt atomically with
1770  acquire memory order */
1771 
1772  return (_INTRIN_ACQUIRE(_InterlockedOr)((volatile long *)_Tgt, _Value));
1773  }
1774 
1775 inline _Uint4_t _Fetch_or_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1776  { /* or _Value with *_Tgt atomically with
1777  release memory order */
1778 
1779  return (_INTRIN_RELEASE(_InterlockedOr)((volatile long *)_Tgt, _Value));
1780  }
1781 
1782 inline _Uint4_t _Atomic_fetch_or_4(
1783 volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1784  { /* or _Value with *_Tgt atomically */
1785  switch (_Order)
1786  {
1787  case memory_order_relaxed:
1788  return (_Fetch_or_relaxed_4(_Tgt, _Value));
1789 
1790  case memory_order_consume:
1791  case memory_order_acquire:
1792  return (_Fetch_or_acquire_4(_Tgt, _Value));
1793 
1794  case memory_order_release:
1795  return (_Fetch_or_release_4(_Tgt, _Value));
1796 
1797  case memory_order_acq_rel:
1798  case memory_order_seq_cst:
1799  return (_Fetch_or_seq_cst_4(_Tgt, _Value));
1800 
1801 default:
1802 _INVALID_MEMORY_ORDER;
1803  return (0);
1804  }
1805  }
1806 
1807  /* _Atomic_fetch_xor_4 */
1808 inline _Uint4_t _Fetch_xor_seq_cst_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1809  { /* xor _Value with *_Tgt atomically with
1810  sequentially consistent memory order */
1811 
1812  return (_INTRIN_SEQ_CST(_InterlockedXor)((volatile long *)_Tgt, _Value));
1813  }
1814 
1815 inline _Uint4_t _Fetch_xor_relaxed_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1816  { /* xor _Value with *_Tgt atomically with
1817  relaxed memory order */
1818 
1819  return (_INTRIN_RELAXED(_InterlockedXor)((volatile long *)_Tgt, _Value));
1820  }
1821 
1822 inline _Uint4_t _Fetch_xor_acquire_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1823  { /* xor _Value with *_Tgt atomically with
1824  acquire memory order */
1825 
1826  return (_INTRIN_ACQUIRE(_InterlockedXor)((volatile long *)_Tgt, _Value));
1827  }
1828 
1829 inline _Uint4_t _Fetch_xor_release_4(volatile _Uint4_t *_Tgt, _Uint4_t _Value)
1830  { /* xor _Value with *_Tgt atomically with
1831  release memory order */
1832 
1833  return (_INTRIN_RELEASE(_InterlockedXor)((volatile long *)_Tgt, _Value));
1834  }
1835 
1836 inline _Uint4_t _Atomic_fetch_xor_4(
1837 volatile _Uint4_t *_Tgt, _Uint4_t _Value, memory_order _Order)
1838  { /* xor _Value with *_Tgt atomically */
1839  switch (_Order)
1840  {
1841  case memory_order_relaxed:
1842  return (_Fetch_xor_relaxed_4(_Tgt, _Value));
1843 
1844  case memory_order_consume:
1845  case memory_order_acquire:
1846  return (_Fetch_xor_acquire_4(_Tgt, _Value));
1847 
1848  case memory_order_release:
1849  return (_Fetch_xor_release_4(_Tgt, _Value));
1850 
1851  case memory_order_acq_rel:
1852  case memory_order_seq_cst:
1853  return (_Fetch_xor_seq_cst_4(_Tgt, _Value));
1854 
1855 default:
1856 _INVALID_MEMORY_ORDER;
1857  return (0);
1858  }
1859  }
1860 
1861  /* _Atomic_store_8 */
1862 inline void _Store_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1863  { /* store _Value atomically with relaxed memory order */
1864  #if _MS_64
1865  *_Tgt = _Value;
1866 
1867  #else
1868  _INTRIN_RELAXED(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value);
1869  #endif
1870  }
1871 
1872 inline void _Store_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1873  { /* store _Value atomically with release memory order */
1874  #if _MS_64
1875 _Compiler_barrier();
1876 *_Tgt = _Value;
1877 
1878  #else
1879  _INTRIN_RELEASE(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value);
1880  #endif
1881  }
1882 
1883 inline void _Store_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1884  { /* store _Value atomically with
1885  sequentially consistent memory order */
1886  _INTRIN_SEQ_CST(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value);
1887  }
1888 
1889 inline void _Atomic_store_8(
1890  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
1891  { /* store _Value atomically */
1892  switch (_Order)
1893  {
1894  case memory_order_relaxed:
1895  _Store_relaxed_8(_Tgt, _Value);
1896  break;
1897 
1898  case memory_order_release:
1899  _Store_release_8(_Tgt, _Value);
1900  break;
1901 
1902  case memory_order_seq_cst:
1903  _Store_seq_cst_8(_Tgt, _Value);
1904  break;
1905 
1906 default:
1907 _INVALID_MEMORY_ORDER;
1908  break;
1909  }
1910  }
1911 
1912  /* _Atomic_load_8 */
1913 inline _Uint8_t _Load_seq_cst_8(volatile _Uint8_t *_Tgt)
1914  { /* load from *_Tgt atomically with
1915  sequentially consistent memory order */
1916  _Uint8_t _Value;
1917 
1918  #if _MS_64
1919 _Value = *_Tgt;
1920 _Compiler_barrier();
1921 
1922  #elif defined(_M_ARM)
1923  _Value = __ldrexd((volatile _LONGLONG *)_Tgt);
1924  _Memory_barrier();
1925 
1926  #else
1927  _Value = _InterlockedOr64((volatile _LONGLONG *)_Tgt, 0);
1928  #endif
1929 
1930  return (_Value);
1931  }
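/* On 32-bit x86 a 64-bit value cannot be read with one plain mov, so
   _InterlockedOr64(_Tgt, 0) serves as the atomic read: or-ing with zero
   leaves *_Tgt unchanged while the intrinsic atomically returns the prior
   contents. The ARM path instead uses __ldrexd, the doubleword exclusive
   load. */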
1932 
1933 inline _Uint8_t _Load_relaxed_8(volatile _Uint8_t *_Tgt)
1934  { /* load from *_Tgt atomically with
1935  relaxed memory order */
1936  _Uint8_t _Value;
1937 
1938  #if _MS_64
1939  _Value = *_Tgt;
1940 
1941  #elif defined(_M_ARM)
1942  _Value = __ldrexd((volatile _LONGLONG *)_Tgt);
1943 
1944  #else
1945  _Value = _InterlockedOr64((volatile _LONGLONG *)_Tgt, 0);
1946  #endif
1947 
1948  return (_Value);
1949  }
1950 
1951 inline _Uint8_t _Load_acquire_8(volatile _Uint8_t *_Tgt)
1952  { /* load from *_Tgt atomically with
1953  acquire memory order */
1954 
1955  return (_Load_seq_cst_8(_Tgt));
1956  }
1957 
1958 inline _Uint8_t _Atomic_load_8(
1959  volatile _Uint8_t *_Tgt, memory_order _Order)
1960  { /* load from *_Tgt atomically */
1961  switch (_Order)
1962  {
1963  case memory_order_relaxed:
1964  return (_Load_relaxed_8(_Tgt));
1965 
1966  case memory_order_consume:
1967  case memory_order_acquire:
1968  return (_Load_acquire_8(_Tgt));
1969 
1970  case memory_order_seq_cst:
1971  return (_Load_seq_cst_8(_Tgt));
1972 
1973 default:
1974 _INVALID_MEMORY_ORDER;
1975  return (0);
1976  }
1977  }
1978 
1979  /* _Atomic_exchange_8 */
1980 inline _Uint8_t _Exchange_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1981  { /* exchange _Value and *_Tgt atomically with
1982  sequentially consistent memory order */
1983 
1984  return (_INTRIN_SEQ_CST(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value));
1985  }
1986 
1987 inline _Uint8_t _Exchange_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1988  { /* exchange _Value and *_Tgt atomically with
1989  relaxed memory order */
1990 
1991  return (_INTRIN_RELAXED(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value));
1992  }
1993 
1994 inline _Uint8_t _Exchange_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
1995  { /* exchange _Value and *_Tgt atomically with
1996  acquire memory order */
1997 
1998  return (_INTRIN_ACQUIRE(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value));
1999  }
2000 
2001 inline _Uint8_t _Exchange_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2002  { /* exchange _Value and *_Tgt atomically with
2003  release memory order */
2004 
2005  return (_INTRIN_RELEASE(_InterlockedExchange64)((volatile _LONGLONG *)_Tgt, _Value));
2006  }
2007 
2008 inline _Uint8_t _Atomic_exchange_8(
2009  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
2010  { /* exchange _Value and *_Tgt atomically */
2011  switch (_Order)
2012  {
2013  case memory_order_relaxed:
2014  return (_Exchange_relaxed_8(_Tgt, _Value));
2015 
2016  case memory_order_consume:
2017  case memory_order_acquire:
2018  return (_Exchange_acquire_8(_Tgt, _Value));
2019 
2020  case memory_order_release:
2021  return (_Exchange_release_8(_Tgt, _Value));
2022 
2023  case memory_order_acq_rel:
2024  case memory_order_seq_cst:
2025  return (_Exchange_seq_cst_8(_Tgt, _Value));
2026 
2027 default:
2028 _INVALID_MEMORY_ORDER;
2029  return (0);
2030  }
2031  }
2032 
2033  /* _Atomic_compare_exchange_weak_8, _Atomic_compare_exchange_strong_8 */
2034 inline int _Compare_exchange_seq_cst_8(volatile _Uint8_t *_Tgt,
2035  _Uint8_t *_Exp, _Uint8_t _Value)
2036  { /* compare and exchange values atomically with
2037  sequentially consistent memory order */
2038 
2039  int _Res;
2040 
2041  _Uint8_t _Prev = _INTRIN_SEQ_CST(_InterlockedCompareExchange64)((volatile _LONGLONG *)_Tgt,
2042  _Value, *_Exp);
2043 
2044  if (_Prev == *_Exp)
2045  _Res = 1;
2046  else
2047  { /* copy old value */
2048  _Res = 0;
2049  *_Exp = _Prev;
2050  }
2051 
2052  return (_Res);
2053  }
2054 
2055 inline int _Compare_exchange_relaxed_8(volatile _Uint8_t *_Tgt,
2056  _Uint8_t *_Exp, _Uint8_t _Value)
2057  { /* compare and exchange values atomically with
2058  relaxed memory order */
2059  int _Res;
2060 
2061  _Uint8_t _Prev = _INTRIN_RELAXED(_InterlockedCompareExchange64)((volatile _LONGLONG *)_Tgt,
2062  _Value, *_Exp);
2063 
2064  if (_Prev == *_Exp)
2065  _Res = 1;
2066  else
2067  { /* copy old value */
2068  _Res = 0;
2069  *_Exp = _Prev;
2070  }
2071 
2072  return (_Res);
2073  }
2074 
2075 inline int _Compare_exchange_acquire_8(volatile _Uint8_t *_Tgt,
2076  _Uint8_t *_Exp, _Uint8_t _Value)
2077  { /* compare and exchange values atomically with
2078  acquire memory order */
2079  int _Res;
2080 
2081  _Uint8_t _Prev = _INTRIN_ACQUIRE(_InterlockedCompareExchange64)((volatile _LONGLONG *)_Tgt,
2082  _Value, *_Exp);
2083 
2084  if (_Prev == *_Exp)
2085  _Res = 1;
2086  else
2087  { /* copy old value */
2088  _Res = 0;
2089  *_Exp = _Prev;
2090  }
2091 
2092  return (_Res);
2093  }
2094 
2095 inline int _Compare_exchange_release_8(volatile _Uint8_t *_Tgt,
2096  _Uint8_t *_Exp, _Uint8_t _Value)
2097  { /* compare and exchange values atomically with
2098  release memory order */
2099  int _Res;
2100 
2101  _Uint8_t _Prev = _INTRIN_RELEASE(_InterlockedCompareExchange64)((volatile _LONGLONG *)_Tgt,
2102  _Value, *_Exp);
2103 
2104  if (_Prev == *_Exp)
2105  _Res = 1;
2106  else
2107  { /* copy old value */
2108  _Res = 0;
2109  *_Exp = _Prev;
2110  }
2111 
2112  return (_Res);
2113  }
2114 
2115 inline int _Atomic_compare_exchange_strong_8(
 2116  volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value,
2117  memory_order _Order1, memory_order _Order2)
2118  { /* compare and exchange values atomically */
2119  _Validate_compare_exchange_memory_order(_Order1, _Order2);
2120 
2121  switch (_Memory_order_upper_bound(_Order1, _Order2))
2122  {
2123  case memory_order_relaxed:
2124  return (_Compare_exchange_relaxed_8(_Tgt, _Exp, _Value));
2125 
2126  case memory_order_consume:
2127  case memory_order_acquire:
2128  return (_Compare_exchange_acquire_8(_Tgt, _Exp, _Value));
2129 
2130  case memory_order_release:
2131  return (_Compare_exchange_release_8(_Tgt, _Exp, _Value));
2132 
2133  case memory_order_acq_rel:
2134  case memory_order_seq_cst:
2135  return (_Compare_exchange_seq_cst_8(_Tgt, _Exp, _Value));
2136 
 2137  default:
 2138  _INVALID_MEMORY_ORDER;
 2139  return (0);
2140  }
2141  }
2142 
2143 inline int _Atomic_compare_exchange_weak_8(
 2144  volatile _Uint8_t *_Tgt, _Uint8_t *_Exp, _Uint8_t _Value,
2145  memory_order _Order1, memory_order _Order2)
2146  { /* compare and exchange values atomically */
2147  /* No weak compare-exchange is currently available,
2148  even for ARM, so fall back to strong */
2149  return (_Atomic_compare_exchange_strong_8(_Tgt, _Exp, _Value,
2150  _Order1, _Order2));
2151  }
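 /* Because the weak form forwards to the strong form, it cannot fail
    spuriously here, but portable callers still write the canonical
    retry loop. A minimal sketch; _Sample_fetch_double_8 is an
    illustrative name, not part of this header. */
 inline _Uint8_t _Sample_fetch_double_8(volatile _Uint8_t *_Tgt)
     { /* atomically replace *_Tgt with 2 * *_Tgt; return old value */
     _Uint8_t _Old = _Atomic_load_8(_Tgt, memory_order_relaxed);
     while (!_Atomic_compare_exchange_weak_8(_Tgt, &_Old, _Old * 2,
         memory_order_seq_cst, memory_order_relaxed))
         ; /* on failure, _Old holds the observed value; retry */
     return (_Old);
     }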
2152 
2153  /* _Atomic_fetch_add_8, _Atomic_fetch_sub_8 */
2154 inline _Uint8_t _Fetch_add_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2155  { /* add _Value to *_Tgt atomically with
2156  sequentially consistent memory order */
2157 
2158  return (_INTRIN_SEQ_CST(_InterlockedExchangeAdd64)((volatile _LONGLONG *)_Tgt, _Value));
2159  }
2160 
2161 inline _Uint8_t _Fetch_add_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2162  { /* add _Value to *_Tgt atomically with
2163  relaxed memory order */
2164 
2165  return (_INTRIN_RELAXED(_InterlockedExchangeAdd64)((volatile _LONGLONG *)_Tgt, _Value));
2166  }
2167 
2168 inline _Uint8_t _Fetch_add_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2169  { /* add _Value to *_Tgt atomically with
2170  acquire memory order */
2171 
2172  return (_INTRIN_ACQUIRE(_InterlockedExchangeAdd64)((volatile _LONGLONG *)_Tgt, _Value));
2173  }
2174 
2175 inline _Uint8_t _Fetch_add_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2176  { /* add _Value to *_Tgt atomically with
2177  release memory order */
2178 
2179  return (_INTRIN_RELEASE(_InterlockedExchangeAdd64)((volatile _LONGLONG *)_Tgt, _Value));
2180  }
2181 
2182 inline _Uint8_t _Atomic_fetch_add_8(
2183  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
2184  { /* add _Value to *_Tgt atomically */
2185  switch (_Order)
2186  {
2187  case memory_order_relaxed:
2188  return (_Fetch_add_relaxed_8(_Tgt, _Value));
2189 
2190  case memory_order_consume:
2191  case memory_order_acquire:
2192  return (_Fetch_add_acquire_8(_Tgt, _Value));
2193 
2194  case memory_order_release:
2195  return (_Fetch_add_release_8(_Tgt, _Value));
2196 
2197  case memory_order_acq_rel:
2198  case memory_order_seq_cst:
2199  return (_Fetch_add_seq_cst_8(_Tgt, _Value));
2200 
 2201  default:
 2202  _INVALID_MEMORY_ORDER;
 2203  return (0);
2204  }
2205  }
2206 
2207 inline _Uint8_t _Atomic_fetch_sub_8(
2208  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
2209  { /* subtract _Value from *_Tgt atomically */
2210  return (_Atomic_fetch_add_8(_Tgt, 0 - _Value, _Order));
2211  }
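 /* The subtraction above relies on unsigned two's-complement
    wraparound: 0 - _Value equals 2^64 - _Value, so adding it yields
    *_Tgt - _Value modulo 2^64. A small illustrative check;
    _Sample_sub_as_add is not part of this header. */
 inline int _Sample_sub_as_add(void)
     { /* returns 1: 5 + (0 - 2) wraps to 3 in unsigned arithmetic */
     _Uint8_t _Left = 5;
     _Uint8_t _Negated = 0 - (_Uint8_t)2; /* 0xFFFFFFFFFFFFFFFE */
     return ((_Uint8_t)(_Left + _Negated) == 3);
     }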
2212 
2213  /* _Atomic_fetch_and_8 */
2214 inline _Uint8_t _Fetch_and_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2215  { /* and _Value with *_Tgt atomically with
2216  sequentially consistent memory order */
2217 
2218  return (_INTRIN_SEQ_CST(_InterlockedAnd64)((volatile _LONGLONG *)_Tgt, _Value));
2219  }
2220 
2221 inline _Uint8_t _Fetch_and_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2222  { /* and _Value with *_Tgt atomically with
2223  relaxed memory order */
2224 
2225  return (_INTRIN_RELAXED(_InterlockedAnd64)((volatile _LONGLONG *)_Tgt, _Value));
2226  }
2227 
2228 inline _Uint8_t _Fetch_and_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2229  { /* and _Value with *_Tgt atomically with
2230  acquire memory order */
2231 
2232  return (_INTRIN_ACQUIRE(_InterlockedAnd64)((volatile _LONGLONG *)_Tgt, _Value));
2233  }
2234 
2235 inline _Uint8_t _Fetch_and_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2236  { /* and _Value with *_Tgt atomically with
2237  release memory order */
2238 
2239  return (_INTRIN_RELEASE(_InterlockedAnd64)((volatile _LONGLONG *)_Tgt, _Value));
2240  }
2241 
2242 inline _Uint8_t _Atomic_fetch_and_8(
2243  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
2244  { /* and _Value with *_Tgt atomically */
2245  switch (_Order)
2246  {
2247  case memory_order_relaxed:
2248  return (_Fetch_and_relaxed_8(_Tgt, _Value));
2249 
2250  case memory_order_consume:
2251  case memory_order_acquire:
2252  return (_Fetch_and_acquire_8(_Tgt, _Value));
2253 
2254  case memory_order_release:
2255  return (_Fetch_and_release_8(_Tgt, _Value));
2256 
2257  case memory_order_acq_rel:
2258  case memory_order_seq_cst:
2259  return (_Fetch_and_seq_cst_8(_Tgt, _Value));
2260 
 2261  default:
 2262  _INVALID_MEMORY_ORDER;
 2263  return (0);
2264  }
2265  }
2266 
2267  /* _Atomic_fetch_or_8 */
2268 inline _Uint8_t _Fetch_or_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2269  { /* or _Value with *_Tgt atomically with
2270  sequentially consistent memory order */
2271 
2272  return (_INTRIN_SEQ_CST(_InterlockedOr64)((volatile _LONGLONG *)_Tgt, _Value));
2273  }
2274 
2275 inline _Uint8_t _Fetch_or_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2276  { /* or _Value with *_Tgt atomically with
2277  relaxed memory order */
2278 
2279  return (_INTRIN_RELAXED(_InterlockedOr64)((volatile _LONGLONG *)_Tgt, _Value));
2280  }
2281 
2282 inline _Uint8_t _Fetch_or_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2283  { /* or _Value with *_Tgt atomically with
2284  acquire memory order */
2285 
2286  return (_INTRIN_ACQUIRE(_InterlockedOr64)((volatile _LONGLONG *)_Tgt, _Value));
2287  }
2288 
2289 inline _Uint8_t _Fetch_or_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2290  { /* or _Value with *_Tgt atomically with
2291  release memory order */
2292 
2293  return (_INTRIN_RELEASE(_InterlockedOr64)((volatile _LONGLONG *)_Tgt, _Value));
2294  }
2295 
2296 inline _Uint8_t _Atomic_fetch_or_8(
2297  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
2298  { /* or _Value with *_Tgt atomically */
2299  switch (_Order)
2300  {
2301  case memory_order_relaxed:
2302  return (_Fetch_or_relaxed_8(_Tgt, _Value));
2303 
2304  case memory_order_consume:
2305  case memory_order_acquire:
2306  return (_Fetch_or_acquire_8(_Tgt, _Value));
2307 
2308  case memory_order_release:
2309  return (_Fetch_or_release_8(_Tgt, _Value));
2310 
2311  case memory_order_acq_rel:
2312  case memory_order_seq_cst:
2313  return (_Fetch_or_seq_cst_8(_Tgt, _Value));
2314 
 2315  default:
 2316  _INVALID_MEMORY_ORDER;
 2317  return (0);
2318  }
2319  }
2320 
2321  /* _Atomic_fetch_xor_8 */
2322 inline _Uint8_t _Fetch_xor_seq_cst_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2323  { /* xor _Value with *_Tgt atomically with
2324  sequentially consistent memory order */
2325 
2326  return (_INTRIN_SEQ_CST(_InterlockedXor64)((volatile _LONGLONG *)_Tgt, _Value));
2327  }
2328 
2329 inline _Uint8_t _Fetch_xor_relaxed_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2330  { /* xor _Value with *_Tgt atomically with
2331  relaxed memory order */
2332 
2333  return (_INTRIN_RELAXED(_InterlockedXor64)((volatile _LONGLONG *)_Tgt, _Value));
2334  }
2335 
2336 inline _Uint8_t _Fetch_xor_acquire_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2337  { /* xor _Value with *_Tgt atomically with
2338  acquire memory order */
2339 
2340  return (_INTRIN_ACQUIRE(_InterlockedXor64)((volatile _LONGLONG *)_Tgt, _Value));
2341  }
2342 
2343 inline _Uint8_t _Fetch_xor_release_8(volatile _Uint8_t *_Tgt, _Uint8_t _Value)
2344  { /* xor _Value with *_Tgt atomically with
2345  release memory order */
2346 
2347  return (_INTRIN_RELEASE(_InterlockedXor64)((volatile _LONGLONG *)_Tgt, _Value));
2348  }
2349 
2350 inline _Uint8_t _Atomic_fetch_xor_8(
2351  volatile _Uint8_t *_Tgt, _Uint8_t _Value, memory_order _Order)
2352  { /* xor _Value with *_Tgt atomically */
2353  switch (_Order)
2354  {
2355  case memory_order_relaxed:
2356  return (_Fetch_xor_relaxed_8(_Tgt, _Value));
2357 
2358  case memory_order_consume:
2359  case memory_order_acquire:
2360  return (_Fetch_xor_acquire_8(_Tgt, _Value));
2361 
2362  case memory_order_release:
2363  return (_Fetch_xor_release_8(_Tgt, _Value));
2364 
2365  case memory_order_acq_rel:
2366  case memory_order_seq_cst:
2367  return (_Fetch_xor_seq_cst_8(_Tgt, _Value));
2368 
 2369  default:
 2370  _INVALID_MEMORY_ORDER;
 2371  return (0);
2372  }
2373  }
2374 
2375 inline int _Atomic_flag_test_and_set(volatile _Atomic_flag_t *_Flag,
2376  memory_order _Order)
2377  { /* atomically test flag and set to true */
2378  switch (_Order)
2379  {
2380  case memory_order_relaxed:
2381  return (_INTRIN_RELAXED(_interlockedbittestandset)(_Flag, 0));
2382 
2383  case memory_order_consume:
2384  case memory_order_acquire:
2385  return (_INTRIN_ACQUIRE(_interlockedbittestandset)(_Flag, 0));
2386 
2387  case memory_order_release:
2388  return (_INTRIN_RELEASE(_interlockedbittestandset)(_Flag, 0));
2389 
2390  case memory_order_acq_rel:
2391  case memory_order_seq_cst:
2392  return (_INTRIN_SEQ_CST(_interlockedbittestandset)(_Flag, 0));
2393 
 2394  default:
 2395  _INVALID_MEMORY_ORDER;
 2396  return (0);
2397  }
2398  }
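 /* _interlockedbittestandset returns the previous state of bit 0, so
    a zero result means the caller is the one that set the flag. A
    minimal try-lock sketch built on that return value;
    _Sample_try_lock is an illustrative name, not part of this header. */
 inline int _Sample_try_lock(volatile _Atomic_flag_t *_Flag)
     { /* nonzero when the flag was previously clear and is now set */
     return (!_Atomic_flag_test_and_set(_Flag, memory_order_acquire));
     }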
2399 
2400 inline void _Atomic_flag_clear(volatile _Atomic_flag_t *_Flag,
2401  memory_order _Order)
2402  { /* atomically clear flag */
2403  static_assert(sizeof(_Atomic_flag_t) == sizeof(_Uint4_t),
2404  "Unexpected _Atomic_flag_t size");
2405 
2406  switch (_Order)
2407  {
2408  case memory_order_relaxed:
2409  case memory_order_release:
2410  case memory_order_seq_cst:
2411  _Atomic_store_4((volatile _Uint4_t *)_Flag, 0, _Order);
2412  break;
2413 
 2414  default:
 2415  _INVALID_MEMORY_ORDER;
 2416  break;
2417  }
2418  }
2419 
2420 inline void _Atomic_thread_fence(memory_order _Order)
 2421  { /* force memory visibility and inhibit compiler reordering */
2422  #if defined(_M_ARM)
2423  if (_Order != memory_order_relaxed)
2424  {
2425  _Memory_barrier();
2426  }
2427 
2428  #else
 2429  _Compiler_barrier();
 2430  if (_Order == memory_order_seq_cst)
 2431  { /* force visibility */
 2432  static _Uint4_t _Guard;
 2433  _Atomic_exchange_4(&_Guard, 0, memory_order_seq_cst);
 2434  _Compiler_barrier();
 2435  }
2436  #endif
2437  }
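 /* On the non-ARM path only a seq_cst fence issues a real atomic
    operation (the dummy exchange on _Guard); weaker orders need only
    the compiler barrier under the strong x86 ordering model. A sketch
    of the publish pattern a release fence supports; the names below
    are illustrative, not part of this header. */
 inline void _Sample_publish(volatile _Uint4_t *_Data,
     volatile _Uint4_t *_Ready)
     { /* order the data store before the ready store */
     _Atomic_store_4(_Data, 42, memory_order_relaxed);
     _Atomic_thread_fence(memory_order_release);
     _Atomic_store_4(_Ready, 1, memory_order_relaxed);
     }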
2438 
2439 inline void _Atomic_signal_fence(memory_order _Order)
 2440  { /* inhibit compiler reordering */
 2441  _Compiler_barrier();
 2442  }
2443 
2444  #if defined(_M_ARM)
2445  #define _YIELD_PROCESSOR __yield()
2446 
2447  #else
2448  #define _YIELD_PROCESSOR
2449  #endif
2450 
2451  /* SPIN LOCK FOR LOCKING VERSIONS OF OPERATIONS */
2452  /* Use acquire semantics on lock and release on unlock. Given our
2453  current atomic_flag implementation, this ensures not just
2454  atomicity but also sequential consistency. */
2455 
2456 inline void _Lock_spin_lock(
2457  volatile _Atomic_flag_t *_Flag)
 2458  { /* spin until _Flag successfully set */
 2459  while (_ATOMIC_FLAG_TEST_AND_SET(_Flag, memory_order_acquire))
 2460  _YIELD_PROCESSOR;
 2461  }
2462 
2463 inline void _Unlock_spin_lock(
2464  volatile _Atomic_flag_t *_Flag)
 2465  { /* release previously obtained lock */
 2466  _ATOMIC_FLAG_CLEAR(_Flag, memory_order_release);
 2467  }
2468 
2469  /* ATOMIC OPERATIONS FOR OBJECTS WITH SIZES THAT
2470  DON'T MATCH THE SIZE OF ANY INTEGRAL TYPE */
2471 inline void _Atomic_copy(
2472  volatile _Atomic_flag_t *_Flag, size_t _Size,
2473  volatile void *_Tgt, volatile const void *_Src,
2474  memory_order _Order)
2475  { /* atomically copy *_Src to *_Tgt with memory ordering */
2476  _Lock_spin_lock(_Flag);
2477  memcpy((void *)_Tgt, (void *)_Src, _Size);
2478  _Unlock_spin_lock(_Flag);
2479  }
2480 
2481 inline void _Atomic_exchange(
2482  volatile _Atomic_flag_t *_Flag, size_t _Size,
2483  volatile void *_Tgt, volatile void *_Src,
2484  memory_order _Order)
2485  { /* atomically swap *_Src and *_Tgt with memory ordering */
2486  unsigned char *_Left = (unsigned char *)_Tgt;
2487  unsigned char *_Right = (unsigned char *)_Src;
2488 
2489  _Lock_spin_lock(_Flag);
2490  for (; 0 < _Size; --_Size)
2491  { /* copy bytes */
2492  unsigned char _Tmp = *_Left;
2493  *_Left++ = *_Right;
2494  *_Right++ = _Tmp;
2495  }
2496  _Unlock_spin_lock(_Flag);
2497  }
2498 
2499 inline int _Atomic_compare_exchange_weak(
 2500  volatile _Atomic_flag_t *_Flag, size_t _Size,
2501  volatile void *_Tgt, volatile void *_Exp, const volatile void *_Src,
2502  memory_order _Order1, memory_order _Order2)
2503  { /* atomically compare and exchange with memory ordering */
2504  int _Result;
2505 
2506  _Lock_spin_lock(_Flag);
2507  _Result = memcmp((const void *)_Tgt, (const void *)_Exp, _Size) == 0;
2508  if (_Result != 0)
2509  memcpy((void *)_Tgt, (void *)_Src, _Size);
2510  else
2511  memcpy((void *)_Exp, (void *)_Tgt, _Size);
2512  _Unlock_spin_lock(_Flag);
2513  return (_Result);
2514  }
2515 
2516 inline int _Atomic_compare_exchange_strong(
 2517  volatile _Atomic_flag_t *_Flag, size_t _Size,
2518  volatile void *_Tgt, volatile void *_Exp, const volatile void *_Src,
2519  memory_order _Order1, memory_order _Order2)
2520  { /* atomically compare and exchange with memory ordering */
2521  return (_Atomic_compare_exchange_weak(_Flag, _Size, _Tgt, _Exp, _Src,
2522  _Order1, _Order2));
2523  }
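 /* These locking forms serve object sizes with no matching integral
    type; every ordering request degrades to the acquire/release pair
    provided by the spin lock. A sketch for a 12-byte object;
    _Sample_cas_12 and the size are illustrative, not part of this
    header. */
 inline int _Sample_cas_12(volatile _Atomic_flag_t *_Lock,
     volatile void *_Obj, void *_Expected, const void *_Desired)
     { /* byte-wise compare-and-exchange under the spin lock */
     return (_Atomic_compare_exchange_strong(_Lock, 12, _Obj, _Expected,
         _Desired, memory_order_seq_cst, memory_order_seq_cst));
     }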
2524 
2525  /* LOCK-FREE PROPERTY FOR INTEGRAL TYPES */
2526 inline int _Atomic_is_lock_free_1(void)
2527  { /* return true if 1-byte atomic values are lock-free */
2528  return (1 <= _ATOMIC_MAXBYTES_LOCK_FREE);
2529  }
2530 
2531 inline int _Atomic_is_lock_free_2(void)
2532  { /* return true if 2-byte atomic values are lock-free */
2533  return (2 <= _ATOMIC_MAXBYTES_LOCK_FREE);
2534  }
2535 
2536 inline int _Atomic_is_lock_free_4(void)
2537  { /* return true if 4-byte atomic values are lock-free */
2538  return (4 <= _ATOMIC_MAXBYTES_LOCK_FREE);
2539  }
2540 
2541 inline int _Atomic_is_lock_free_8(void)
2542  { /* return true if 8-byte atomic values are lock-free */
2543  return (8 <= _ATOMIC_MAXBYTES_LOCK_FREE);
2544  }
2545 _STD_END
2546 
2547  #if defined(_M_IX86)
2548 #pragma pop_macro("_InterlockedExchange64")
2549 #pragma pop_macro("_InterlockedExchangeAdd64")
2550 #pragma pop_macro("_InterlockedAnd64")
2551 #pragma pop_macro("_InterlockedOr64")
2552 #pragma pop_macro("_InterlockedXor64")
2553  #endif /* defined(_M_IX86) */
2554 
2555  #pragma pop_macro("and")
2556  #pragma pop_macro("or")
2557  #pragma pop_macro("xor")
2558  #pragma pop_macro("new")
2559  #pragma warning(pop)
2560  #pragma pack(pop)
2561 #endif /* RC_INVOKED */
2562 #endif /* _XATOMIC_H */
2563 
2564 /*
2565  * Copyright (c) 1992-2012 by P.J. Plauger. ALL RIGHTS RESERVED.
2566  * Consult your license regarding permissions and restrictions.
2567 V6.00:0009 */