/*
  Two-pass include scheme: on the first include the atomic operations
  below are defined; _atomic_h_cleanup_ records this file's own path so
  that (presumably) the parent header can re-include it afterwards to
  undo the helper macros -- TODO confirm against my_atomic.h.
*/
17 #ifndef _atomic_h_cleanup_
18 #define _atomic_h_cleanup_ "atomic/generic-msvc.h"
/*
  Only 32-bit, 64-bit and pointer-sized operations are defined below,
  so withdraw the 8/16-bit capability flag for this implementation.
*/
25 #undef MY_ATOMIC_HAS_8_16
/*
  For Visual Studio 2008 (_MSC_VER >= 1500) and later: declare the
  underscore-prefixed compare-and-swap compiler intrinsics explicitly.
  NOTE(review): the two prototypes below are line-wrapped oddly -- this
  looks like an extraction artifact of the excerpt, not the real layout.
*/
36 #if (_MSC_VER >= 1500)
41 LONG _InterlockedCompareExchange (LONG
volatile *
Target, LONG Value, LONG
Comp);
42 LONGLONG _InterlockedCompareExchange64 (LONGLONG
volatile *
Target,
43 LONGLONG Value, LONGLONG
Comp);
/* Force the compiler to emit these as intrinsics, not library calls. */
46 #pragma intrinsic(_InterlockedCompareExchange)
47 #pragma intrinsic(_InterlockedCompareExchange64)
/*
  Map the plain Win32 API names used by the IL_* wrappers below onto
  the intrinsic forms declared above.
*/
50 #define InterlockedCompareExchange _InterlockedCompareExchange
51 #define InterlockedCompareExchange64 _InterlockedCompareExchange64
/* Human-readable name identifying this my_atomic backend. */
59 #define MY_ATOMIC_MODE "msvc-intrinsics"
/*
  Size-suffixed compare-exchange wrappers.  The suffix (32/64/ptr) is
  token-pasted onto IL_COMP_EXCHG by the body macros below; the casts
  adapt the caller's pointer to the LONG/LONGLONG types the Win32
  Interlocked API expects.
*/
61 #define IL_COMP_EXCHG32(X,Y,Z) \
62 InterlockedCompareExchange((volatile LONG *)(X),(Y),(Z))
63 #define IL_COMP_EXCHG64(X,Y,Z) \
64 InterlockedCompareExchange64((volatile LONGLONG *)(X), \
65 (LONGLONG)(Y),(LONGLONG)(Z))
66 #define IL_COMP_EXCHGptr InterlockedCompareExchangePointer
/*
  CAS body, expanded with S in {32, 64, ptr}.  Expects variables a,
  cmp, set and ret to be in scope at the expansion site.
  InterlockedCompareExchange returns the value *a held before the call,
  so the CAS succeeded iff that initial value equals *cmp; on failure
  the observed value is written back into *cmp (the usual
  compare_exchange contract).
*/
68 #define make_atomic_cas_body(S) \
69 int ## S initial_cmp= *cmp; \
70 int ## S initial_a= IL_COMP_EXCHG ## S (a, set, initial_cmp); \
71 if (!(ret= (initial_a == initial_cmp))) *cmp= initial_a;
/*
  Size-suffixed fetch-and-add and exchange wrappers, same casting
  pattern as the IL_COMP_EXCHG* family above.
*/
75 #define IL_EXCHG_ADD32(X,Y) \
76 InterlockedExchangeAdd((volatile LONG *)(X),(Y))
77 #define IL_EXCHG_ADD64(X,Y) \
78 InterlockedExchangeAdd64((volatile LONGLONG *)(X),(LONGLONG)(Y))
79 #define IL_EXCHG32(X,Y) \
80 InterlockedExchange((volatile LONG *)(X),(Y))
81 #define IL_EXCHG64(X,Y) \
82 InterlockedExchange64((volatile LONGLONG *)(X),(LONGLONG)(Y))
83 #define IL_EXCHGptr InterlockedExchangePointer
/*
  Add body: InterlockedExchangeAdd returns the pre-add value, so v
  ends up holding the value *a had before the addition (fetch-and-add).
*/
85 #define make_atomic_add_body(S) \
86 v= IL_EXCHG_ADD ## S (a, v)
/* Swap body: v receives the previous contents of *a. */
87 #define make_atomic_swap_body(S) \
88 v= IL_EXCHG ## S (a, v)
/*
  Load body: implemented as a CAS of ret against itself.  If *a == ret
  nothing is written and ret is already the loaded value; otherwise the
  CAS returns the current *a, which is assigned to ret.  Either way ret
  holds *a afterwards.  NOTE(review): original line 90 is missing from
  this excerpt (gap between embedded numbers 89 and 91) -- confirm
  against the full file before editing this macro.
*/
89 #define make_atomic_load_body(S) \
91 ret= IL_COMP_EXCHG ## S (a, ret, ret);
/* Iteration count for the spin-wait helper below. */
105 #define YIELD_LOOPS 200
/*
  NOTE(review): my_yield_processor() is only partially visible in this
  excerpt -- original lines 109-122 (opening brace, declaration of i,
  and the loop body) are missing.  Presumably each iteration issues a
  CPU pause/YieldProcessor hint; confirm against the full file.  Do not
  edit this function from this view.
*/
108 static __inline
int my_yield_processor()
111 for(i=0; i<YIELD_LOOPS; i++)
/*
  Older compilers (VS 2003 and earlier, _MSC_VER <= 1310) get an
  alternative loop body -- that branch's contents are not visible here.
*/
113 #if (_MSC_VER <= 1310)
/* Back-off hook used by lock-free code while spinning. */
123 #define LF_BACKOFF my_yield_processor()
/*
  Cleanup: remove the internal IL_* helper names so they do not leak
  into the rest of the build.  NOTE(review): the #else/#endif structure
  that selects this branch is not visible in this excerpt -- presumably
  these run on the second include, triggered via _atomic_h_cleanup_,
  after the make_atomic_* bodies have been expanded.
*/
126 #undef IL_EXCHG_ADD32
127 #undef IL_EXCHG_ADD64
128 #undef IL_COMP_EXCHG32
129 #undef IL_COMP_EXCHG64
130 #undef IL_COMP_EXCHGptr