#if !defined(__TBB_machine_H) || defined(__TBB_machine_windows_ia32_H)
#error Do not #include this internal file directly; use public TBB headers instead.
#endif
#define __TBB_machine_windows_ia32_H
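// Machine-level definitions for 32-bit Windows (IA-32) with MSVC or the Intel compiler:
// word size and endianness, memory fences, and lock-prefixed atomic primitives.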
#include "msvc_ia32_common.h"
#define __TBB_WORDSIZE 4
#define __TBB_BIG_ENDIAN 0
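// Compiler-only fence and full memory fence. Intel compilers older than 11.0 get inline-asm
// versions; MSVC 7.0+ (_MSC_VER >= 1300) and newer Intel compilers use the _ReadWriteBarrier
// and _mm_mfence intrinsics.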
#if __INTEL_COMPILER && (__INTEL_COMPILER < 1100)
    #define __TBB_compiler_fence()    __asm { __asm nop }
    #define __TBB_full_memory_fence() __asm { __asm mfence }
#elif _MSC_VER >= 1300 || __INTEL_COMPILER
    #pragma intrinsic(_ReadWriteBarrier)
    #pragma intrinsic(_mm_mfence)
    #define __TBB_compiler_fence()    _ReadWriteBarrier()
    #define __TBB_full_memory_fence() _mm_mfence()
#else
    #error Unsupported compiler - need to define __TBB_{control,acquire,release}_consistency_helper to support it
#endif
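// On IA-32, ordinary aligned loads and stores already carry acquire/release semantics, so the
// consistency helpers only need to prevent compiler reordering.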
#define __TBB_control_consistency_helper() __TBB_compiler_fence()
#define __TBB_acquire_consistency_helper() __TBB_compiler_fence()
#define __TBB_release_consistency_helper() __TBB_compiler_fence()
#if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
    // Suppress conversion warnings (C4244/C4267) around the declarations and helpers below.
    #pragma warning (push)
    #pragma warning (disable: 4244 4267)
#endif
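// 8-byte atomic primitives are not implemented inline on IA-32; they are only declared here and
// provided out of line by the TBB runtime library (hence __TBB_EXPORTED_FUNC).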
extern "C" {
    __int64 __TBB_EXPORTED_FUNC __TBB_machine_cmpswp8 (volatile void *ptr, __int64 value, __int64 comparand );
    __int64 __TBB_EXPORTED_FUNC __TBB_machine_fetchadd8 (volatile void *ptr, __int64 addend );
    __int64 __TBB_EXPORTED_FUNC __TBB_machine_fetchstore8 (volatile void *ptr, __int64 value );
    void __TBB_EXPORTED_FUNC __TBB_machine_store8 (volatile void *ptr, __int64 value );
    __int64 __TBB_EXPORTED_FUNC __TBB_machine_load8 (const volatile void *ptr);
}
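// Defines inline atomic compare-and-swap, fetch-and-add, and fetch-and-store for one operand
// size, using MSVC inline assembly with a lock prefix.
// Parameters: S - operand size in bytes, T - operand/result type, U - argument type,
//             A - accumulator register of matching width (receives the result),
//             C - auxiliary register holding the value operand.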
#define __TBB_MACHINE_DEFINE_ATOMICS(S,T,U,A,C) \
static inline T __TBB_machine_cmpswp##S ( volatile void * ptr, U value, U comparand ) { \
    T result; \
    volatile T *p = (T *)ptr; \
    __asm \
    { \
        __asm mov edx, p \
        __asm mov C , value \
        __asm mov A , comparand \
        __asm lock cmpxchg [edx], C \
        __asm mov result, A \
    } \
    return result; \
} \
\
static inline T __TBB_machine_fetchadd##S ( volatile void * ptr, U addend ) { \
    T result; \
    volatile T *p = (T *)ptr; \
    __asm \
    { \
        __asm mov edx, p \
        __asm mov A, addend \
        __asm lock xadd [edx], A \
        __asm mov result, A \
    } \
    return result; \
} \
\
static inline T __TBB_machine_fetchstore##S ( volatile void * ptr, U value ) { \
    T result; \
    volatile T *p = (T *)ptr; \
    __asm \
    { \
        __asm mov edx, p \
        __asm mov A, value \
        __asm lock xchg [edx], A \
        __asm mov result, A \
    } \
    return result; \
}
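// Instantiate the primitives for 1-, 2-, and 4-byte operands with registers of the matching width.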
__TBB_MACHINE_DEFINE_ATOMICS(1, __int8, __int8, al, cl)
__TBB_MACHINE_DEFINE_ATOMICS(2, __int16, __int16, ax, cx)
__TBB_MACHINE_DEFINE_ATOMICS(4, ptrdiff_t, ptrdiff_t, eax, ecx)
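// Illustrative use (hypothetical variable x): __TBB_machine_cmpswp4( &x, newVal, oldVal )
// stores newVal into x only if x currently holds oldVal, and returns the value of x it observed.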
#undef __TBB_MACHINE_DEFINE_ATOMICS
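// Atomic bitwise OR/AND of a 32-bit value into the word at *operand.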
static inline void __TBB_machine_OR( volatile void *operand, __int32 addend ) {
    __asm
    {
        mov eax, addend
        mov edx, [operand]
        lock or [edx], eax
    }
}

static inline void __TBB_machine_AND( volatile void *operand, __int32 addend ) {
    __asm
    {
        mov eax, addend
        mov edx, [operand]
        lock and [edx], eax
    }
}
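// Publish the bitwise helpers under the names the generic machine layer expects.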
#define __TBB_AtomicOR(P,V) __TBB_machine_OR(P,V)
#define __TBB_AtomicAND(P,V) __TBB_machine_AND(P,V)
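// No machine-specific fenced or relaxed load/store variants are defined here; the generic
// implementations in tbb_machine.h are used instead, with full-fenced stores built on fetch-and-store.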
#define __TBB_USE_FETCHSTORE_AS_FULL_FENCED_STORE 1
#define __TBB_USE_GENERIC_HALF_FENCED_LOAD_STORE 1
#define __TBB_USE_GENERIC_RELAXED_LOAD_STORE 1
#define __TBB_USE_GENERIC_SEQUENTIAL_CONSISTENCY_LOAD_STORE 1
#if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
    #pragma warning (pop)
#endif