#ifndef __platform_Count_h__
#define __platform_Count_h__

#define FE_COUNT_MT TRUE

#if FE_COUNT_MT && FE_HW==FE_X86 && FE_OS!=FE_OSX
#define FE_COUNT_ASM_IMPL
#endif

#ifdef FE_COUNT_ASM_IMPL

#if FE_OS!=FE_WIN32 && FE_OS!=FE_WIN64

/* Atomically add incr to *pInt; returns the value *pInt held before the add. */
static inline int __attribute__ ((__unused__))
feAsmSwapIncr(
	volatile int *pInt,
	int incr)
{
#if 1	/* form using read-write ("+") constraints */
	__asm__ volatile(
		"lock; xaddl %0, %1"
		: "+r" (incr),
		  "+m" (*pInt));
	return incr;
#else	/* equivalent form using an explicit output register */
	int r;
	__asm__ __volatile__ (
		"lock; xaddl %0,%2"
		: "=r" (r)
		: "0" (incr),
		  "m" (*pInt)
		: "memory");
	return r;
#endif
}
/* Atomically add incr to *pInt; the previous value is not returned. */
static inline void __attribute__ ((__unused__))
feAsmIncr(
	volatile int* pInt,
	int incr)
{
#if 1	/* reuse the xadd-based helper */
	feAsmSwapIncr(pInt, incr);
#else	/* equivalent form using a plain locked add */
	__asm__ __volatile__ (
		"lock; addl %0,%1"
		: :
		"ir" (incr),
		"m" (*pInt)
		: "memory");
#endif
}
#endif	/* !WIN32 && !WIN64 */

#if FE_OS==FE_WIN32 || FE_OS==FE_WIN64

#include <windows.h>	/* InterlockedExchangeAdd(), LONG */

/* Atomically add incr to *pInt; returns the value *pInt held before the add. */
static inline int
feAsmSwapIncr(
	volatile int *pInt,
	int incr)
{
#if 1	/* Win32 API form */
	return InterlockedExchangeAdd(
			reinterpret_cast<volatile LONG *>(pInt), incr);
#else	/* 32-bit MSVC inline-asm form; the result is left in eax */
	__asm
	{
		mov ecx, pInt
		mov eax, incr
		lock xadd dword ptr [ecx], eax
	}
#endif
}
static inline void
feAsmIncr(
	volatile int* pInt,
	int incr)
{
	feAsmSwapIncr(pInt, incr);
}

#endif	/* FE_OS==FE_WIN32 || FE_OS==FE_WIN64 */

#endif	/* FE_COUNT_ASM_IMPL */

#endif	/* __platform_Count_h__ */
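/*
 * Usage sketch (illustrative only; fooRefCount and the surrounding logic are
 * hypothetical, not part of this header).  feAsmIncr() is used when the
 * previous value is not needed; feAsmSwapIncr() is used when the caller must
 * know the value the counter held before the add, e.g. to detect that a
 * reference count has just dropped to zero:
 *
 *	volatile int fooRefCount = 0;
 *
 *	feAsmIncr(&fooRefCount, 1);			// take a reference (0 -> 1)
 *
 *	if(feAsmSwapIncr(&fooRefCount, -1) == 1)	// drop it; 1 was the prior value
 *	{
 *		// the count just reached zero
 *	}
 */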