arch.h
skipping to change at line 63
 * kernels should think twice before enabling this", but for now let's
 * be conservative and leave the full barrier on 32-bit processors. Also,
 * IDT WinChip supports weak store ordering, and the kernel may enable it
 * under our feet; cmm_smp_wmb() ceases to be a nop for these processors.
 */
#define cmm_mb()    __asm__ __volatile__ ("lock; addl $0,0(%%esp)":::"memory")
#define cmm_rmb()   __asm__ __volatile__ ("lock; addl $0,0(%%esp)":::"memory")
#define cmm_wmb()   __asm__ __volatile__ ("lock; addl $0,0(%%esp)":::"memory")
#endif
-#define caa_cpu_relax() __asm__ __volatile__ ("rep; nop" : : : "memory");
+#define caa_cpu_relax() __asm__ __volatile__ ("rep; nop" : : : "memory")
#define rdtscll(val)                                                     \
	do {                                                             \
		unsigned int __a, __d;                                   \
		__asm__ __volatile__ ("rdtsc" : "=a" (__a), "=d" (__d)); \
		(val) = ((unsigned long long)__a)                        \
			| (((unsigned long long)__d) << 32);             \
	} while(0)
typedef unsigned long long cycles_t;
End of changes. 1 change block.
1 line changed or deleted, 1 line changed or added.

This html diff was produced by rfcdiff 1.41. The latest version is available from http://tools.ietf.org/tools/rfcdiff/