X-Git-Url: http://git.liburcu.org/?p=urcu.git;a=blobdiff_plain;f=urcu%2Farch_x86.h;h=d0a58e80caa5abfc980a58ad51407f82b0ceeee6;hp=56250565e8001a08155f568148e1201f63324782;hb=5481ddb381061bda64aebc039900d21cac6a6caf;hpb=6982d6d71aeed16d2d929bd0ed221e8f444b706e

diff --git a/urcu/arch_x86.h b/urcu/arch_x86.h
index 5625056..d0a58e8 100644
--- a/urcu/arch_x86.h
+++ b/urcu/arch_x86.h
@@ -11,7 +11,7 @@
  * modify it under the terms of the GNU Lesser General Public
  * License as published by the Free Software Foundation; either
  * version 2.1 of the License, or (at your option) any later version.
-*
+ *
  * This library is distributed in the hope that it will be useful,
  * but WITHOUT ANY WARRANTY; without even the implied warranty of
  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
@@ -32,17 +32,17 @@ extern "C" {
 #define CACHE_LINE_SIZE 128
 
 #ifdef CONFIG_RCU_HAVE_FENCE
-#define mb() asm volatile("mfence":::"memory")
-#define rmb() asm volatile("lfence":::"memory")
-#define wmb() asm volatile("sfence"::: "memory")
+#define cmm_mb() asm volatile("mfence":::"memory")
+#define cmm_rmb() asm volatile("lfence":::"memory")
+#define cmm_wmb() asm volatile("sfence"::: "memory")
 #else
 /*
- * Some non-Intel clones support out of order store. wmb() ceases to be a
+ * Some non-Intel clones support out of order store. cmm_wmb() ceases to be a
  * nop for these.
  */
-#define mb() asm volatile("lock; addl $0,0(%%esp)":::"memory")
-#define rmb() asm volatile("lock; addl $0,0(%%esp)":::"memory")
-#define wmb() asm volatile("lock; addl $0,0(%%esp)"::: "memory")
+#define cmm_mb() asm volatile("lock; addl $0,0(%%esp)":::"memory")
+#define cmm_rmb() asm volatile("lock; addl $0,0(%%esp)":::"memory")
+#define cmm_wmb() asm volatile("lock; addl $0,0(%%esp)"::: "memory")
 #endif
 
 #define cpu_relax() asm volatile("rep; nop" : : : "memory");
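
Below is a minimal usage sketch (not part of the patch) of how the renamed cmm_wmb()/cmm_rmb() barriers are typically paired to publish a value to a concurrent reader. It assumes <urcu/arch.h> is on the include path and selects the per-architecture definitions shown above; the publish()/consume() helpers and variable names are illustrative only, and real liburcu code would also use the library's shared-access helpers (or volatile, as here) to keep the compiler from caching the flag.

#include <urcu/arch.h>	/* assumed to pull in cmm_wmb()/cmm_rmb() for this arch */

static int payload;
static volatile int ready;

/* Writer: fill in the payload, then set the flag. */
static void publish(int value)
{
	payload = value;
	cmm_wmb();	/* order the payload store before the flag store */
	ready = 1;
}

/* Reader: poll the flag, then read the payload. */
static int consume(int *out)
{
	if (!ready)
		return 0;	/* nothing published yet */
	cmm_rmb();	/* order the flag load before the payload load */
	*out = payload;
	return 1;
}

int main(void)
{
	int v;

	publish(42);
	if (consume(&v))
		return v == 42 ? 0 : 1;
	return 1;
}

On CONFIG_RCU_HAVE_FENCE builds cmm_wmb()/cmm_rmb() expand to sfence/lfence; on the fallback path both become the locked add to the stack, which is a full barrier on x86 clones that can reorder stores.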