author    Gergely Nagy <ngg@ngg.hu>  2016-12-11 14:50:18 +0100
committer Gergely Nagy <ngg@ngg.hu>  2016-12-12 23:04:09 +0100
commit    ec021f5429d458467e9fd8dcdd452f886b5bf324 (patch)
tree      125a320ecae3cd1149589673c8e2ad621ed1b6b2 /src/google/protobuf/stubs/atomicops_internals_x86_gcc.h
parent    29fb87e1d554a739e47b647a7e8a1a20be8851c9 (diff)
Add support for Windows ARM builds
Diffstat (limited to 'src/google/protobuf/stubs/atomicops_internals_x86_gcc.h')
-rw-r--r--  src/google/protobuf/stubs/atomicops_internals_x86_gcc.h | 12
1 file changed, 6 insertions, 6 deletions
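The diff below renames the header's internal MemoryBarrier() helper to MemoryBarrierInternal(). The commit subject only says "Add support for Windows ARM builds", but the likely reason for the rename is that Windows SDK headers declare a MemoryBarrier of their own (a function-like macro on ARM targets), which would clash with a function of the same name. The sketch that follows is a hypothetical, self-contained illustration of that kind of clash and of the rename that avoids it; the macro body, the file name, and the int-based Acquire_Store here are stand-ins, not protobuf or Windows SDK code. It compiles with g++ or clang++.

    // barrier_rename_sketch.cc -- illustrative only, not part of the commit.
    // Stand-in for a platform header that defines MemoryBarrier as a
    // function-like macro, the way winnt.h does on ARM.
    #define MemoryBarrier() __sync_synchronize()

    // A function named MemoryBarrier() could not be defined below this
    // point: the preprocessor would rewrite its declaration. Giving the
    // internal helper a distinct name sidesteps the macro entirely.
    inline void MemoryBarrierInternal() {
      __sync_synchronize();  // full memory fence (GCC/Clang builtin)
    }

    // Mirrors the Acquire_Store pattern from the diff: plain store, then fence.
    inline void Acquire_Store(volatile int* ptr, int value) {
      *ptr = value;
      MemoryBarrierInternal();
    }

    int main() {
      volatile int ready = 0;
      Acquire_Store(&ready, 1);
      return ready == 1 ? 0 : 1;
    }

Renaming the helper, rather than #undef-ing the platform macro, leaves MemoryBarrier available to any other code that expects the platform definition.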
diff --git a/src/google/protobuf/stubs/atomicops_internals_x86_gcc.h b/src/google/protobuf/stubs/atomicops_internals_x86_gcc.h
index edccc59d..e80121fd 100644
--- a/src/google/protobuf/stubs/atomicops_internals_x86_gcc.h
+++ b/src/google/protobuf/stubs/atomicops_internals_x86_gcc.h
@@ -119,18 +119,18 @@ inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
 // 64-bit implementations of memory barrier can be simpler, because it
 // "mfence" is guaranteed to exist.
-inline void MemoryBarrier() {
+inline void MemoryBarrierInternal() {
   __asm__ __volatile__("mfence" : : : "memory");
 }

 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
   *ptr = value;
-  MemoryBarrier();
+  MemoryBarrierInternal();
 }

 #else

-inline void MemoryBarrier() {
+inline void MemoryBarrierInternal() {
   if (AtomicOps_Internalx86CPUFeatures.has_sse2) {
     __asm__ __volatile__("mfence" : : : "memory");
   } else { // mfence is faster but not present on PIII
@@ -168,7 +168,7 @@ inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
 }

 inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
-  MemoryBarrier();
+  MemoryBarrierInternal();
   return *ptr;
 }
@@ -225,7 +225,7 @@ inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
 inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
   *ptr = value;
-  MemoryBarrier();
+  MemoryBarrierInternal();
 }

 inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
@@ -262,7 +262,7 @@ inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
 }

 inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
-  MemoryBarrier();
+  MemoryBarrierInternal();
   return *ptr;
 }