Diffstat (limited to 'absl/base/prefetch.h')
 absl/base/prefetch.h | 13 ++++++++++---
 1 file changed, 10 insertions(+), 3 deletions(-)
diff --git a/absl/base/prefetch.h b/absl/base/prefetch.h
index 4d428462..6bc98637 100644
--- a/absl/base/prefetch.h
+++ b/absl/base/prefetch.h
@@ -30,9 +30,11 @@
#include <xmmintrin.h>
#endif
-#if defined(_MSC_VER) && defined(ABSL_INTERNAL_HAVE_SSE)
+#if defined(_MSC_VER) && _MSC_VER >= 1900 && \
+ (defined(_M_X64) || defined(_M_IX86))
#include <intrin.h>
#pragma intrinsic(_mm_prefetch)
+#pragma intrinsic(_m_prefetchw)
#endif
namespace absl {
@@ -174,10 +176,15 @@ inline void PrefetchToLocalCacheNta(const void* addr) {
inline void PrefetchToLocalCacheForWrite(const void* addr) {
#if defined(_MM_HINT_ET0)
_mm_prefetch(reinterpret_cast<const char*>(addr), _MM_HINT_ET0);
-#elif defined(__x86_64__)
+#elif defined(_MSC_VER) && _MSC_VER >= 1900 && \
+ (defined(_M_X64) || defined(_M_IX86))
+  // MSVC 2015 and up on x86/x64 supports prefetchw (feature listed as 3DNOW)
+  _m_prefetchw(const_cast<void*>(addr));
+#elif !defined(_MSC_VER) && defined(__x86_64__)
// _MM_HINT_ET0 is not universally supported. As we commented further
// up, PREFETCHW is recognized as a no-op on older Intel processors
- // and has been present on AMD processors since the K6-2
+  // and has been present on AMD processors since the K6-2. This path is
+  // disabled for MSVC because it miscompiles on older MSVC compilers.
asm("prefetchw (%0)" : : "r"(addr));
#endif
}
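
For context, a minimal usage sketch of the function this change touches. Only
absl::PrefetchToLocalCacheForWrite and the absl/base/prefetch.h header come from
the diff above; the OverwriteBlocks helper and its block sizing are hypothetical,
shown only to illustrate issuing a write prefetch one block ahead of the stores.

#include <cstddef>
#include <cstring>

#include "absl/base/prefetch.h"

// Hypothetical helper: zero a buffer block by block, prefetching the next
// block for write so its cache lines are already in a writable state by the
// time the memset reaches them. On toolchains where none of the branches in
// the #if chain above apply, the call compiles down to a no-op.
void OverwriteBlocks(char* buf, std::size_t num_blocks, std::size_t block_size) {
  for (std::size_t i = 0; i < num_blocks; ++i) {
    if (i + 1 < num_blocks) {
      absl::PrefetchToLocalCacheForWrite(buf + (i + 1) * block_size);
    }
    std::memset(buf + i * block_size, 0, block_size);
  }
}

The prefetch is only a hint: correctness never depends on it, which is why the
#if chain in the diff can safely leave the function body empty on compilers and
architectures where no suitable instruction or intrinsic is available.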