 BUILD.gn                |  2 +-
 src/jumper/SkJumper.cpp | 20 +++++++++++++++++---
 2 files changed, 18 insertions(+), 4 deletions(-)
diff --git a/BUILD.gn b/BUILD.gn
index f4298be5a3..8aaa913049 100644
--- a/BUILD.gn
+++ b/BUILD.gn
@@ -29,7 +29,7 @@ declare_args() {
   skia_enable_android_framework_defines = false
   skia_enable_discrete_gpu = true
   skia_enable_effects = true
-  skia_enable_jumper = is_skia_standalone && sanitize != "MSAN"
+  skia_enable_jumper = is_skia_standalone
   skia_enable_gpu = true
   skia_enable_pdf = true
   skia_enable_tools = is_skia_standalone
diff --git a/src/jumper/SkJumper.cpp b/src/jumper/SkJumper.cpp
index 2140fd06a0..60b34791c3 100644
--- a/src/jumper/SkJumper.cpp
+++ b/src/jumper/SkJumper.cpp
@@ -10,6 +10,11 @@
#include "SkRasterPipeline.h"
#include "SkTemplates.h"
+// We'll use __has_feature(memory_sanitizer) to detect MSAN.
+// SkJumper_generated.S is not compiled with MSAN, so MSAN would yell really loud.
+#if !defined(__has_feature)
+ #define __has_feature(x) 0
+#endif
// Stages expect these constants to be set to these values.
// It's fine to rearrange and add new ones if you update SkJumper_constants.
@@ -64,7 +69,10 @@ using StageFn = void(void);
extern "C" {
-#if defined(__aarch64__)
+#if __has_feature(memory_sanitizer)
+ // We'll just run portable code.
+
+#elif defined(__aarch64__)
size_t ASM(start_pipeline,aarch64)(size_t, void**, K*, size_t);
StageFn ASM(just_return,aarch64);
#define M(st) StageFn ASM(st,aarch64);
@@ -108,7 +116,10 @@ extern "C" {
 // Translate SkRasterPipeline's StockStage enum to StageFn function pointers.
-#if defined(__aarch64__)
+#if __has_feature(memory_sanitizer)
+    // We'll just run portable code.
+
+#elif defined(__aarch64__)
     static StageFn* lookup_aarch64(SkRasterPipeline::StockStage st) {
         switch (st) {
             default: return nullptr;
@@ -190,7 +201,10 @@ bool SkRasterPipeline::run_with_jumper(size_t x, size_t n) const {
     };
     // While possible, build and run at full vector stride.
-#if defined(__aarch64__)
+#if __has_feature(memory_sanitizer)
+    // We'll just run portable code.
+
+#elif defined(__aarch64__)
     if (!build_and_run(4, lookup_aarch64, ASM(just_return,aarch64), ASM(start_pipeline,aarch64))) {
        return false;
     }
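
For readers unfamiliar with the preprocessor trick this patch leans on: __has_feature(memory_sanitizer) is a Clang feature probe, and the !defined(__has_feature) fallback keeps that probe compiling as false on compilers that don't provide it. Below is a minimal, self-contained sketch of the same detect-and-fall-back shape in isolation; the file name and messages are illustrative, not Skia code.

// sketch.cpp: the MSAN-detection pattern from this patch, on its own.
#include <cstdio>

// Clang defines __has_feature; on other compilers (GCC, MSVC) make the
// probe expand to 0 so the #if below is simply false.
#if !defined(__has_feature)
    #define __has_feature(x) 0
#endif

int main() {
#if __has_feature(memory_sanitizer)
    // MSAN builds skip uninstrumented fast paths (in SkJumper's case, its
    // hand-written assembly) and take portable code instead.
    std::printf("MSAN build: running the portable path.\n");
#else
    std::printf("Regular build: assembly fast paths are eligible.\n");
#endif
    return 0;
}

Compiled with clang++ -fsanitize=memory, the first branch is taken; with plain clang++ or g++, the second. The same ordering appears at each dispatch site in the patch: the memory_sanitizer case comes first, so the #elif defined(__aarch64__) chain is never reached under MSAN and the pipeline falls through to the portable stages.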