author     borenet <borenet@chromium.org>  2016-08-03 08:23:10 -0700
committer  Commit bot <commit-bot@chromium.org>  2016-08-03 08:23:10 -0700
commit     1436a09e1fc3be4655af960d4ffb31066bfe4cdd (patch)
tree       e0f37f3938b2bf1dcd7a432943343839e994714c
parent     7a9f3766aa07f69323f37670a7aeb13605f266bb (diff)
Re-organize Skia recipes
Break the Skia recipe module into:
- skia_vars: defines and stores variables (e.g. paths)
- skia_step: utilities for running Skia steps
- skia_flavor: flavor-specific stuff
- skia: top-level setup, checkout, test/perf steps, etc.

This establishes a saner dependency structure for the recipes: skia_vars is at the bottom level, skia_step depends on it, skia_flavor depends on both of them, skia depends on all of the above, and the recipes themselves may depend on any or all of them.

Next steps:
- Merge buildbot_spec into skia_vars
- Move test_steps and perf_steps from the skia recipe_module into the swarm_test and swarm_perf recipes
- Cleaner checkout_steps process

BUG=skia:5578
GOLD_TRYBOT_URL= https://gold.skia.org/search?issue=2198173002

Review-Url: https://codereview.chromium.org/2198173002
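Note that the modules actually added in this change are named vars, run, flavor, and core (see the diffstat below) rather than the skia_* names used in the message above. As a rough sketch of the intended layering, a recipe built on these modules might look like the following; this snippet is not part of the commit, and the DEPS entries and RunSteps body are illustrative only, inferred from the DEPS lists and API methods defined in the diff:

# Illustrative sketch only; not taken from this commit.
DEPS = [
  'core',    # top-level setup/checkout/test/perf helpers (depends on everything below)
  'flavor',  # platform-specific build/run wrappers (depends on run and vars)
  'run',     # utilities for running steps (depends on vars)
  'vars',    # shared variables, e.g. paths
]

def RunSteps(api):
  api.core.setup()          # sets up vars, checks out Skia, configures the flavor
  api.core.test_steps()     # runs DM through the flavor module
  api.core.cleanup_steps()  # flavor-specific cleanup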
-rw-r--r--infra/bots/recipe_modules/README.md11
-rw-r--r--infra/bots/recipe_modules/core/__init__.py (renamed from infra/bots/recipe_modules/skia/__init__.py)11
-rwxr-xr-xinfra/bots/recipe_modules/core/android_devices.py (renamed from infra/bots/recipe_modules/skia/android_devices.py)0
-rw-r--r--infra/bots/recipe_modules/core/api.py498
-rw-r--r--infra/bots/recipe_modules/core/fake_specs.py (renamed from infra/bots/recipe_modules/skia/fake_specs.py)0
-rw-r--r--infra/bots/recipe_modules/core/resources/binary_size_utils.py (renamed from infra/bots/recipe_modules/skia/resources/binary_size_utils.py)0
-rw-r--r--infra/bots/recipe_modules/core/resources/elf_symbolizer.py (renamed from infra/bots/recipe_modules/skia/resources/elf_symbolizer.py)0
-rwxr-xr-xinfra/bots/recipe_modules/core/resources/generate_and_upload_doxygen.py (renamed from infra/bots/recipe_modules/skia/resources/generate_and_upload_doxygen.py)0
-rwxr-xr-xinfra/bots/recipe_modules/core/resources/run_binary_size_analysis.py (renamed from infra/bots/recipe_modules/skia/resources/run_binary_size_analysis.py)0
-rwxr-xr-xinfra/bots/recipe_modules/core/resources/upload_bench_results.py (renamed from infra/bots/recipe_modules/skia/resources/upload_bench_results.py)0
-rwxr-xr-xinfra/bots/recipe_modules/core/resources/upload_dm_results.py (renamed from infra/bots/recipe_modules/skia/resources/upload_dm_results.py)0
-rwxr-xr-xinfra/bots/recipe_modules/core/ssh_devices.py (renamed from infra/bots/recipe_modules/skia/ssh_devices.py)0
-rw-r--r--infra/bots/recipe_modules/flavor/__init__.py15
-rw-r--r--infra/bots/recipe_modules/flavor/android_flavor.py (renamed from infra/bots/recipe_modules/skia/android_flavor.py)134
-rw-r--r--infra/bots/recipe_modules/flavor/api.py126
-rw-r--r--infra/bots/recipe_modules/flavor/cmake_flavor.py (renamed from infra/bots/recipe_modules/skia/cmake_flavor.py)6
-rw-r--r--infra/bots/recipe_modules/flavor/coverage_flavor.py (renamed from infra/bots/recipe_modules/skia/coverage_flavor.py)39
-rw-r--r--infra/bots/recipe_modules/flavor/default_flavor.py (renamed from infra/bots/recipe_modules/skia/default_flavor.py)114
-rw-r--r--infra/bots/recipe_modules/flavor/gn_flavor.py (renamed from infra/bots/recipe_modules/skia/gn_flavor.py)31
-rw-r--r--infra/bots/recipe_modules/flavor/ios_flavor.py (renamed from infra/bots/recipe_modules/skia/ios_flavor.py)104
-rw-r--r--infra/bots/recipe_modules/flavor/pdfium_flavor.py (renamed from infra/bots/recipe_modules/skia/pdfium_flavor.py)24
-rw-r--r--infra/bots/recipe_modules/flavor/valgrind_flavor.py (renamed from infra/bots/recipe_modules/skia/valgrind_flavor.py)10
-rw-r--r--infra/bots/recipe_modules/flavor/xsan_flavor.py (renamed from infra/bots/recipe_modules/skia/xsan_flavor.py)35
-rw-r--r--infra/bots/recipe_modules/run/__init__.py13
-rw-r--r--infra/bots/recipe_modules/run/api.py152
-rw-r--r--infra/bots/recipe_modules/skia/api.py842
-rw-r--r--infra/bots/recipe_modules/swarming/__init__.py (renamed from infra/bots/recipe_modules/skia_swarming/__init__.py)2
-rw-r--r--infra/bots/recipe_modules/swarming/api.py (renamed from infra/bots/recipe_modules/skia_swarming/api.py)2
-rw-r--r--infra/bots/recipe_modules/vars/__init__.py11
-rw-r--r--infra/bots/recipe_modules/vars/api.py144
-rw-r--r--infra/bots/recipes/swarm_RecreateSKPs.py15
-rw-r--r--infra/bots/recipes/swarm_compile.expected/Build-Win-MSVC-x86_64-Release-Vulkan.json2
-rw-r--r--infra/bots/recipes/swarm_compile.py36
-rw-r--r--infra/bots/recipes/swarm_housekeeper.expected/Housekeeper-PerCommit-Trybot.json2
-rw-r--r--infra/bots/recipes/swarm_housekeeper.expected/Housekeeper-PerCommit.json4
-rw-r--r--infra/bots/recipes/swarm_housekeeper.py27
-rw-r--r--infra/bots/recipes/swarm_perf.py11
-rw-r--r--infra/bots/recipes/swarm_test.expected/legacy_skimage_version.json249
-rw-r--r--infra/bots/recipes/swarm_test.expected/legacy_skp_version.json249
-rw-r--r--infra/bots/recipes/swarm_test.py46
-rw-r--r--infra/bots/recipes/swarm_trigger.expected/Perf-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Release-Trybot.json2
-rw-r--r--infra/bots/recipes/swarm_trigger.expected/Test-Android-GCC-NVIDIA_Shield-GPU-TegraX1-Arm64-Debug-Vulkan.json2
-rw-r--r--infra/bots/recipes/swarm_trigger.expected/Test-Android-GCC-Nexus7v2-GPU-Tegra3-Arm7-Release.json2
-rw-r--r--infra/bots/recipes/swarm_trigger.expected/Test-Mac-Clang-MacMini6.2-CPU-AVX-x86_64-Release.json2
-rw-r--r--infra/bots/recipes/swarm_trigger.expected/Test-Ubuntu-Clang-GCE-CPU-AVX2-x86_64-Coverage-Trybot.json2
-rw-r--r--infra/bots/recipes/swarm_trigger.expected/Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug.json2
-rw-r--r--infra/bots/recipes/swarm_trigger.expected/Test-Win8-MSVC-ShuttleA-GPU-HD7770-x86_64-Release.json2
-rw-r--r--infra/bots/recipes/swarm_trigger.expected/Test-Win8-MSVC-ShuttleB-CPU-AVX2-x86_64-Release.json2
-rw-r--r--infra/bots/recipes/swarm_trigger.expected/Test-iOS-Clang-iPad4-GPU-SGX554-Arm7-Release.json2
-rw-r--r--infra/bots/recipes/swarm_trigger.py52
50 files changed, 1322 insertions, 1713 deletions
diff --git a/infra/bots/recipe_modules/README.md b/infra/bots/recipe_modules/README.md
new file mode 100644
index 0000000000..05ad2cc76b
--- /dev/null
+++ b/infra/bots/recipe_modules/README.md
@@ -0,0 +1,11 @@
+Skia Recipe Modules
+===================
+
+This directory contains recipe modules designed to be used by recipes. They
+are all Skia-specific and some are interrelated:
+
+ * vars - Common variables used by Skia recipes.
+ * run - Utilities for running commands. Depends on vars.
+ * flavor - Run meta-commands for various platforms. Depends on vars and run.
+ * skia - Main module for Skia recipes. Depends on vars, run, and flavor.
+ * swarming - Utilities for running Swarming tasks.
diff --git a/infra/bots/recipe_modules/skia/__init__.py b/infra/bots/recipe_modules/core/__init__.py
index 5f49d42e21..df4078b271 100644
--- a/infra/bots/recipe_modules/skia/__init__.py
+++ b/infra/bots/recipe_modules/core/__init__.py
@@ -3,20 +3,15 @@
# found in the LICENSE file.
DEPS = [
- 'build/adb',
'build/file',
- 'build/gsutil',
- 'build/swarming',
- 'build/swarming_client',
'depot_tools/gclient',
- 'depot_tools/git',
'depot_tools/tryserver',
- 'recipe_engine/json',
+ 'flavor',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/properties',
'recipe_engine/python',
- 'recipe_engine/raw_io',
'recipe_engine/step',
- 'recipe_engine/time',
+ 'run',
+ 'vars',
]
diff --git a/infra/bots/recipe_modules/skia/android_devices.py b/infra/bots/recipe_modules/core/android_devices.py
index 1a59c77333..1a59c77333 100755
--- a/infra/bots/recipe_modules/skia/android_devices.py
+++ b/infra/bots/recipe_modules/core/android_devices.py
diff --git a/infra/bots/recipe_modules/core/api.py b/infra/bots/recipe_modules/core/api.py
new file mode 100644
index 0000000000..6d477efd06
--- /dev/null
+++ b/infra/bots/recipe_modules/core/api.py
@@ -0,0 +1,498 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+# pylint: disable=W0201
+
+
+import json
+import os
+import re
+import sys
+
+from recipe_engine import recipe_api
+from recipe_engine import config_types
+
+from . import fake_specs
+
+
+TEST_EXPECTED_SKP_VERSION = '42'
+TEST_EXPECTED_SK_IMAGE_VERSION = '42'
+
+VERSION_FILE_SK_IMAGE = 'SK_IMAGE_VERSION'
+VERSION_FILE_SKP = 'SKP_VERSION'
+
+VERSION_NONE = -1
+
+
+class SkiaApi(recipe_api.RecipeApi):
+
+ def get_builder_spec(self, skia_dir, builder_name):
+ """Obtain the buildbot spec for the given builder."""
+ fake_spec = None
+ if self._test_data.enabled:
+ fake_spec = fake_specs.FAKE_SPECS[builder_name]
+ builder_spec = self.m.run.json_from_file(
+ skia_dir.join('tools', 'buildbot_spec.py'),
+ skia_dir,
+ builder_name,
+ fake_spec)
+ return builder_spec
+
+ def setup(self):
+ """Prepare the bot to run."""
+ # Setup dependencies.
+ self.m.vars.setup()
+
+ # Check out the Skia code.
+ self.checkout_steps()
+
+ # Obtain the spec for this builder from the Skia repo. Use it to set more
+ # properties.
+ builder_spec = self.get_builder_spec(self.m.vars.skia_dir,
+ self.m.vars.builder_name)
+
+ # Continue setting up vars with the builder_spec.
+ self.m.vars.update_with_builder_spec(builder_spec)
+
+
+ if not self.m.path.exists(self.m.vars.tmp_dir):
+ self.m.run.run_once(self.m.file.makedirs,
+ 'tmp_dir',
+ self.m.vars.tmp_dir,
+ infra_step=True)
+
+ self.m.flavor.setup()
+
+ def update_repo(self, parent_dir, repo):
+ """Update an existing repo. This is safe to call without gen_steps."""
+ repo_path = parent_dir.join(repo.name)
+ if self.m.path.exists(repo_path): # pragma: nocover
+ if self.m.platform.is_win:
+ git = 'git.bat'
+ else:
+ git = 'git'
+ self.m.step('git remote set-url',
+ cmd=[git, 'remote', 'set-url', 'origin', repo.url],
+ cwd=repo_path,
+ infra_step=True)
+ self.m.step('git fetch',
+ cmd=[git, 'fetch'],
+ cwd=repo_path,
+ infra_step=True)
+ self.m.step('git reset',
+ cmd=[git, 'reset', '--hard', repo.revision],
+ cwd=repo_path,
+ infra_step=True)
+ self.m.step('git clean',
+ cmd=[git, 'clean', '-d', '-f'],
+ cwd=repo_path,
+ infra_step=True)
+
+ def checkout_steps(self):
+ """Run the steps to obtain a checkout of Skia."""
+ cfg_kwargs = {}
+ if not self.m.vars.persistent_checkout:
+ # We should've obtained the Skia checkout through isolates, so we don't
+ # need to perform the checkout ourselves.
+ return
+
+ # Use a persistent gclient cache for Swarming.
+ cfg_kwargs['CACHE_DIR'] = self.m.vars.gclient_cache
+
+ # Create the checkout path if necessary.
+ if not self.m.path.exists(self.m.vars.checkout_root):
+ self.m.file.makedirs('checkout_path',
+ self.m.vars.checkout_root,
+ infra_step=True)
+
+ # Initial cleanup.
+ gclient_cfg = self.m.gclient.make_config(**cfg_kwargs)
+ skia = gclient_cfg.solutions.add()
+ skia.name = 'skia'
+ skia.managed = False
+ skia.url = 'https://skia.googlesource.com/skia.git'
+ skia.revision = self.m.properties.get('revision') or 'origin/master'
+ self.update_repo(self.m.vars.checkout_root, skia)
+
+ # TODO(rmistry): Remove the below block after there is a solution for
+ # crbug.com/616443
+ entries_file = self.m.vars.checkout_root.join('.gclient_entries')
+ if self.m.path.exists(entries_file):
+ self.m.file.remove('remove %s' % entries_file,
+ entries_file,
+ infra_step=True) # pragma: no cover
+
+ if self.m.vars.need_chromium_checkout:
+ chromium = gclient_cfg.solutions.add()
+ chromium.name = 'src'
+ chromium.managed = False
+ chromium.url = 'https://chromium.googlesource.com/chromium/src.git'
+ chromium.revision = 'origin/lkgr'
+ self.update_repo(self.m.vars.checkout_root, chromium)
+
+ if self.m.vars.need_pdfium_checkout:
+ pdfium = gclient_cfg.solutions.add()
+ pdfium.name = 'pdfium'
+ pdfium.managed = False
+ pdfium.url = 'https://pdfium.googlesource.com/pdfium.git'
+ pdfium.revision = 'origin/master'
+ self.update_repo(self.m.vars.checkout_root, pdfium)
+
+ # Run 'gclient sync'.
+ gclient_cfg.got_revision_mapping['skia'] = 'got_revision'
+ gclient_cfg.target_os.add('llvm')
+ checkout_kwargs = {}
+ checkout_kwargs['env'] = self.m.vars.default_env
+
+ # api.gclient.revert() assumes things about the layout of the code, so it
+ # fails for us. Run an appropriate revert sequence for trybots instead.
+ gclient_file = self.m.vars.checkout_root.join('.gclient')
+ if (self.m.tryserver.is_tryserver and
+ self.m.path.exists(gclient_file)): # pragma: no cover
+ # These steps taken from:
+ # https://chromium.googlesource.com/chromium/tools/build/+/
+ # 81a696760ab7c25f6606c54fc781b90b8af9fdd2/scripts/slave/
+ # gclient_safe_revert.py
+ if self.m.path.exists(entries_file):
+ self.m.gclient('recurse', [
+ 'recurse', '-i', 'sh', '-c',
+ 'if [ -e .git ]; then git remote update; fi'])
+ self.m.gclient(
+ 'revert',
+ ['revert', '-v', '-v', '-v', '--nohooks', '--upstream'],
+ cwd=self.m.vars.checkout_root)
+
+ update_step = self.m.gclient.checkout(gclient_config=gclient_cfg,
+ cwd=self.m.vars.checkout_root,
+ revert=False,
+ **checkout_kwargs)
+
+ self.m.vars.got_revision = (
+ update_step.presentation.properties['got_revision'])
+ self.m.tryserver.maybe_apply_issue()
+
+ if self.m.vars.need_chromium_checkout:
+ self.m.gclient.runhooks(cwd=self.m.vars.checkout_root,
+ env=self.m.vars.gclient_env)
+
+ def copy_dir(self, host_version, version_file, tmp_dir,
+ host_path, device_path, test_expected_version,
+ test_actual_version):
+ actual_version_file = self.m.path.join(tmp_dir, version_file)
+ # Copy to device.
+ device_version_file = self.m.flavor.device_path_join(
+ self.m.flavor.device_dirs.tmp_dir, version_file)
+ if str(actual_version_file) != str(device_version_file):
+ try:
+ device_version = (
+ self.m.flavor.read_file_on_device(device_version_file))
+ except self.m.step.StepFailure:
+ device_version = VERSION_NONE
+ if device_version != host_version:
+ self.m.flavor.remove_file_on_device(device_version_file)
+ self.m.flavor.create_clean_device_dir(device_path)
+ self.m.flavor.copy_directory_contents_to_device(
+ host_path, device_path)
+
+ # Copy the new version file.
+ self.m.flavor.copy_file_to_device(actual_version_file,
+ device_version_file)
+
+ def _copy_images(self):
+ """Download and copy test images if needed."""
+ version_file = self.m.vars.infrabots_dir.join(
+ 'assets', 'skimage', 'VERSION')
+ test_data = self.m.properties.get(
+ 'test_downloaded_sk_image_version', TEST_EXPECTED_SK_IMAGE_VERSION)
+ version = self.m.run.readfile(
+ version_file,
+ name='Get downloaded skimage VERSION',
+ test_data=test_data).rstrip()
+ self.m.run.writefile(
+ self.m.path.join(self.m.vars.tmp_dir, VERSION_FILE_SK_IMAGE),
+ version)
+ self.copy_dir(
+ version,
+ VERSION_FILE_SK_IMAGE,
+ self.m.vars.tmp_dir,
+ self.m.vars.images_dir,
+ self.m.flavor.device_dirs.images_dir,
+ test_expected_version=self.m.properties.get(
+ 'test_downloaded_sk_image_version',
+ TEST_EXPECTED_SK_IMAGE_VERSION),
+ test_actual_version=self.m.properties.get(
+ 'test_downloaded_sk_image_version',
+ TEST_EXPECTED_SK_IMAGE_VERSION))
+ return version
+
+ def _copy_skps(self):
+ """Download and copy the SKPs if needed."""
+ version_file = self.m.vars.infrabots_dir.join(
+ 'assets', 'skp', 'VERSION')
+ test_data = self.m.properties.get(
+ 'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION)
+ version = self.m.run.readfile(
+ version_file,
+ name='Get downloaded SKP VERSION',
+ test_data=test_data).rstrip()
+ self.m.run.writefile(
+ self.m.path.join(self.m.vars.tmp_dir, VERSION_FILE_SKP),
+ version)
+ self.copy_dir(
+ version,
+ VERSION_FILE_SKP,
+ self.m.vars.tmp_dir,
+ self.m.vars.local_skp_dir,
+ self.m.flavor.device_dirs.skp_dir,
+ test_expected_version=self.m.properties.get(
+ 'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION),
+ test_actual_version=self.m.properties.get(
+ 'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION))
+ return version
+
+ def install(self):
+ """Copy the required executables and files to the device."""
+ # Run any device-specific installation.
+ self.m.flavor.install()
+
+ # TODO(borenet): Only copy files which have changed.
+ # Resources
+ self.m.flavor.copy_directory_contents_to_device(
+ self.m.vars.resource_dir,
+ self.m.flavor.device_dirs.resource_dir)
+
+ def test_steps(self):
+ """Run the DM test."""
+ self.m.run.run_once(self.install)
+ self.m.run.run_once(self._copy_skps)
+ self.m.run.run_once(self._copy_images)
+
+ use_hash_file = False
+ if self.m.vars.upload_dm_results:
+ # This must run before we write anything into
+ # self.m.flavor.device_dirs.dm_dir or we may end up deleting our
+ # output on machines where they're the same.
+ self.m.flavor.create_clean_host_dir(self.m.vars.dm_dir)
+ host_dm_dir = str(self.m.vars.dm_dir)
+ device_dm_dir = str(self.m.flavor.device_dirs.dm_dir)
+ if host_dm_dir != device_dm_dir:
+ self.m.flavor.create_clean_device_dir(device_dm_dir)
+
+ # Obtain the list of already-generated hashes.
+ hash_filename = 'uninteresting_hashes.txt'
+
+ # Ensure that the tmp_dir exists.
+ self.m.run.run_once(self.m.file.makedirs,
+ 'tmp_dir',
+ self.m.vars.tmp_dir,
+ infra_step=True)
+
+ host_hashes_file = self.m.vars.tmp_dir.join(hash_filename)
+ hashes_file = self.m.flavor.device_path_join(
+ self.m.flavor.device_dirs.tmp_dir, hash_filename)
+ self.m.run(
+ self.m.python.inline,
+ 'get uninteresting hashes',
+ program="""
+ import contextlib
+ import math
+ import socket
+ import sys
+ import time
+ import urllib2
+
+ HASHES_URL = 'https://gold.skia.org/_/hashes'
+ RETRIES = 5
+ TIMEOUT = 60
+ WAIT_BASE = 15
+
+ socket.setdefaulttimeout(TIMEOUT)
+ for retry in range(RETRIES):
+ try:
+ with contextlib.closing(
+ urllib2.urlopen(HASHES_URL, timeout=TIMEOUT)) as w:
+ hashes = w.read()
+ with open(sys.argv[1], 'w') as f:
+ f.write(hashes)
+ break
+ except Exception as e:
+ print 'Failed to get uninteresting hashes from %s:' % HASHES_URL
+ print e
+ if retry == RETRIES:
+ raise
+ waittime = WAIT_BASE * math.pow(2, retry)
+ print 'Retry in %d seconds.' % waittime
+ time.sleep(waittime)
+ """,
+ args=[host_hashes_file],
+ cwd=self.m.vars.skia_dir,
+ abort_on_failure=False,
+ fail_build_on_failure=False,
+ infra_step=True)
+
+ if self.m.path.exists(host_hashes_file):
+ self.m.flavor.copy_file_to_device(host_hashes_file, hashes_file)
+ use_hash_file = True
+
+ # Run DM.
+ properties = [
+ 'gitHash', self.m.vars.got_revision,
+ 'master', self.m.vars.master_name,
+ 'builder', self.m.vars.builder_name,
+ 'build_number', self.m.vars.build_number,
+ ]
+ if self.m.vars.is_trybot:
+ properties.extend([
+ 'issue', self.m.vars.issue,
+ 'patchset', self.m.vars.patchset,
+ ])
+
+ args = [
+ 'dm',
+ '--undefok', # This helps branches that may not know new flags.
+ '--resourcePath', self.m.flavor.device_dirs.resource_dir,
+ '--skps', self.m.flavor.device_dirs.skp_dir,
+ '--images', self.m.flavor.device_path_join(
+ self.m.flavor.device_dirs.images_dir, 'dm'),
+ '--colorImages', self.m.flavor.device_path_join(
+ self.m.flavor.device_dirs.images_dir, 'colorspace'),
+ '--nameByHash',
+ '--properties'
+ ] + properties
+
+ args.append('--key')
+ args.extend(self._KeyParams())
+ if use_hash_file:
+ args.extend(['--uninterestingHashesFile', hashes_file])
+ if self.m.vars.upload_dm_results:
+ args.extend(['--writePath', self.m.flavor.device_dirs.dm_dir])
+
+ skip_flag = None
+ if self.m.vars.builder_cfg.get('cpu_or_gpu') == 'CPU':
+ skip_flag = '--nogpu'
+ elif self.m.vars.builder_cfg.get('cpu_or_gpu') == 'GPU':
+ skip_flag = '--nocpu'
+ if skip_flag:
+ args.append(skip_flag)
+ args.extend(self.m.vars.dm_flags)
+
+ self.m.run(self.m.flavor.step, 'dm', cmd=args,
+ abort_on_failure=False,
+ env=self.m.vars.default_env)
+
+ if self.m.vars.upload_dm_results:
+ # Copy images and JSON to host machine if needed.
+ self.m.flavor.copy_directory_contents_to_host(
+ self.m.flavor.device_dirs.dm_dir, self.m.vars.dm_dir)
+
+ # See skia:2789.
+ if ('Valgrind' in self.m.vars.builder_name and
+ self.m.vars.builder_cfg.get('cpu_or_gpu') == 'GPU'):
+ abandonGpuContext = list(args)
+ abandonGpuContext.append('--abandonGpuContext')
+ self.m.run(self.m.flavor.step, 'dm --abandonGpuContext',
+ cmd=abandonGpuContext, abort_on_failure=False)
+ preAbandonGpuContext = list(args)
+ preAbandonGpuContext.append('--preAbandonGpuContext')
+ self.m.run(self.m.flavor.step, 'dm --preAbandonGpuContext',
+ cmd=preAbandonGpuContext, abort_on_failure=False,
+ env=self.m.vars.default_env)
+
+ def perf_steps(self):
+ """Run Skia benchmarks."""
+ self.m.run.run_once(self.install)
+ self.m.run.run_once(self._copy_skps)
+ self.m.run.run_once(self._copy_images)
+
+ if self.m.vars.upload_perf_results:
+ self.m.flavor.create_clean_device_dir(
+ self.m.flavor.device_dirs.perf_data_dir)
+
+ # Run nanobench.
+ properties = [
+ '--properties',
+ 'gitHash', self.m.vars.got_revision,
+ 'build_number', self.m.vars.build_number,
+ ]
+ if self.m.vars.is_trybot:
+ properties.extend([
+ 'issue', self.m.vars.issue,
+ 'patchset', self.m.vars.patchset,
+ ])
+
+ target = 'nanobench'
+ if 'VisualBench' in self.m.vars.builder_name:
+ target = 'visualbench'
+ args = [
+ target,
+ '--undefok', # This helps branches that may not know new flags.
+ '-i', self.m.flavor.device_dirs.resource_dir,
+ '--skps', self.m.flavor.device_dirs.skp_dir,
+ '--images', self.m.flavor.device_path_join(
+ self.m.flavor.device_dirs.images_dir, 'nanobench'),
+ ]
+
+ skip_flag = None
+ if self.m.vars.builder_cfg.get('cpu_or_gpu') == 'CPU':
+ skip_flag = '--nogpu'
+ elif self.m.vars.builder_cfg.get('cpu_or_gpu') == 'GPU':
+ skip_flag = '--nocpu'
+ if skip_flag:
+ args.append(skip_flag)
+ args.extend(self.m.vars.nanobench_flags)
+
+ if self.m.vars.upload_perf_results:
+ json_path = self.m.flavor.device_path_join(
+ self.m.flavor.device_dirs.perf_data_dir,
+ 'nanobench_%s.json' % self.m.vars.got_revision)
+ args.extend(['--outResultsFile', json_path])
+ args.extend(properties)
+
+ keys_blacklist = ['configuration', 'role', 'is_trybot']
+ args.append('--key')
+ for k in sorted(self.m.vars.builder_cfg.keys()):
+ if not k in keys_blacklist:
+ args.extend([k, self.m.vars.builder_cfg[k]])
+
+ self.m.run(self.m.flavor.step, target, cmd=args,
+ abort_on_failure=False,
+ env=self.m.vars.default_env)
+
+ # See skia:2789.
+ if ('Valgrind' in self.m.vars.builder_name and
+ self.m.vars.builder_cfg.get('cpu_or_gpu') == 'GPU'):
+ abandonGpuContext = list(args)
+ abandonGpuContext.extend(['--abandonGpuContext', '--nocpu'])
+ self.m.run(self.m.flavor.step,
+ '%s --abandonGpuContext' % target,
+ cmd=abandonGpuContext, abort_on_failure=False,
+ env=self.m.vars.default_env)
+
+ # Upload results.
+ if self.m.vars.upload_perf_results:
+ self.m.file.makedirs('perf_dir', self.m.vars.perf_data_dir)
+ self.m.flavor.copy_directory_contents_to_host(
+ self.m.flavor.device_dirs.perf_data_dir,
+ self.m.vars.perf_data_dir)
+
+ def cleanup_steps(self):
+ """Run any cleanup steps."""
+ self.m.flavor.cleanup_steps()
+
+ def _KeyParams(self):
+ """Build a unique key from the builder name (as a list).
+
+ E.g. arch x86 gpu GeForce320M mode MacMini4.1 os Mac10.6
+ """
+ # Don't bother to include role, which is always Test.
+ # TryBots are uploaded elsewhere so they can use the same key.
+ blacklist = ['role', 'is_trybot']
+
+ flat = []
+ for k in sorted(self.m.vars.builder_cfg.keys()):
+ if k not in blacklist:
+ flat.append(k)
+ flat.append(self.m.vars.builder_cfg[k])
+ return flat
diff --git a/infra/bots/recipe_modules/skia/fake_specs.py b/infra/bots/recipe_modules/core/fake_specs.py
index 4b922b52e7..4b922b52e7 100644
--- a/infra/bots/recipe_modules/skia/fake_specs.py
+++ b/infra/bots/recipe_modules/core/fake_specs.py
diff --git a/infra/bots/recipe_modules/skia/resources/binary_size_utils.py b/infra/bots/recipe_modules/core/resources/binary_size_utils.py
index c09a65dccd..c09a65dccd 100644
--- a/infra/bots/recipe_modules/skia/resources/binary_size_utils.py
+++ b/infra/bots/recipe_modules/core/resources/binary_size_utils.py
diff --git a/infra/bots/recipe_modules/skia/resources/elf_symbolizer.py b/infra/bots/recipe_modules/core/resources/elf_symbolizer.py
index de9c141219..de9c141219 100644
--- a/infra/bots/recipe_modules/skia/resources/elf_symbolizer.py
+++ b/infra/bots/recipe_modules/core/resources/elf_symbolizer.py
diff --git a/infra/bots/recipe_modules/skia/resources/generate_and_upload_doxygen.py b/infra/bots/recipe_modules/core/resources/generate_and_upload_doxygen.py
index f06ea96bc7..f06ea96bc7 100755
--- a/infra/bots/recipe_modules/skia/resources/generate_and_upload_doxygen.py
+++ b/infra/bots/recipe_modules/core/resources/generate_and_upload_doxygen.py
diff --git a/infra/bots/recipe_modules/skia/resources/run_binary_size_analysis.py b/infra/bots/recipe_modules/core/resources/run_binary_size_analysis.py
index 5cb24d967a..5cb24d967a 100755
--- a/infra/bots/recipe_modules/skia/resources/run_binary_size_analysis.py
+++ b/infra/bots/recipe_modules/core/resources/run_binary_size_analysis.py
diff --git a/infra/bots/recipe_modules/skia/resources/upload_bench_results.py b/infra/bots/recipe_modules/core/resources/upload_bench_results.py
index 25cfcc2631..25cfcc2631 100755
--- a/infra/bots/recipe_modules/skia/resources/upload_bench_results.py
+++ b/infra/bots/recipe_modules/core/resources/upload_bench_results.py
diff --git a/infra/bots/recipe_modules/skia/resources/upload_dm_results.py b/infra/bots/recipe_modules/core/resources/upload_dm_results.py
index 1bee64fb78..1bee64fb78 100755
--- a/infra/bots/recipe_modules/skia/resources/upload_dm_results.py
+++ b/infra/bots/recipe_modules/core/resources/upload_dm_results.py
diff --git a/infra/bots/recipe_modules/skia/ssh_devices.py b/infra/bots/recipe_modules/core/ssh_devices.py
index d8ce937572..d8ce937572 100755
--- a/infra/bots/recipe_modules/skia/ssh_devices.py
+++ b/infra/bots/recipe_modules/core/ssh_devices.py
diff --git a/infra/bots/recipe_modules/flavor/__init__.py b/infra/bots/recipe_modules/flavor/__init__.py
new file mode 100644
index 0000000000..ce433e047d
--- /dev/null
+++ b/infra/bots/recipe_modules/flavor/__init__.py
@@ -0,0 +1,15 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+DEPS = [
+ 'build/adb',
+ 'build/file',
+ 'recipe_engine/path',
+ 'recipe_engine/platform',
+ 'recipe_engine/python',
+ 'recipe_engine/raw_io',
+ 'recipe_engine/step',
+ 'run',
+ 'vars',
+]
diff --git a/infra/bots/recipe_modules/skia/android_flavor.py b/infra/bots/recipe_modules/flavor/android_flavor.py
index 442604e74e..35e0b35b0b 100644
--- a/infra/bots/recipe_modules/skia/android_flavor.py
+++ b/infra/bots/recipe_modules/flavor/android_flavor.py
@@ -6,7 +6,6 @@
# pylint: disable=W0201
-import android_devices
import copy
import default_flavor
@@ -21,9 +20,9 @@ class _ADBWrapper(object):
out on our bots. This wrapper ensures that we set a custom ADB path before
attempting to use the module.
"""
- def __init__(self, adb_api, path_to_adb, serial_args, android_flavor):
- self._adb = adb_api
- self._adb.set_adb_path(path_to_adb)
+ def __init__(self, m, path_to_adb, serial_args, android_flavor):
+ self.m = m
+ self.m.adb.set_adb_path(path_to_adb)
self._has_root = False # This is set in install().
self._serial_args = serial_args
self._wait_count = 0
@@ -35,8 +34,8 @@ class _ADBWrapper(object):
cmd = [
self._android_flavor.android_bin.join('adb_wait_for_device')
] + self._serial_args
- self._android_flavor._skia_api.run(
- self._android_flavor._skia_api.m.step,
+ self.m.run(
+ self.m.step,
name='wait for device (%d)' % self._wait_count,
cmd=cmd,
env=self._android_flavor._default_env,
@@ -45,8 +44,8 @@ class _ADBWrapper(object):
cmd = [
self._android_flavor.android_bin.join('adb_wait_for_charge'),
] + self._serial_args
- self._android_flavor._skia_api.run(
- self._android_flavor._skia_api.m.step,
+ self.m.run(
+ self.m.step,
name='wait for charge (%d)' % self._wait_count,
cmd=cmd,
env=self._android_flavor._default_env,
@@ -59,30 +58,30 @@ class _ADBWrapper(object):
def __call__(self, *args, **kwargs):
self.maybe_wait_for_device()
- return self._android_flavor._skia_api.run(self._adb, *args, **kwargs)
+ return self.m.run(self.m.adb, *args, **kwargs)
class AndroidFlavorUtils(default_flavor.DefaultFlavorUtils):
- def __init__(self, skia_api):
- super(AndroidFlavorUtils, self).__init__(skia_api)
- self.device = self._skia_api.builder_spec['device_cfg']
- self.android_bin = self._skia_api.skia_dir.join(
+ def __init__(self, m):
+ super(AndroidFlavorUtils, self).__init__(m)
+ self.device = self.m.vars.builder_spec['device_cfg']
+ self.android_bin = self.m.vars.skia_dir.join(
'platform_tools', 'android', 'bin')
- self._android_sdk_root = self._skia_api.slave_dir.join(
+ self._android_sdk_root = self.m.vars.slave_dir.join(
'android_sdk', 'android-sdk')
self.serial = None
self.serial_args = []
try:
- path_to_adb = self._skia_api.m.step(
+ path_to_adb = self.m.step(
'which adb',
cmd=['which', 'adb'],
- stdout=self._skia_api.m.raw_io.output(),
+ stdout=self.m.raw_io.output(),
infra_step=True).stdout.rstrip()
- except self._skia_api.m.step.StepFailure:
- path_to_adb = self._skia_api.m.path.join(self._android_sdk_root,
+ except self.m.step.StepFailure:
+ path_to_adb = self.m.path.join(self._android_sdk_root,
'platform-tools', 'adb')
self._adb = _ADBWrapper(
- self._skia_api.m.adb, path_to_adb, self.serial_args, self)
+ self.m, path_to_adb, self.serial_args, self)
self._default_env = {'ANDROID_SDK_ROOT': self._android_sdk_root,
'ANDROID_HOME': self._android_sdk_root,
'SKIA_ANDROID_VERBOSE_SETUP': 1}
@@ -95,30 +94,30 @@ class AndroidFlavorUtils(default_flavor.DefaultFlavorUtils):
'--logcat',
'-d', self.device,
] + self.serial_args + [
- '-t', self._skia_api.configuration,
+ '-t', self.m.vars.configuration,
]
env = dict(env or {})
env.update(self._default_env)
- return self._skia_api.run(self._skia_api.m.step, name=name, cmd=args + cmd,
+ return self.m.run(self.m.step, name=name, cmd=args + cmd,
env=env, **kwargs)
def compile(self, target):
"""Build the given target."""
env = dict(self._default_env)
- ccache = self._skia_api.ccache()
+ ccache = self.m.run.ccache()
if ccache:
env['ANDROID_MAKE_CCACHE'] = ccache
cmd = [self.android_bin.join('android_ninja'), target, '-d', self.device]
- if 'Clang' in self._skia_api.builder_name:
+ if 'Clang' in self.m.vars.builder_name:
cmd.append('--clang')
- if 'GCC' in self._skia_api.builder_name:
+ if 'GCC' in self.m.vars.builder_name:
cmd.append('--gcc')
- if 'Vulkan' in self._skia_api.builder_name:
+ if 'Vulkan' in self.m.vars.builder_name:
cmd.append('--vulkan')
- self._skia_api.run(self._skia_api.m.step, 'build %s' % target, cmd=cmd,
- env=env, cwd=self._skia_api.m.path['checkout'])
+ self.m.run(self.m.step, 'build %s' % target, cmd=cmd,
+ env=env, cwd=self.m.path['checkout'])
def device_path_join(self, *args):
"""Like os.path.join(), but for paths on a connected Android device."""
@@ -128,17 +127,17 @@ class AndroidFlavorUtils(default_flavor.DefaultFlavorUtils):
"""Like os.path.exists(), but for paths on a connected device."""
exists_str = 'FILE_EXISTS'
return exists_str in self._adb(
- name='exists %s' % self._skia_api.m.path.basename(path),
+ name='exists %s' % self.m.path.basename(path),
serial=self.serial,
cmd=['shell', 'if', '[', '-e', path, '];',
'then', 'echo', exists_str + ';', 'fi'],
- stdout=self._skia_api.m.raw_io.output(),
+ stdout=self.m.raw_io.output(),
infra_step=True
).stdout
def _remove_device_dir(self, path):
"""Remove the directory on the device."""
- self._adb(name='rmdir %s' % self._skia_api.m.path.basename(path),
+ self._adb(name='rmdir %s' % self.m.path.basename(path),
serial=self.serial,
cmd=['shell', 'rm', '-r', path],
infra_step=True)
@@ -148,16 +147,16 @@ class AndroidFlavorUtils(default_flavor.DefaultFlavorUtils):
def _create_device_dir(self, path):
"""Create the directory on the device."""
- self._adb(name='mkdir %s' % self._skia_api.m.path.basename(path),
+ self._adb(name='mkdir %s' % self.m.path.basename(path),
serial=self.serial,
cmd=['shell', 'mkdir', '-p', path],
infra_step=True)
def copy_directory_contents_to_device(self, host_dir, device_dir):
"""Like shutil.copytree(), but for copying to a connected device."""
- self._skia_api.run(
- self._skia_api.m.step,
- name='push %s' % self._skia_api.m.path.basename(host_dir),
+ self.m.run(
+ self.m.step,
+ name='push %s' % self.m.path.basename(host_dir),
cmd=[
self.android_bin.join('adb_push_if_needed'), '--verbose',
] + self.serial_args + [
@@ -168,9 +167,9 @@ class AndroidFlavorUtils(default_flavor.DefaultFlavorUtils):
def copy_directory_contents_to_host(self, device_dir, host_dir):
"""Like shutil.copytree(), but for copying from a connected device."""
- self._skia_api.run(
- self._skia_api.m.step,
- name='pull %s' % self._skia_api.m.path.basename(device_dir),
+ self.m.run(
+ self.m.step,
+ name='pull %s' % self.m.path.basename(device_dir),
cmd=[
self.android_bin.join('adb_pull_if_needed'), '--verbose',
] + self.serial_args + [
@@ -181,7 +180,7 @@ class AndroidFlavorUtils(default_flavor.DefaultFlavorUtils):
def copy_file_to_device(self, host_path, device_path):
"""Like shutil.copyfile, but for copying to a connected device."""
- self._adb(name='push %s' % self._skia_api.m.path.basename(host_path),
+ self._adb(name='push %s' % self.m.path.basename(host_path),
serial=self.serial,
cmd=['push', host_path, device_path],
infra_step=True)
@@ -194,7 +193,7 @@ class AndroidFlavorUtils(default_flavor.DefaultFlavorUtils):
def has_root(self):
"""Determine if we have root access on this device."""
# Special case: GalaxyS3 hangs on `adb root`. Don't bother.
- if 'GalaxyS3' in self._skia_api.builder_name:
+ if 'GalaxyS3' in self.m.vars.builder_name:
return False
# Determine if we have root access.
@@ -203,16 +202,16 @@ class AndroidFlavorUtils(default_flavor.DefaultFlavorUtils):
output = self._adb(name='adb root',
serial=self.serial,
cmd=['root'],
- stdout=self._skia_api.m.raw_io.output(),
+ stdout=self.m.raw_io.output(),
infra_step=True).stdout.rstrip()
if ('restarting adbd as root' in output or
'adbd is already running as root' in output):
has_root = True
- except self._skia_api.m.step.StepFailure: # pragma: nocover
+ except self.m.step.StepFailure: # pragma: nocover
pass
# Wait for the device to reconnect.
- self._skia_api.run(
- self._skia_api.m.step,
+ self.m.run(
+ self.m.step,
name='wait',
cmd=['sleep', '10'],
infra_step=True)
@@ -221,8 +220,24 @@ class AndroidFlavorUtils(default_flavor.DefaultFlavorUtils):
def install(self):
"""Run device-specific installation steps."""
+ device_scratch_dir = self._adb(
+ name='get EXTERNAL_STORAGE dir',
+ serial=self.serial,
+ cmd=['shell', 'echo', '$EXTERNAL_STORAGE'],
+ stdout=self.m.raw_io.output(),
+ infra_step=True,
+ ).stdout.rstrip()
+ prefix = self.device_path_join(device_scratch_dir, 'skiabot', 'skia_')
+ self.device_dirs = default_flavor.DeviceDirs(
+ dm_dir=prefix + 'dm',
+ perf_data_dir=prefix + 'perf',
+ resource_dir=prefix + 'resources',
+ images_dir=prefix + 'images',
+ skp_dir=prefix + 'skp/skps',
+ tmp_dir=prefix + 'tmp_dir')
+
self._has_root = self.has_root()
- self._skia_api.run(self._skia_api.m.step,
+ self.m.run(self.m.step,
name='kill skia',
cmd=[
self.android_bin.join('android_kill_skia'),
@@ -257,7 +272,7 @@ class AndroidFlavorUtils(default_flavor.DefaultFlavorUtils):
def cleanup_steps(self):
"""Run any device-specific cleanup steps."""
- if self._skia_api.do_test_steps or self._skia_api.do_perf_steps:
+ if self.m.vars.do_test_steps or self.m.vars.do_perf_steps:
self._adb(name='final battery stats',
serial=self.serial,
cmd=['shell', 'dumpsys', 'batteryproperties'],
@@ -266,8 +281,8 @@ class AndroidFlavorUtils(default_flavor.DefaultFlavorUtils):
serial=self.serial,
cmd=['reboot'],
infra_step=True)
- self._skia_api.run(
- self._skia_api.m.step,
+ self.m.run(
+ self.m.step,
name='wait for reboot',
cmd=['sleep', '10'],
infra_step=True)
@@ -281,36 +296,17 @@ class AndroidFlavorUtils(default_flavor.DefaultFlavorUtils):
def read_file_on_device(self, path, *args, **kwargs):
"""Read the given file."""
- return self._adb(name='read %s' % self._skia_api.m.path.basename(path),
+ return self._adb(name='read %s' % self.m.path.basename(path),
serial=self.serial,
cmd=['shell', 'cat', path],
- stdout=self._skia_api.m.raw_io.output(),
+ stdout=self.m.raw_io.output(),
infra_step=True).stdout.rstrip()
def remove_file_on_device(self, path, *args, **kwargs):
"""Delete the given file."""
- return self._adb(name='rm %s' % self._skia_api.m.path.basename(path),
+ return self._adb(name='rm %s' % self.m.path.basename(path),
serial=self.serial,
cmd=['shell', 'rm', '-f', path],
infra_step=True,
*args,
**kwargs)
-
- def get_device_dirs(self):
- """ Set the directories which will be used by the build steps."""
- device_scratch_dir = self._adb(
- name='get EXTERNAL_STORAGE dir',
- serial=self.serial,
- cmd=['shell', 'echo', '$EXTERNAL_STORAGE'],
- stdout=self._skia_api.m.raw_io.output(),
- infra_step=True,
- ).stdout.rstrip()
- prefix = self.device_path_join(device_scratch_dir, 'skiabot', 'skia_')
- return default_flavor.DeviceDirs(
- dm_dir=prefix + 'dm',
- perf_data_dir=prefix + 'perf',
- resource_dir=prefix + 'resources',
- images_dir=prefix + 'images',
- skp_dir=prefix + 'skp/skps',
- tmp_dir=prefix + 'tmp_dir')
-
diff --git a/infra/bots/recipe_modules/flavor/api.py b/infra/bots/recipe_modules/flavor/api.py
new file mode 100644
index 0000000000..fbfa9ba1cd
--- /dev/null
+++ b/infra/bots/recipe_modules/flavor/api.py
@@ -0,0 +1,126 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+# pylint: disable=W0201
+
+
+from recipe_engine import recipe_api
+
+from . import android_flavor
+from . import cmake_flavor
+from . import coverage_flavor
+from . import default_flavor
+from . import gn_flavor
+from . import ios_flavor
+from . import pdfium_flavor
+from . import valgrind_flavor
+from . import xsan_flavor
+
+
+def is_android(builder_cfg):
+ """Determine whether the given builder is an Android builder."""
+ return ('Android' in builder_cfg.get('extra_config', '') or
+ builder_cfg.get('os') == 'Android')
+
+
+def is_cmake(builder_cfg):
+ return 'CMake' in builder_cfg.get('extra_config', '')
+
+
+def is_gn(builder_cfg):
+ return 'GN' == builder_cfg.get('extra_config', '')
+
+
+def is_ios(builder_cfg):
+ return ('iOS' in builder_cfg.get('extra_config', '') or
+ builder_cfg.get('os') == 'iOS')
+
+
+def is_pdfium(builder_cfg):
+ return 'PDFium' in builder_cfg.get('extra_config', '')
+
+
+def is_valgrind(builder_cfg):
+ return 'Valgrind' in builder_cfg.get('extra_config', '')
+
+
+def is_xsan(builder_cfg):
+ return ('ASAN' in builder_cfg.get('extra_config', '') or
+ 'MSAN' in builder_cfg.get('extra_config', '') or
+ 'TSAN' in builder_cfg.get('extra_config', ''))
+
+
+class SkiaFlavorApi(recipe_api.RecipeApi):
+ def get_flavor(self, builder_cfg):
+ """Return a flavor utils object specific to the given builder."""
+ if is_android(builder_cfg):
+ return android_flavor.AndroidFlavorUtils(self.m)
+ elif is_cmake(builder_cfg):
+ return cmake_flavor.CMakeFlavorUtils(self.m)
+ elif is_gn(builder_cfg):
+ return gn_flavor.GNFlavorUtils(self.m)
+ elif is_ios(builder_cfg):
+ return ios_flavor.iOSFlavorUtils(self.m)
+ elif is_pdfium(builder_cfg):
+ return pdfium_flavor.PDFiumFlavorUtils(self.m)
+ elif is_valgrind(builder_cfg):
+ return valgrind_flavor.ValgrindFlavorUtils(self.m)
+ elif is_xsan(builder_cfg):
+ return xsan_flavor.XSanFlavorUtils(self.m)
+ elif builder_cfg.get('configuration') == 'Coverage':
+ return coverage_flavor.CoverageFlavorUtils(self.m)
+ else:
+ return default_flavor.DefaultFlavorUtils(self.m)
+
+ def setup(self):
+ self._f = self.get_flavor(self.m.vars.builder_cfg)
+
+ def step(self, name, cmd, **kwargs):
+ return self._f.step(name, cmd, **kwargs)
+
+ def compile(self, target):
+ return self._f.compile(target)
+
+ def copy_extra_build_products(self, swarming_out_dir):
+ return self._f.copy_extra_build_products(swarming_out_dir)
+
+ @property
+ def out_dir(self):
+ return self._f.out_dir
+
+ def device_path_join(self, *args):
+ return self._f.device_path_join(*args)
+
+ def device_path_exists(self, path):
+ return self._f.device_path_exists(path) # pragma: no cover
+
+ def copy_directory_contents_to_device(self, host_dir, device_dir):
+ return self._f.copy_directory_contents_to_device(host_dir, device_dir)
+
+ def copy_directory_contents_to_host(self, device_dir, host_dir):
+ return self._f.copy_directory_contents_to_host(device_dir, host_dir)
+
+ def copy_file_to_device(self, host_path, device_path):
+ return self._f.copy_file_to_device(host_path, device_path)
+
+ def create_clean_host_dir(self, path):
+ return self._f.create_clean_host_dir(path)
+
+ def create_clean_device_dir(self, path):
+ return self._f.create_clean_device_dir(path)
+
+ def read_file_on_device(self, path):
+ return self._f.read_file_on_device(path)
+
+ def remove_file_on_device(self, path):
+ return self._f.remove_file_on_device(path)
+
+ def install(self):
+ rv = self._f.install()
+ self.device_dirs = self._f.device_dirs
+ return rv
+
+ def cleanup_steps(self):
+ return self._f.cleanup_steps()
diff --git a/infra/bots/recipe_modules/skia/cmake_flavor.py b/infra/bots/recipe_modules/flavor/cmake_flavor.py
index c88d369451..8b254eceb7 100644
--- a/infra/bots/recipe_modules/skia/cmake_flavor.py
+++ b/infra/bots/recipe_modules/flavor/cmake_flavor.py
@@ -9,6 +9,6 @@ import default_flavor
class CMakeFlavorUtils(default_flavor.DefaultFlavorUtils):
def compile(self, target):
"""Build Skia with CMake. Ignores `target`."""
- cmake_build = self._skia_api.skia_dir.join('cmake', 'cmake_build')
- self._skia_api.run(self._skia_api.m.step, 'cmake_build', cmd=[cmake_build],
- cwd=self._skia_api.m.path['checkout'])
+ cmake_build = self.m.vars.skia_dir.join('cmake', 'cmake_build')
+ self.m.run(self.m.step, 'cmake_build', cmd=[cmake_build],
+ cwd=self.m.path['checkout'])
diff --git a/infra/bots/recipe_modules/skia/coverage_flavor.py b/infra/bots/recipe_modules/flavor/coverage_flavor.py
index 5fd853363d..a3bb5828ce 100644
--- a/infra/bots/recipe_modules/skia/coverage_flavor.py
+++ b/infra/bots/recipe_modules/flavor/coverage_flavor.py
@@ -6,7 +6,6 @@
import datetime
import default_flavor
import posixpath
-import ssh_devices
"""Utils for running coverage tests."""
@@ -17,12 +16,12 @@ class CoverageFlavorUtils(default_flavor.DefaultFlavorUtils):
def step(self, name, cmd, **kwargs):
"""Run the given step through coverage."""
compile_target = 'dm'
- build_cmd = [self._skia_api.skia_dir.join('tools', 'llvm_coverage_build'),
+ build_cmd = [self.m.vars.skia_dir.join('tools', 'llvm_coverage_build'),
compile_target]
- self._skia_api.run(self._skia_api.m.step,
- 'build %s' % compile_target,
- cmd=build_cmd,
- cwd=self._skia_api.m.path['checkout'])
+ self.m.run(self.m.step,
+ 'build %s' % compile_target,
+ cmd=build_cmd,
+ cwd=self.m.path['checkout'])
# Slice out the 'key' and 'properties' arguments to be reused.
key = []
@@ -38,39 +37,39 @@ class CoverageFlavorUtils(default_flavor.DefaultFlavorUtils):
if current is not None:
current.append(cmd[i])
- results_dir = self._skia_api.skia_out.join('coverage_results')
+ results_dir = self.m.vars.skia_out.join('coverage_results')
self.create_clean_host_dir(results_dir)
# Run DM under coverage.
- report_file_basename = '%s.cov' % self._skia_api.got_revision
+ report_file_basename = '%s.cov' % self.m.vars.got_revision
report_file = results_dir.join(report_file_basename)
args = [
'python',
- self._skia_api.skia_dir.join('tools', 'llvm_coverage_run.py'),
+ self.m.vars.skia_dir.join('tools', 'llvm_coverage_run.py'),
] + cmd + ['--outResultsFile', report_file]
- self._skia_api.run(self._skia_api.m.step, name=name, cmd=args,
- cwd=self._skia_api.m.path['checkout'], **kwargs)
+ self.m.run(self.m.step, name=name, cmd=args,
+ cwd=self.m.path['checkout'], **kwargs)
# Generate nanobench-style JSON output from the coverage report.
nanobench_json = results_dir.join('nanobench_%s.json' % (
- self._skia_api.got_revision))
+ self.m.vars.got_revision))
line_by_line_basename = ('coverage_by_line_%s.json' % (
- self._skia_api.got_revision))
+ self.m.vars.got_revision))
line_by_line = results_dir.join(line_by_line_basename)
args = [
'python',
- self._skia_api.skia_dir.join('tools', 'parse_llvm_coverage.py'),
+ self.m.vars.skia_dir.join('tools', 'parse_llvm_coverage.py'),
'--report', report_file, '--nanobench', nanobench_json,
'--linebyline', line_by_line]
args.extend(key)
args.extend(properties)
- self._skia_api.run(
- self._skia_api.m.step,
+ self.m.run(
+ self.m.step,
'Generate Coverage Data',
- cmd=args, cwd=self._skia_api.m.path['checkout'])
+ cmd=args, cwd=self.m.path['checkout'])
# Copy files from results_dir into swarming_out_dir.
- for r in self._skia_api.m.file.listdir('results_dir', results_dir):
- self._skia_api.m.file.copy(
+ for r in self.m.file.listdir('results_dir', results_dir):
+ self.m.file.copy(
'Copy to swarming out', results_dir.join(r),
- self._skia_api.swarming_out_dir)
+ self.m.vars.swarming_out_dir)
diff --git a/infra/bots/recipe_modules/skia/default_flavor.py b/infra/bots/recipe_modules/flavor/default_flavor.py
index 9c0734047e..4cdbaaa671 100644
--- a/infra/bots/recipe_modules/skia/default_flavor.py
+++ b/infra/bots/recipe_modules/flavor/default_flavor.py
@@ -3,6 +3,9 @@
# found in the LICENSE file.
+# pylint: disable=W0201
+
+
"""Default flavor utils class, used for desktop builders."""
@@ -66,28 +69,28 @@ class DefaultFlavorUtils(object):
copying files between the host and Android device, as well as the
'step' function, so that commands may be run through ADB.
"""
- def __init__(self, skia_api, *args, **kwargs):
- self._skia_api = skia_api
+ def __init__(self, m):
+ self.m = m
self._chrome_path = None
- self._win_toolchain_dir = self._skia_api.slave_dir.join(WIN_TOOLCHAIN_DIR)
- win_toolchain_asset_path = self._skia_api.infrabots_dir.join(
+ self._win_toolchain_dir = self.m.vars.slave_dir.join(WIN_TOOLCHAIN_DIR)
+ win_toolchain_asset_path = self.m.vars.infrabots_dir.join(
'assets', 'win_toolchain', 'VERSION')
- if not self._skia_api.m.path.exists(win_toolchain_asset_path):
- self._win_toolchain_dir = self._skia_api.slave_dir
+ if not self.m.path.exists(win_toolchain_asset_path):
+ self._win_toolchain_dir = self.m.vars.slave_dir
def step(self, name, cmd, **kwargs):
"""Wrapper for the Step API; runs a step as appropriate for this flavor."""
- path_to_app = self._skia_api.skia_out.join(
- self._skia_api.configuration, cmd[0])
- if (self._skia_api.m.platform.is_linux and
- 'x86_64' in self._skia_api.builder_name and
- not 'TSAN' in self._skia_api.builder_name):
+ path_to_app = self.m.vars.skia_out.join(
+ self.m.vars.configuration, cmd[0])
+ if (self.m.platform.is_linux and
+ 'x86_64' in self.m.vars.builder_name and
+ not 'TSAN' in self.m.vars.builder_name):
new_cmd = ['catchsegv', path_to_app]
else:
new_cmd = [path_to_app]
new_cmd.extend(cmd[1:])
- return self._skia_api.run(self._skia_api.m.step,
+ return self.m.run(self.m.step,
name, cmd=new_cmd, **kwargs)
@property
@@ -97,11 +100,11 @@ class DefaultFlavorUtils(object):
def bootstrap_win_toolchain(self):
"""Run bootstrapping script for the Windows toolchain."""
- bootstrap_script = self._skia_api.infrabots_dir.join(
+ bootstrap_script = self.m.vars.infrabots_dir.join(
'bootstrap_win_toolchain_json.py')
win_toolchain_json = self._win_toolchain_dir.join(
'src', 'build', 'win_toolchain.json')
- self._skia_api.m.python(
+ self.m.python(
'bootstrap win toolchain',
script=bootstrap_script,
args=['--win_toolchain_json', win_toolchain_json,
@@ -110,13 +113,13 @@ class DefaultFlavorUtils(object):
def build_command_buffer(self):
"""Build command_buffer."""
- script = self._skia_api.skia_dir.join('tools', 'build_command_buffer.py')
- self._skia_api.run(
- self._skia_api.m.python, 'build command_buffer',
+ script = self.m.vars.skia_dir.join('tools', 'build_command_buffer.py')
+ self.m.run(
+ self.m.python, 'build command_buffer',
script=script,
- args=['--chrome-dir', self._skia_api.checkout_root,
+ args=['--chrome-dir', self.m.vars.checkout_root,
'--output-dir', self.out_dir,
- '--chrome-build-type', self._skia_api.configuration,
+ '--chrome-build-type', self.m.vars.configuration,
'--no-sync'])
def compile(self, target):
@@ -124,56 +127,52 @@ class DefaultFlavorUtils(object):
# The CHROME_PATH environment variable is needed for builders that use
# toolchains downloaded by Chrome.
env = {'CHROME_PATH': self.chrome_path}
- if self._skia_api.m.platform.is_win:
+ if self.m.platform.is_win:
make_cmd = ['python', 'make.py']
- self._skia_api._run_once(self.bootstrap_win_toolchain)
- if 'Vulkan' in self._skia_api.builder_name:
- env['VK_SDK_PATH'] = self._skia_api.slave_dir.join('win_vulkan_sdk')
- if not self._skia_api.m.path.exists(self._skia_api.infrabots_dir.join(
- 'assets', 'win_vulkan_sdk', 'VERSION')):
- # TODO(kjlubick): Remove this once enough time has passed.
- env['VK_SDK_PATH'] = self._skia_api.slave_dir.join('vulkan_1.0.17.0')
+ self.m.run.run_once(self.bootstrap_win_toolchain)
+ if 'Vulkan' in self.m.vars.builder_name:
+ env['VK_SDK_PATH'] = self.m.vars.slave_dir.join('win_vulkan_sdk')
else:
make_cmd = ['make']
cmd = make_cmd + [target]
try:
- self._skia_api.run(self._skia_api.m.step, 'build %s' % target, cmd=cmd,
- env=env, cwd=self._skia_api.m.path['checkout'])
- except self._skia_api.m.step.StepFailure:
- if self._skia_api.m.platform.is_win:
+ self.m.run(self.m.step, 'build %s' % target, cmd=cmd,
+ env=env, cwd=self.m.path['checkout'])
+ except self.m.step.StepFailure:
+ if self.m.platform.is_win:
# The linker occasionally crashes on Windows. Try again.
- self._skia_api.run(self._skia_api.m.step, 'build %s' % target, cmd=cmd,
- env=env, cwd=self._skia_api.m.path['checkout'])
+ self.m.run(self.m.step, 'build %s' % target, cmd=cmd,
+ env=env, cwd=self.m.path['checkout'])
else:
raise
- if 'CommandBuffer' in self._skia_api.builder_name:
- self._skia_api._run_once(self.build_command_buffer)
+ if 'CommandBuffer' in self.m.vars.builder_name:
+ self.m.run.run_once(self.build_command_buffer)
def copy_extra_build_products(self, swarming_out_dir):
"""Copy extra build products to specified directory.
Copy flavor-specific build products to swarming_out_dir for use in test and
perf steps."""
- if ("Win" in self._skia_api.builder_name and
- "Vulkan" in self._skia_api.builder_name):
+ if ("Win" in self.m.vars.builder_name and
+ "Vulkan" in self.m.vars.builder_name):
# This copies vulkan-1.dll that has been bundled into win_vulkan_sdk
# since version 2 See skia/api BUILD_PRODUCTS_ISOLATE_WHITELIST
- self._skia_api.copy_build_products(
- self._skia_api.m.path['slave_build'].join('win_vulkan_sdk'),
- swarming_out_dir)
+ self.m.run.copy_build_products(
+ self.m.path['slave_build'].join('win_vulkan_sdk'),
+ swarming_out_dir)
@property
def out_dir(self):
"""Flavor-specific out directory."""
- return self._skia_api.skia_out.join(self._skia_api.configuration)
+ return self.m.vars.skia_out.join(self.m.vars.configuration)
def device_path_join(self, *args):
"""Like os.path.join(), but for paths on a connected device."""
- return self._skia_api.m.path.join(*args)
+ return self.m.path.join(*args)
def device_path_exists(self, path): # pragma: no cover
"""Like os.path.exists(), but for paths on a connected device."""
- return self._skia_api.m.path.exists(path, infra_step=True)
+ return self.m.path.exists(path, infra_step=True)
def copy_directory_contents_to_device(self, host_dir, device_dir):
"""Like shutil.copytree(), but for copying to a connected device."""
@@ -211,32 +210,23 @@ class DefaultFlavorUtils(object):
def create_clean_host_dir(self, path):
"""Convenience function for creating a clean directory."""
- self._skia_api.rmtree(path)
- self._skia_api.m.file.makedirs(
- self._skia_api.m.path.basename(path), path, infra_step=True)
+ self.m.run.rmtree(path)
+ self.m.file.makedirs(
+ self.m.path.basename(path), path, infra_step=True)
def install(self):
"""Run device-specific installation steps."""
- pass
+ self.device_dirs = DeviceDirs(
+ dm_dir=self.m.vars.dm_dir,
+ perf_data_dir=self.m.vars.perf_data_dir,
+ resource_dir=self.m.vars.resource_dir,
+ images_dir=self.m.vars.images_dir,
+ skp_dir=self.m.vars.local_skp_dir,
+ tmp_dir=self.m.vars.tmp_dir)
def cleanup_steps(self):
"""Run any device-specific cleanup steps."""
pass
- def get_device_dirs(self):
- """ Set the directories which will be used by the build steps.
-
- These refer to paths on the same device where the test executables will
- run, for example, for Android bots these are paths on the Android device
- itself. For desktop bots, these are just local paths.
- """
- return DeviceDirs(
- dm_dir=self._skia_api.dm_dir,
- perf_data_dir=self._skia_api.perf_data_dir,
- resource_dir=self._skia_api.resource_dir,
- images_dir=self._skia_api.images_dir,
- skp_dir=self._skia_api.local_skp_dir,
- tmp_dir=self._skia_api.tmp_dir)
-
def __repr__(self):
return '<%s object>' % self.__class__.__name__ # pragma: no cover
diff --git a/infra/bots/recipe_modules/skia/gn_flavor.py b/infra/bots/recipe_modules/flavor/gn_flavor.py
index 7b60b9a1a0..ea5bbb6934 100644
--- a/infra/bots/recipe_modules/skia/gn_flavor.py
+++ b/infra/bots/recipe_modules/flavor/gn_flavor.py
@@ -9,17 +9,18 @@ class GNFlavorUtils(default_flavor.DefaultFlavorUtils):
def compile(self, target):
"""Build Skia with GN."""
# Get the gn executable.
- fetch_gn = self._skia_api.skia_dir.join('bin', 'fetch-gn')
- self._skia_api.run(self._skia_api.m.step, 'fetch-gn', cmd=[fetch_gn],
- cwd=self._skia_api.skia_dir)
+ fetch_gn = self.m.vars.skia_dir.join('bin', 'fetch-gn')
+ self.m.run(self.m.step, 'fetch-gn',
+ cmd=[fetch_gn],
+ cwd=self.m.vars.skia_dir)
is_debug = 'is_debug=true'
- if self._skia_api.configuration != 'Debug':
- is_debug = 'is_debug=false'
+ if self.m.vars.configuration != 'Debug':
+ is_debug = 'is_debug=false'
gn_args = [is_debug]
- is_clang = 'Clang' in self._skia_api.builder_name
- is_gcc = 'GCC' in self._skia_api.builder_name
+ is_clang = 'Clang' in self.m.vars.builder_name
+ is_gcc = 'GCC' in self.m.vars.builder_name
cc, cxx = 'cc', 'c++'
if is_clang:
@@ -27,7 +28,7 @@ class GNFlavorUtils(default_flavor.DefaultFlavorUtils):
elif is_gcc:
cc, cxx = 'gcc', 'g++'
- ccache = self._skia_api.ccache()
+ ccache = self.m.run.ccache()
if ccache:
cc, cxx = '%s %s' % (ccache, cc), '%s %s' % (ccache, cxx)
if is_clang:
@@ -39,14 +40,14 @@ class GNFlavorUtils(default_flavor.DefaultFlavorUtils):
# Run gn gen.
gn_exe = 'gn'
- if self._skia_api.m.platform.is_win:
- gn_exe = 'gn.exe'
+ if self.m.platform.is_win:
+ gn_exe = 'gn.exe'
gn_gen = [gn_exe, 'gen', self.out_dir, '--args=%s' % ' '.join(gn_args)]
- self._skia_api.run(self._skia_api.m.step, 'gn_gen', cmd=gn_gen,
- cwd=self._skia_api.skia_dir)
+ self.m.run(self.m.step, 'gn_gen', cmd=gn_gen,
+ cwd=self.m.vars.skia_dir)
# Run ninja.
ninja_cmd = ['ninja', '-C', self.out_dir]
- self._skia_api.run(self._skia_api.m.step, 'compile %s' % target,
- cmd=ninja_cmd,
- cwd=self._skia_api.skia_dir)
+ self.m.run(self.m.step, 'compile %s' % target,
+ cmd=ninja_cmd,
+ cwd=self.m.vars.skia_dir)
diff --git a/infra/bots/recipe_modules/skia/ios_flavor.py b/infra/bots/recipe_modules/flavor/ios_flavor.py
index d83cf213de..d0d60f4284 100644
--- a/infra/bots/recipe_modules/skia/ios_flavor.py
+++ b/infra/bots/recipe_modules/flavor/ios_flavor.py
@@ -3,6 +3,9 @@
# found in the LICENSE file.
+# pylint: disable=W0201
+
+
import copy
import default_flavor
@@ -11,12 +14,12 @@ import default_flavor
class iOSFlavorUtils(default_flavor.DefaultFlavorUtils):
- def __init__(self, skia_api):
- super(iOSFlavorUtils, self).__init__(skia_api)
+ def __init__(self, m):
+ super(iOSFlavorUtils, self).__init__(m)
self.default_env = {}
self.default_env['XCODEBUILD'] = (
- self._skia_api.slave_dir.join('xcodebuild'))
- self.ios_bin = self._skia_api.skia_dir.join(
+ self.m.vars.slave_dir.join('xcodebuild'))
+ self.ios_bin = self.m.vars.skia_dir.join(
'platform_tools', 'ios', 'bin')
def step(self, name, cmd, **kwargs):
@@ -29,15 +32,14 @@ class iOSFlavorUtils(default_flavor.DefaultFlavorUtils):
# one is being run.
cmd = ["--" + c if c in ['dm', 'nanobench'] else c
for c in cmd]
- return self._skia_api.run(self._skia_api.m.step, name=name, cmd=args + cmd,
- env=env,
- **kwargs)
+ return self.m.run(self.m.step, name=name, cmd=args + cmd,
+ env=env, **kwargs)
def compile(self, target):
"""Build the given target."""
cmd = [self.ios_bin.join('ios_ninja')]
- self._skia_api.run(self._skia_api.m.step, 'build iOSShell', cmd=cmd,
- cwd=self._skia_api.m.path['checkout'])
+ self.m.run(self.m.step, 'build iOSShell', cmd=cmd,
+ cwd=self.m.path['checkout'])
def device_path_join(self, *args):
"""Like os.path.join(), but for paths on a connected iOS device."""
@@ -45,8 +47,8 @@ class iOSFlavorUtils(default_flavor.DefaultFlavorUtils):
def device_path_exists(self, path):
"""Like os.path.exists(), but for paths on a connected device."""
- return self._skia_api.run(
- self._skia_api.m.step,
+ return self.m.run(
+ self.m.step,
'exists %s' % path,
cmd=[self.ios_bin.join('ios_path_exists'), path],
env=self.default_env,
@@ -55,8 +57,8 @@ class iOSFlavorUtils(default_flavor.DefaultFlavorUtils):
def _remove_device_dir(self, path):
"""Remove the directory on the device."""
- return self._skia_api.run(
- self._skia_api.m.step,
+ return self.m.run(
+ self.m.step,
'rmdir %s' % path,
cmd=[self.ios_bin.join('ios_rm'), path],
env=self.default_env,
@@ -65,8 +67,8 @@ class iOSFlavorUtils(default_flavor.DefaultFlavorUtils):
def _create_device_dir(self, path):
"""Create the directory on the device."""
- return self._skia_api.run(
- self._skia_api.m.step,
+ return self.m.run(
+ self.m.step,
'mkdir %s' % path,
cmd=[self.ios_bin.join('ios_mkdir'), path],
env=self.default_env,
@@ -75,10 +77,10 @@ class iOSFlavorUtils(default_flavor.DefaultFlavorUtils):
def copy_directory_contents_to_device(self, host_dir, device_dir):
"""Like shutil.copytree(), but for copying to a connected device."""
- return self._skia_api.run(
- self._skia_api.m.step,
- name='push %s to %s' % (self._skia_api.m.path.basename(host_dir),
- self._skia_api.m.path.basename(device_dir)),
+ return self.m.run(
+ self.m.step,
+ name='push %s to %s' % (self.m.path.basename(host_dir),
+ self.m.path.basename(device_dir)),
cmd=[self.ios_bin.join('ios_push_if_needed'),
host_dir, device_dir],
env=self.default_env,
@@ -87,9 +89,9 @@ class iOSFlavorUtils(default_flavor.DefaultFlavorUtils):
def copy_directory_contents_to_host(self, device_dir, host_dir):
"""Like shutil.copytree(), but for copying from a connected device."""
- self._skia_api.run(
- self._skia_api.m.step,
- name='pull %s' % self._skia_api.m.path.basename(device_dir),
+ self.m.run(
+ self.m.step,
+ name='pull %s' % self.m.path.basename(device_dir),
cmd=[self.ios_bin.join('ios_pull_if_needed'),
device_dir, host_dir],
env=self.default_env,
@@ -98,8 +100,8 @@ class iOSFlavorUtils(default_flavor.DefaultFlavorUtils):
def copy_file_to_device(self, host_path, device_path):
"""Like shutil.copyfile, but for copying to a connected device."""
- self._skia_api.run(
- self._skia_api.m.step,
+ self.m.run(
+ self.m.step,
name='push %s' % host_path,
cmd=[self.ios_bin.join('ios_push_file'), host_path, device_path],
env=self.default_env,
@@ -107,10 +109,10 @@ class iOSFlavorUtils(default_flavor.DefaultFlavorUtils):
) # pragma: no cover
def copy_extra_build_products(self, swarming_out_dir):
- xcode_dir = self._skia_api.m.path.join(
- 'xcodebuild', '%s-iphoneos' % self._skia_api.configuration)
- self._skia_api.copy_build_products(
- self._skia_api.skia_dir.join(xcode_dir),
+ xcode_dir = self.m.path.join(
+ 'xcodebuild', '%s-iphoneos' % self.m.vars.configuration)
+ self.m.run.copy_build_products(
+ self.m.vars.skia_dir.join(xcode_dir),
swarming_out_dir.join(xcode_dir))
def create_clean_device_dir(self, path):
@@ -120,8 +122,17 @@ class iOSFlavorUtils(default_flavor.DefaultFlavorUtils):
def install(self):
"""Run device-specific installation steps."""
- self._skia_api.run(
- self._skia_api.m.step,
+ prefix = self.device_path_join('skiabot', 'skia_')
+ self.device_dirs = default_flavor.DeviceDirs(
+ dm_dir=prefix + 'dm',
+ perf_data_dir=prefix + 'perf',
+ resource_dir=prefix + 'resources',
+ images_dir=prefix + 'images',
+ skp_dir=prefix + 'skp/skps',
+ tmp_dir=prefix + 'tmp_dir')
+
+ self.m.run(
+ self.m.step,
name='install iOSShell',
cmd=[self.ios_bin.join('ios_install')],
env=self.default_env,
@@ -129,15 +140,15 @@ class iOSFlavorUtils(default_flavor.DefaultFlavorUtils):
def cleanup_steps(self):
"""Run any device-specific cleanup steps."""
- if self._skia_api.do_test_steps or self._skia_api.do_perf_steps:
- self._skia_api.run(
- self._skia_api.m.step,
+ if self.m.vars.do_test_steps or self.m.vars.do_perf_steps:
+ self.m.run(
+ self.m.step,
name='reboot',
cmd=[self.ios_bin.join('ios_restart')],
env=self.default_env,
infra_step=True)
- self._skia_api.run(
- self._skia_api.m.step,
+ self.m.run(
+ self.m.step,
name='wait for reboot',
cmd=['sleep', '20'],
env=self.default_env,
@@ -145,32 +156,21 @@ class iOSFlavorUtils(default_flavor.DefaultFlavorUtils):
def read_file_on_device(self, path):
"""Read the given file."""
- ret = self._skia_api.run(
- self._skia_api.m.step,
- name='read %s' % self._skia_api.m.path.basename(path),
+ ret = self.m.run(
+ self.m.step,
+ name='read %s' % self.m.path.basename(path),
cmd=[self.ios_bin.join('ios_cat_file'), path],
env=self.default_env,
- stdout=self._skia_api.m.raw_io.output(),
+ stdout=self.m.raw_io.output(),
infra_step=True)
return ret.stdout.rstrip() if ret.stdout else ret.stdout
def remove_file_on_device(self, path):
"""Remove the file on the device."""
- return self._skia_api.run(
- self._skia_api.m.step,
+ return self.m.run(
+ self.m.step,
'rm %s' % path,
cmd=[self.ios_bin.join('ios_rm'), path],
env=self.default_env,
infra_step=True,
)
-
- def get_device_dirs(self):
- """ Set the directories which will be used by the build steps."""
- prefix = self.device_path_join('skiabot', 'skia_')
- return default_flavor.DeviceDirs(
- dm_dir=prefix + 'dm',
- perf_data_dir=prefix + 'perf',
- resource_dir=prefix + 'resources',
- images_dir=prefix + 'images',
- skp_dir=prefix + 'skp/skps',
- tmp_dir=prefix + 'tmp_dir')
diff --git a/infra/bots/recipe_modules/skia/pdfium_flavor.py b/infra/bots/recipe_modules/flavor/pdfium_flavor.py
index 782daf0487..ce11aeac2e 100644
--- a/infra/bots/recipe_modules/skia/pdfium_flavor.py
+++ b/infra/bots/recipe_modules/flavor/pdfium_flavor.py
@@ -14,11 +14,11 @@ class PDFiumFlavorUtils(default_flavor.DefaultFlavorUtils):
def compile(self, target):
"""Build PDFium with Skia."""
- pdfium_dir = self._skia_api.checkout_root.join('pdfium')
+ pdfium_dir = self.m.vars.checkout_root.join('pdfium')
# Runhook to generate the gn binary in buildtools.
- self._skia_api.run(
- self._skia_api.m.step,
+ self.m.run(
+ self.m.step,
'runhook',
cmd=['gclient', 'runhook', 'gn_linux64'],
cwd=pdfium_dir)
@@ -26,19 +26,19 @@ class PDFiumFlavorUtils(default_flavor.DefaultFlavorUtils):
# Setup gn args.
gn_args = ['pdf_use_skia=true', 'pdf_is_standalone=true',
'clang_use_chrome_plugins=false']
- self._skia_api.run(
- self._skia_api.m.step,
+ self.m.run(
+ self.m.step,
'gn_gen',
cmd=['gn', 'gen', 'out/skia', '--args=%s' % ' '.join(gn_args)],
cwd=pdfium_dir,
env={'CHROMIUM_BUILDTOOLS_PATH': str(pdfium_dir.join('buildtools'))})
# Modify DEPS file to contain the current Skia revision.
- skia_revision = self._skia_api.got_revision
+ skia_revision = self.m.vars.got_revision
deps_file = pdfium_dir.join('DEPS')
test_data = "'skia_revision': 'abc'"
- original_contents = self._skia_api.m.file.read(
+ original_contents = self.m.file.read(
'read PDFium DEPS', deps_file, test_data=test_data, infra_step=True)
deps_skia_regexp = re.compile(
@@ -46,19 +46,19 @@ class PDFiumFlavorUtils(default_flavor.DefaultFlavorUtils):
re.MULTILINE)
patched_contents = re.sub(deps_skia_regexp, str(skia_revision),
original_contents)
- self._skia_api.m.file.write('write PDFium DEPs', deps_file,
+ self.m.file.write('write PDFium DEPs', deps_file,
patched_contents, infra_step=True)
# gclient sync after updating DEPS.
- self._skia_api.run(
- self._skia_api.m.step,
+ self.m.run(
+ self.m.step,
'sync_pdfium',
cmd=['gclient', 'sync'],
cwd=pdfium_dir)
# Build PDFium.
- self._skia_api.run(
- self._skia_api.m.step,
+ self.m.run(
+ self.m.step,
'build_pdfium',
cmd=['ninja', '-C', 'out/skia', '-j100'],
cwd=pdfium_dir)
diff --git a/infra/bots/recipe_modules/skia/valgrind_flavor.py b/infra/bots/recipe_modules/flavor/valgrind_flavor.py
index 2c00566436..2dea3e7ab4 100644
--- a/infra/bots/recipe_modules/skia/valgrind_flavor.py
+++ b/infra/bots/recipe_modules/flavor/valgrind_flavor.py
@@ -10,9 +10,9 @@ import default_flavor
class ValgrindFlavorUtils(default_flavor.DefaultFlavorUtils):
- def __init__(self, *args, **kwargs):
- super(ValgrindFlavorUtils, self).__init__(*args, **kwargs)
- self._suppressions_file = self._skia_api.skia_dir.join(
+ def __init__(self, m):
+ super(ValgrindFlavorUtils, self).__init__(m)
+ self._suppressions_file = self.m.vars.skia_dir.join(
'tools', 'valgrind.supp')
def step(self, name, cmd, **kwargs):
@@ -22,6 +22,6 @@ class ValgrindFlavorUtils(default_flavor.DefaultFlavorUtils):
path_to_app = self.out_dir.join(cmd[0])
new_cmd.append(path_to_app)
new_cmd.extend(cmd[1:])
- return self._skia_api.run(self._skia_api.m.step, name, cmd=new_cmd,
- **kwargs)
+ return self.m.run(self.m.step, name, cmd=new_cmd,
+ **kwargs)
diff --git a/infra/bots/recipe_modules/skia/xsan_flavor.py b/infra/bots/recipe_modules/flavor/xsan_flavor.py
index ac4cac9b04..89169b77c1 100644
--- a/infra/bots/recipe_modules/skia/xsan_flavor.py
+++ b/infra/bots/recipe_modules/flavor/xsan_flavor.py
@@ -10,8 +10,8 @@ import default_flavor
class XSanFlavorUtils(default_flavor.DefaultFlavorUtils):
- def __init__(self, *args, **kwargs):
- super(XSanFlavorUtils, self).__init__(*args, **kwargs)
+ def __init__(self, m):
+ super(XSanFlavorUtils, self).__init__(m)
self._sanitizer = {
# We'd love to just pass 'address,undefined' and get all the checks, but
# we're not anywhere close to being able to do that. Instead we start
@@ -24,37 +24,37 @@ class XSanFlavorUtils(default_flavor.DefaultFlavorUtils):
# MSAN and TSAN can't run together with ASAN, so they're their own bots.
'MSAN': 'memory',
'TSAN': 'thread',
- }[self._skia_api.builder_cfg['extra_config'].replace('Swarming', '')]
+ }[self.m.vars.builder_cfg['extra_config'].replace('Swarming', '')]
def compile(self, target):
- cmd = [self._skia_api.skia_dir.join('tools', 'xsan_build'),
+ cmd = [self.m.vars.skia_dir.join('tools', 'xsan_build'),
self._sanitizer, target]
- self._skia_api.run(self._skia_api.m.step, 'build %s' % target, cmd=cmd,
- cwd=self._skia_api.skia_dir)
+ self.m.run(self.m.step, 'build %s' % target, cmd=cmd,
+ cwd=self.m.vars.skia_dir)
def copy_extra_build_products(self, swarming_out_dir):
# Include msan_out if MSAN.
- if 'MSAN' in self._skia_api.builder_cfg['extra_config']:
- msan_out = self._skia_api.m.path.join(
+ if 'MSAN' in self.m.vars.builder_cfg['extra_config']:
+ msan_out = self.m.path.join(
'third_party', 'externals', 'llvm', 'msan_out')
- self._skia_api.m.file.copytree(
+ self.m.file.copytree(
'copy msan_out',
- self._skia_api.skia_dir.join(msan_out),
+ self.m.vars.skia_dir.join(msan_out),
swarming_out_dir.join(msan_out),
symlinks=True)
# Include llvm_symbolizer from the Chromium DEPS so that suppressions work
# by symbol name.
# TODO(benjaminwagner): Figure out how to add this to Skia DEPS for
# target_os 'llvm'.
- self._skia_api.m.file.copytree(
+ self.m.file.copytree(
'copy llvm-build',
- self._skia_api.checkout_root.join('src', 'third_party', 'llvm-build'),
+ self.m.vars.checkout_root.join('src', 'third_party', 'llvm-build'),
swarming_out_dir.join('llvm-build'),
symlinks=True)
def step(self, name, cmd, env=None, **kwargs):
"""Wrapper for the Step API; runs a step as appropriate for this flavor."""
- skia_dir = self._skia_api.skia_dir
+ skia_dir = self.m.vars.skia_dir
lsan_suppressions = skia_dir.join('tools', 'lsan.supp')
tsan_suppressions = skia_dir.join('tools', 'tsan.supp')
ubsan_suppressions = skia_dir.join('tools', 'ubsan.supp')
@@ -64,13 +64,12 @@ class XSanFlavorUtils(default_flavor.DefaultFlavorUtils):
lsan_suppressions)
env['TSAN_OPTIONS'] = 'suppressions=%s' % tsan_suppressions
env['UBSAN_OPTIONS'] = 'suppressions=%s' % ubsan_suppressions
- self._skia_api.default_env['PATH'] = '%%(PATH)s:%s' % (
- self._skia_api.slave_dir.join('llvm-build', 'Release+Asserts', 'bin'))
- env['LD_LIBRARY_PATH'] = self._skia_api.slave_dir.join(
+ self.m.vars.default_env['PATH'] = '%%(PATH)s:%s' % (
+ self.m.vars.slave_dir.join('llvm-build', 'Release+Asserts', 'bin'))
+ env['LD_LIBRARY_PATH'] = self.m.vars.slave_dir.join(
'third_party', 'externals', 'llvm', 'msan_out', 'lib')
path_to_app = self.out_dir.join(cmd[0])
new_cmd = [path_to_app]
new_cmd.extend(cmd[1:])
- return self._skia_api.run(self._skia_api.m.step, name, cmd=new_cmd, env=env,
- **kwargs)
+ return self.m.run(self.m.step, name, cmd=new_cmd, env=env, **kwargs)
diff --git a/infra/bots/recipe_modules/run/__init__.py b/infra/bots/recipe_modules/run/__init__.py
new file mode 100644
index 0000000000..cb4c1ae0ee
--- /dev/null
+++ b/infra/bots/recipe_modules/run/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+DEPS = [
+ 'build/file',
+ 'recipe_engine/json',
+ 'recipe_engine/path',
+ 'recipe_engine/platform',
+ 'recipe_engine/python',
+ 'recipe_engine/step',
+ 'vars',
+]
diff --git a/infra/bots/recipe_modules/run/api.py b/infra/bots/recipe_modules/run/api.py
new file mode 100644
index 0000000000..1784254d6e
--- /dev/null
+++ b/infra/bots/recipe_modules/run/api.py
@@ -0,0 +1,152 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+# pylint: disable=W0201
+
+
+from recipe_engine import recipe_api
+
+
+BUILD_PRODUCTS_ISOLATE_WHITELIST = [
+ 'dm',
+ 'dm.exe',
+ 'nanobench',
+ 'nanobench.exe',
+ '*.so',
+ '*.dll',
+ '*.dylib',
+ 'skia_launcher',
+ 'lib/*.so',
+ 'iOSShell.app',
+ 'iOSShell.ipa',
+ 'visualbench',
+ 'visualbench.exe',
+ 'vulkan-1.dll',
+]
+
+
+class SkiaStepApi(recipe_api.RecipeApi):
+
+ def __init__(self, *args, **kwargs):
+ """Initialize the recipe module."""
+ super(SkiaStepApi, self).__init__(*args, **kwargs)
+
+ self._already_ran = {}
+ self._ccache = None
+ self._checked_for_ccache = False
+ self._failed = []
+
+ def check_failure(self):
+ """Raise an exception if any step failed."""
+ if self._failed:
+ raise self.m.step.StepFailure('Failed build steps: %s' %
+ ', '.join([f.name for f in self._failed]))
+
+ def run_once(self, fn, *args, **kwargs):
+ if not fn.__name__ in self._already_ran:
+ self._already_ran[fn.__name__] = fn(*args, **kwargs)
+ return self._already_ran[fn.__name__]
+
+ def readfile(self, filename, *args, **kwargs):
+ """Convenience function for reading files."""
+ name = kwargs.pop('name') or 'read %s' % self.m.path.basename(filename)
+ return self.m.file.read(name, filename, infra_step=True, *args, **kwargs)
+
+ def writefile(self, filename, contents):
+ """Convenience function for writing files."""
+ return self.m.file.write('write %s' % self.m.path.basename(filename),
+ filename, contents, infra_step=True)
+
+ def rmtree(self, path):
+ """Wrapper around api.file.rmtree with environment fix."""
+ env = {}
+ env['PYTHONPATH'] = str(self.m.path['checkout'].join(
+ 'infra', 'bots', '.recipe_deps', 'build', 'scripts'))
+ self.m.file.rmtree(self.m.path.basename(path),
+ path,
+ env=env,
+ infra_step=True)
+
+ def __call__(self, steptype, name, abort_on_failure=True,
+ fail_build_on_failure=True, env=None, **kwargs):
+ """Run a step. If it fails, keep going but mark the build status failed."""
+ env = dict(env or {})
+ env.update(self.m.vars.default_env)
+ try:
+ return steptype(name=name, env=env, **kwargs)
+ except self.m.step.StepFailure as e:
+ if abort_on_failure:
+ raise # pragma: no cover
+ if fail_build_on_failure:
+ self._failed.append(e)
+
+ def json_from_file(self, filename, cwd, builder_name, test_data):
+ """Execute the given script to obtain JSON data."""
+ return self.m.python(
+ 'exec %s' % self.m.path.basename(filename),
+ filename,
+ args=[self.m.json.output(), builder_name],
+ step_test_data=lambda: self.m.json.test_api.output(test_data),
+ cwd=cwd,
+ infra_step=True).json.output
+
+ def copy_build_products(self, src, dst):
+ """Copy whitelisted build products from src to dst."""
+ self.m.python.inline(
+ name='copy build products',
+ program='''import errno
+import glob
+import os
+import shutil
+import sys
+
+src = sys.argv[1]
+dst = sys.argv[2]
+build_products_whitelist = %s
+
+try:
+ os.makedirs(dst)
+except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+for pattern in build_products_whitelist:
+ path = os.path.join(src, pattern)
+ for f in glob.glob(path):
+ dst_path = os.path.join(dst, os.path.relpath(f, src))
+ if not os.path.isdir(os.path.dirname(dst_path)):
+ os.makedirs(os.path.dirname(dst_path))
+ print 'Copying build product %%s to %%s' %% (f, dst_path)
+ shutil.move(f, dst_path)
+''' % str(BUILD_PRODUCTS_ISOLATE_WHITELIST),
+ args=[src, dst],
+ infra_step=True)
+
+ def ccache(self):
+ if not self._checked_for_ccache:
+ self._checked_for_ccache = True
+ if not self.m.platform.is_win:
+ result = self(
+ self.m.python.inline,
+ name='has ccache?',
+ program='''import json
+import subprocess
+import sys
+
+ccache = None
+try:
+ ccache = subprocess.check_output(['which', 'ccache']).rstrip()
+except:
+ pass
+print json.dumps({'ccache': ccache})
+''',
+ stdout=self.m.json.output(),
+ infra_step=True,
+ abort_on_failure=False,
+ fail_build_on_failure=False)
+ if result and result.stdout and result.stdout.get('ccache'):
+ self._ccache = result.stdout['ccache']
+
+ return self._ccache
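For orientation, a minimal sketch (not part of the change) of how a recipe drives the new run module once this split lands; the step name and command below are hypothetical:

    DEPS = [
      'core',
      'recipe_engine/step',
      'run',
      'vars',
    ]

    def RunSteps(api):
      api.core.setup()
      # The wrapper records failures instead of aborting, unless told otherwise.
      api.run(api.step, 'say hello', cmd=['echo', 'hello'],
              abort_on_failure=False)
      # Raise at the end if any wrapped step failed.
      api.run.check_failure()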
diff --git a/infra/bots/recipe_modules/skia/api.py b/infra/bots/recipe_modules/skia/api.py
deleted file mode 100644
index 96467eaaff..0000000000
--- a/infra/bots/recipe_modules/skia/api.py
+++ /dev/null
@@ -1,842 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-# pylint: disable=W0201
-
-
-import json
-import os
-import re
-import sys
-
-from recipe_engine import recipe_api
-from recipe_engine import config_types
-
-from . import android_flavor
-from . import cmake_flavor
-from . import coverage_flavor
-from . import default_flavor
-from . import fake_specs
-from . import gn_flavor
-from . import ios_flavor
-from . import pdfium_flavor
-from . import valgrind_flavor
-from . import xsan_flavor
-
-
-BOTO_CHROMIUM_SKIA_GM = 'chromium-skia-gm.boto'
-
-GS_SUBDIR_TMPL_SK_IMAGE = 'skimage/v%s'
-GS_SUBDIR_TMPL_SKP = 'playback_%s/skps'
-
-TEST_EXPECTED_SKP_VERSION = '42'
-TEST_EXPECTED_SK_IMAGE_VERSION = '42'
-
-VERSION_FILE_SK_IMAGE = 'SK_IMAGE_VERSION'
-VERSION_FILE_SKP = 'SKP_VERSION'
-
-VERSION_NONE = -1
-
-BUILD_PRODUCTS_ISOLATE_WHITELIST = [
- 'dm',
- 'dm.exe',
- 'nanobench',
- 'nanobench.exe',
- '*.so',
- '*.dll',
- '*.dylib',
- 'skia_launcher',
- 'lib/*.so',
- 'iOSShell.app',
- 'iOSShell.ipa',
- 'visualbench',
- 'visualbench.exe',
- 'vulkan-1.dll',
-]
-
-
-def is_android(builder_cfg):
- """Determine whether the given builder is an Android builder."""
- return ('Android' in builder_cfg.get('extra_config', '') or
- builder_cfg.get('os') == 'Android')
-
-
-def is_cmake(builder_cfg):
- return 'CMake' in builder_cfg.get('extra_config', '')
-
-
-def is_gn(builder_cfg):
- return 'GN' == builder_cfg.get('extra_config', '')
-
-
-def is_ios(builder_cfg):
- return ('iOS' in builder_cfg.get('extra_config', '') or
- builder_cfg.get('os') == 'iOS')
-
-
-def is_pdfium(builder_cfg):
- return 'PDFium' in builder_cfg.get('extra_config', '')
-
-
-def is_valgrind(builder_cfg):
- return 'Valgrind' in builder_cfg.get('extra_config', '')
-
-
-def is_xsan(builder_cfg):
- return ('ASAN' in builder_cfg.get('extra_config', '') or
- 'MSAN' in builder_cfg.get('extra_config', '') or
- 'TSAN' in builder_cfg.get('extra_config', ''))
-
-
-class SkiaApi(recipe_api.RecipeApi):
-
- def get_flavor(self, builder_cfg):
- """Return a flavor utils object specific to the given builder."""
- if is_android(builder_cfg):
- return android_flavor.AndroidFlavorUtils(self)
- elif is_cmake(builder_cfg):
- return cmake_flavor.CMakeFlavorUtils(self)
- elif is_gn(builder_cfg):
- return gn_flavor.GNFlavorUtils(self)
- elif is_ios(builder_cfg):
- return ios_flavor.iOSFlavorUtils(self)
- elif is_pdfium(builder_cfg):
- return pdfium_flavor.PDFiumFlavorUtils(self)
- elif is_valgrind(builder_cfg):
- return valgrind_flavor.ValgrindFlavorUtils(self)
- elif is_xsan(builder_cfg):
- return xsan_flavor.XSanFlavorUtils(self)
- elif builder_cfg.get('configuration') == 'Coverage':
- return coverage_flavor.CoverageFlavorUtils(self)
- else:
- return default_flavor.DefaultFlavorUtils(self)
-
- @property
- def home_dir(self):
- """Find the home directory."""
- home_dir = os.path.expanduser('~')
- if self._test_data.enabled:
- home_dir = '[HOME]'
- return home_dir
-
- def gsutil_env(self, boto_file):
- """Environment variables for gsutil."""
- boto_path = None
- if boto_file:
- boto_path = self.m.path.join(self.home_dir, boto_file)
- return {'AWS_CREDENTIAL_FILE': boto_path,
- 'BOTO_CONFIG': boto_path}
-
- def get_builder_spec(self, skia_dir, builder_name):
- """Obtain the buildbot spec for the given builder."""
- fake_spec = None
- if self._test_data.enabled:
- fake_spec = fake_specs.FAKE_SPECS[builder_name]
- builder_spec = self.json_from_file(
- skia_dir.join('tools', 'buildbot_spec.py'),
- skia_dir,
- builder_name,
- fake_spec)
- return builder_spec
-
- def make_path(self, *path):
- """Return a Path object for the given path."""
- key = 'custom_%s' % '_'.join(path)
- self.m.path.c.base_paths[key] = tuple(path)
- return self.m.path[key]
-
- def setup(self):
- """Prepare the bot to run."""
- # Setup
- self.failed = []
-
- self.builder_name = self.m.properties['buildername']
- self.master_name = self.m.properties['mastername']
- self.slave_name = self.m.properties['slavename']
-
- self.slave_dir = self.m.path['slave_build']
- self.checkout_root = self.slave_dir
- self.default_env = {}
- self.gclient_env = {}
- self.is_compile_bot = self.builder_name.startswith('Build-')
-
- self.default_env['CHROME_HEADLESS'] = '1'
- # The 'depot_tools' directory comes from recipe DEPS and isn't provided by
- # default. We have to set it manually.
- self.m.path.c.base_paths['depot_tools'] = (
- self.m.path.c.base_paths['slave_build'] +
- ('skia', 'infra', 'bots', '.recipe_deps', 'depot_tools'))
- if 'Win' in self.builder_name:
- self.m.path.c.base_paths['depot_tools'] = (
- 'c:\\', 'Users', 'chrome-bot', 'depot_tools')
-
- # Compile bots keep a persistent checkout.
- self.persistent_checkout = (self.is_compile_bot or
- 'RecreateSKPs' in self.builder_name)
- if self.persistent_checkout:
- if 'Win' in self.builder_name:
- self.checkout_root = self.make_path('C:\\', 'b', 'work')
- self.gclient_cache = self.make_path('C:\\', 'b', 'cache')
- else:
- self.checkout_root = self.make_path('/', 'b', 'work')
- self.gclient_cache = self.make_path('/', 'b', 'cache')
-
- self.skia_dir = self.checkout_root.join('skia')
- self.infrabots_dir = self.skia_dir.join('infra', 'bots')
-
- # Some bots also require a checkout of chromium.
- self._need_chromium_checkout = 'CommandBuffer' in self.builder_name
- if 'CommandBuffer' in self.builder_name:
- self.gclient_env['GYP_CHROMIUM_NO_ACTION'] = '0'
- if ((self.is_compile_bot and
- 'SAN' in self.builder_name) or
- 'RecreateSKPs' in self.builder_name):
- self._need_chromium_checkout = True
- if 'RecreateSKPs' in self.builder_name:
- self.gclient_env['CPPFLAGS'] = (
- '-DSK_ALLOW_CROSSPROCESS_PICTUREIMAGEFILTERS=1')
-
- # Some bots also require a checkout of PDFium.
- self._need_pdfium_checkout = 'PDFium' in self.builder_name
-
- # Check out the Skia code.
- self.checkout_steps()
-
- # Obtain the spec for this builder from the Skia repo. Use it to set more
- # properties.
- self.builder_spec = self.get_builder_spec(self.skia_dir, self.builder_name)
-
- self.builder_cfg = self.builder_spec['builder_cfg']
- self.role = self.builder_cfg['role']
-
- # Set some important variables.
- self.resource_dir = self.skia_dir.join('resources')
- self.images_dir = self.slave_dir.join('skimage')
- if not self.m.path.exists(self.infrabots_dir.join(
- 'assets', 'skimage', 'VERSION')):
- # TODO(borenet): Remove this once enough time has passed.
- self.images_dir = self.slave_dir.join('images')
- self.skia_out = self.skia_dir.join('out', self.builder_name)
- self.swarming_out_dir = self.make_path(self.m.properties['swarm_out_dir'])
- self.local_skp_dir = self.slave_dir.join('skp')
- if not self.m.path.exists(self.infrabots_dir.join(
- 'assets', 'skp', 'VERSION')):
- # TODO(borenet): Remove this once enough time has passed.
- self.local_skp_dir = self.slave_dir.join('skps')
- if not self.is_compile_bot:
- self.skia_out = self.slave_dir.join('out')
- self.tmp_dir = self.m.path['slave_build'].join('tmp')
- if not self.m.path.exists(self.tmp_dir):
- self._run_once(self.m.file.makedirs,
- 'tmp_dir',
- self.tmp_dir,
- infra_step=True)
-
- self.gsutil_env_chromium_skia_gm = self.gsutil_env(BOTO_CHROMIUM_SKIA_GM)
-
- self.device_dirs = None
- self._ccache = None
- self._checked_for_ccache = False
- self.configuration = self.builder_spec['configuration']
- self.default_env.update({'SKIA_OUT': self.skia_out,
- 'BUILDTYPE': self.configuration})
- self.default_env.update(self.builder_spec['env'])
- self.build_targets = [str(t) for t in self.builder_spec['build_targets']]
- self.do_compile_steps = self.builder_spec.get('do_compile_steps', True)
- self.do_test_steps = self.builder_spec['do_test_steps']
- self.do_perf_steps = self.builder_spec['do_perf_steps']
- self.is_trybot = self.builder_cfg['is_trybot']
- self.upload_dm_results = self.builder_spec['upload_dm_results']
- self.upload_perf_results = self.builder_spec['upload_perf_results']
- self.dm_dir = self.m.path.join(
- self.swarming_out_dir, 'dm')
- self.perf_data_dir = self.m.path.join(self.swarming_out_dir,
- 'perfdata', self.builder_name, 'data')
- self.dm_flags = self.builder_spec['dm_flags']
- self.nanobench_flags = self.builder_spec['nanobench_flags']
-
- self.flavor = self.get_flavor(self.builder_cfg)
-
- def check_failure(self):
- """Raise an exception if any step failed."""
- if self.failed:
- raise self.m.step.StepFailure('Failed build steps: %s' %
- ', '.join([f.name for f in self.failed]))
-
- def _run_once(self, fn, *args, **kwargs):
- if not hasattr(self, '_already_ran'):
- self._already_ran = {}
- if not fn.__name__ in self._already_ran:
- self._already_ran[fn.__name__] = fn(*args, **kwargs)
- return self._already_ran[fn.__name__]
-
- def update_repo(self, parent_dir, repo):
- """Update an existing repo. This is safe to call without gen_steps."""
- repo_path = parent_dir.join(repo.name)
- if self.m.path.exists(repo_path): # pragma: nocover
- if self.m.platform.is_win:
- git = 'git.bat'
- else:
- git = 'git'
- self.m.step('git remote set-url',
- cmd=[git, 'remote', 'set-url', 'origin', repo.url],
- cwd=repo_path,
- infra_step=True)
- self.m.step('git fetch',
- cmd=[git, 'fetch'],
- cwd=repo_path,
- infra_step=True)
- self.m.step('git reset',
- cmd=[git, 'reset', '--hard', repo.revision],
- cwd=repo_path,
- infra_step=True)
- self.m.step('git clean',
- cmd=[git, 'clean', '-d', '-f'],
- cwd=repo_path,
- infra_step=True)
-
- def checkout_steps(self):
- """Run the steps to obtain a checkout of Skia."""
- cfg_kwargs = {}
- if not self.persistent_checkout:
- # We should've obtained the Skia checkout through isolates, so we don't
- # need to perform the checkout ourselves.
- self.m.path['checkout'] = self.skia_dir
- self.got_revision = self.m.properties['revision']
- return
-
- # Use a persistent gclient cache for Swarming.
- cfg_kwargs['CACHE_DIR'] = self.gclient_cache
-
- # Create the checkout path if necessary.
- if not self.m.path.exists(self.checkout_root):
- self.m.file.makedirs('checkout_path', self.checkout_root, infra_step=True)
-
- # Initial cleanup.
- gclient_cfg = self.m.gclient.make_config(**cfg_kwargs)
- skia = gclient_cfg.solutions.add()
- skia.name = 'skia'
- skia.managed = False
- skia.url = 'https://skia.googlesource.com/skia.git'
- skia.revision = self.m.properties.get('revision') or 'origin/master'
- self.update_repo(self.checkout_root, skia)
-
- # TODO(rmistry): Remove the below block after there is a solution for
- # crbug.com/616443
- entries_file = self.checkout_root.join('.gclient_entries')
- if self.m.path.exists(entries_file):
- self.m.file.remove('remove %s' % entries_file,
- entries_file,
- infra_step=True) # pragma: no cover
-
- if self._need_chromium_checkout:
- chromium = gclient_cfg.solutions.add()
- chromium.name = 'src'
- chromium.managed = False
- chromium.url = 'https://chromium.googlesource.com/chromium/src.git'
- chromium.revision = 'origin/lkgr'
- self.update_repo(self.checkout_root, chromium)
-
- if self._need_pdfium_checkout:
- pdfium = gclient_cfg.solutions.add()
- pdfium.name = 'pdfium'
- pdfium.managed = False
- pdfium.url = 'https://pdfium.googlesource.com/pdfium.git'
- pdfium.revision = 'origin/master'
- self.update_repo(self.checkout_root, pdfium)
-
- # Run 'gclient sync'.
- gclient_cfg.got_revision_mapping['skia'] = 'got_revision'
- gclient_cfg.target_os.add('llvm')
- checkout_kwargs = {}
- checkout_kwargs['env'] = self.default_env
-
- # api.gclient.revert() assumes things about the layout of the code, so it
- # fails for us. Run an appropriate revert sequence for trybots instead.
- gclient_file = self.checkout_root.join('.gclient')
- if (self.m.tryserver.is_tryserver and
- self.m.path.exists(gclient_file)): # pragma: no cover
- # These steps taken from:
- # https://chromium.googlesource.com/chromium/tools/build/+/
- # 81a696760ab7c25f6606c54fc781b90b8af9fdd2/scripts/slave/
- # gclient_safe_revert.py
- if self.m.path.exists(entries_file):
- self.m.gclient('recurse', [
- 'recurse', '-i', 'sh', '-c',
- 'if [ -e .git ]; then git remote update; fi'])
- self.m.gclient(
- 'revert',
- ['revert', '-v', '-v', '-v', '--nohooks', '--upstream'],
- cwd=self.checkout_root)
-
- update_step = self.m.gclient.checkout(gclient_config=gclient_cfg,
- cwd=self.checkout_root,
- revert=False,
- **checkout_kwargs)
-
- self.got_revision = update_step.presentation.properties['got_revision']
- self.m.tryserver.maybe_apply_issue()
-
- if self._need_chromium_checkout:
- self.m.gclient.runhooks(cwd=self.checkout_root, env=self.gclient_env)
-
- def copy_build_products(self, src, dst):
- """Copy whitelisted build products from src to dst."""
- self.m.python.inline(
- name='copy build products',
- program='''import errno
-import glob
-import os
-import shutil
-import sys
-
-src = sys.argv[1]
-dst = sys.argv[2]
-build_products_whitelist = %s
-
-try:
- os.makedirs(dst)
-except OSError as e:
- if e.errno != errno.EEXIST:
- raise
-
-for pattern in build_products_whitelist:
- path = os.path.join(src, pattern)
- for f in glob.glob(path):
- dst_path = os.path.join(dst, os.path.relpath(f, src))
- if not os.path.isdir(os.path.dirname(dst_path)):
- os.makedirs(os.path.dirname(dst_path))
- print 'Copying build product %%s to %%s' %% (f, dst_path)
- shutil.move(f, dst_path)
-''' % str(BUILD_PRODUCTS_ISOLATE_WHITELIST),
- args=[src, dst],
- infra_step=True)
-
- def compile_steps(self, clobber=False):
- """Run the steps to build Skia."""
- try:
- for target in self.build_targets:
- self.flavor.compile(target)
- self.copy_build_products(
- self.flavor.out_dir,
- self.swarming_out_dir.join('out', self.configuration))
- self.flavor.copy_extra_build_products(self.swarming_out_dir)
- finally:
- if 'Win' in self.builder_cfg.get('os', ''):
- self.m.python.inline(
- name='cleanup',
- program='''import psutil
-for p in psutil.process_iter():
- try:
- if p.name in ('mspdbsrv.exe', 'vctip.exe', 'cl.exe', 'link.exe'):
- p.kill()
- except psutil._error.AccessDenied:
- pass
-''',
- infra_step=True)
-
- def _readfile(self, filename, *args, **kwargs):
- """Convenience function for reading files."""
- name = kwargs.pop('name') or 'read %s' % self.m.path.basename(filename)
- return self.m.file.read(name, filename, infra_step=True, *args, **kwargs)
-
- def _writefile(self, filename, contents):
- """Convenience function for writing files."""
- return self.m.file.write('write %s' % self.m.path.basename(filename),
- filename, contents, infra_step=True)
-
- def rmtree(self, path):
- """Wrapper around api.file.rmtree with environment fix."""
- env = {}
- env['PYTHONPATH'] = str(self.m.path['checkout'].join(
- 'infra', 'bots', '.recipe_deps', 'build', 'scripts'))
- self.m.file.rmtree(self.m.path.basename(path),
- path,
- env=env,
- infra_step=True)
-
- def run(self, steptype, name, abort_on_failure=True,
- fail_build_on_failure=True, env=None, **kwargs):
- """Run a step. If it fails, keep going but mark the build status failed."""
- env = dict(env or {})
- env.update(self.default_env)
- try:
- return steptype(name=name, env=env, **kwargs)
- except self.m.step.StepFailure as e:
- if abort_on_failure:
- raise # pragma: no cover
- if fail_build_on_failure:
- self.failed.append(e)
-
- def check_actual_version(self, version_file, tmp_dir, test_actual_version):
- """Assert that we have an actually-downloaded version of the dir."""
- actual_version_file = self.m.path.join(tmp_dir, version_file)
- actual_version = self._readfile(
- actual_version_file,
- name='Get downloaded %s' % version_file,
- test_data=test_actual_version).rstrip()
- assert actual_version != VERSION_NONE
- return actual_version
-
- def copy_dir(self, host_version, version_file, tmp_dir,
- host_path, device_path, test_expected_version,
- test_actual_version):
- actual_version_file = self.m.path.join(tmp_dir, version_file)
- # Copy to device.
- device_version_file = self.flavor.device_path_join(
- self.device_dirs.tmp_dir, version_file)
- if str(actual_version_file) != str(device_version_file):
- try:
- device_version = self.flavor.read_file_on_device(device_version_file)
- except self.m.step.StepFailure:
- device_version = VERSION_NONE
- if device_version != host_version:
- self.flavor.remove_file_on_device(device_version_file)
- self.flavor.create_clean_device_dir(device_path)
- self.flavor.copy_directory_contents_to_device(host_path, device_path)
-
- # Copy the new version file.
- self.flavor.copy_file_to_device(actual_version_file,
- device_version_file)
-
- def _copy_images(self):
- """Download and copy test images if needed."""
- version_file = self.infrabots_dir.join('assets', 'skimage', 'VERSION')
- if self.m.path.exists(version_file):
- test_data = self.m.properties.get(
- 'test_downloaded_sk_image_version', TEST_EXPECTED_SK_IMAGE_VERSION)
- version = self._readfile(version_file,
- name='Get downloaded skimage VERSION',
- test_data=test_data).rstrip()
- self._writefile(self.m.path.join(self.tmp_dir, VERSION_FILE_SK_IMAGE),
- version)
- else:
- # TODO(borenet): Remove this once enough time has passed.
- version = self.check_actual_version(
- VERSION_FILE_SK_IMAGE,
- self.tmp_dir,
- test_actual_version=self.m.properties.get(
- 'test_downloaded_sk_image_version',
- TEST_EXPECTED_SK_IMAGE_VERSION),
- )
- self.copy_dir(
- version,
- VERSION_FILE_SK_IMAGE,
- self.tmp_dir,
- self.images_dir,
- self.device_dirs.images_dir,
- test_expected_version=self.m.properties.get(
- 'test_downloaded_sk_image_version',
- TEST_EXPECTED_SK_IMAGE_VERSION),
- test_actual_version=self.m.properties.get(
- 'test_downloaded_sk_image_version',
- TEST_EXPECTED_SK_IMAGE_VERSION))
- return version
-
- def _copy_skps(self):
- """Download and copy the SKPs if needed."""
- version_file = self.infrabots_dir.join('assets', 'skp', 'VERSION')
- if self.m.path.exists(version_file):
- test_data = self.m.properties.get(
- 'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION)
- version = self._readfile(version_file,
- name='Get downloaded SKP VERSION',
- test_data=test_data).rstrip()
- self._writefile(self.m.path.join(self.tmp_dir, VERSION_FILE_SKP), version)
- else:
- # TODO(borenet): Remove this once enough time has passed.
- version = self.check_actual_version(
- VERSION_FILE_SKP,
- self.tmp_dir,
- test_actual_version=self.m.properties.get(
- 'test_downloaded_skp_version',
- TEST_EXPECTED_SKP_VERSION),
- )
- self.copy_dir(
- version,
- VERSION_FILE_SKP,
- self.tmp_dir,
- self.local_skp_dir,
- self.device_dirs.skp_dir,
- test_expected_version=self.m.properties.get(
- 'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION),
- test_actual_version=self.m.properties.get(
- 'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION))
- return version
-
- def install(self):
- """Copy the required executables and files to the device."""
- self.device_dirs = self.flavor.get_device_dirs()
-
- # Run any device-specific installation.
- self.flavor.install()
-
- # TODO(borenet): Only copy files which have changed.
- # Resources
- self.flavor.copy_directory_contents_to_device(self.resource_dir,
- self.device_dirs.resource_dir)
-
- def ccache(self):
- if not self._checked_for_ccache:
- self._checked_for_ccache = True
- if not self.m.platform.is_win:
- result = self.run(
- self.m.python.inline,
- name='has ccache?',
- program='''import json
-import subprocess
-import sys
-
-ccache = None
-try:
- ccache = subprocess.check_output(['which', 'ccache']).rstrip()
-except:
- pass
-print json.dumps({'ccache': ccache})
-''',
- stdout=self.m.json.output(),
- infra_step=True,
- abort_on_failure=False,
- fail_build_on_failure=False)
- if result and result.stdout and result.stdout.get('ccache'):
- self._ccache = result.stdout['ccache']
-
- return self._ccache
-
- def json_from_file(self, filename, cwd, builder_name, test_data):
- """Execute the given script to obtain JSON data."""
- return self.m.python(
- 'exec %s' % self.m.path.basename(filename),
- filename,
- args=[self.m.json.output(), builder_name],
- step_test_data=lambda: self.m.json.test_api.output(test_data),
- cwd=cwd,
- infra_step=True).json.output
-
- def test_steps(self):
- """Run the DM test."""
- self._run_once(self.install)
- self._run_once(self._copy_skps)
- self._run_once(self._copy_images)
-
- use_hash_file = False
- if self.upload_dm_results:
- # This must run before we write anything into self.device_dirs.dm_dir
- # or we may end up deleting our output on machines where they're the same.
- self.flavor.create_clean_host_dir(self.dm_dir)
- if str(self.dm_dir) != str(self.device_dirs.dm_dir):
- self.flavor.create_clean_device_dir(self.device_dirs.dm_dir)
-
- # Obtain the list of already-generated hashes.
- hash_filename = 'uninteresting_hashes.txt'
-
- # Ensure that the tmp_dir exists.
- self._run_once(self.m.file.makedirs,
- 'tmp_dir',
- self.tmp_dir,
- infra_step=True)
-
- host_hashes_file = self.tmp_dir.join(hash_filename)
- hashes_file = self.flavor.device_path_join(
- self.device_dirs.tmp_dir, hash_filename)
- self.run(
- self.m.python.inline,
- 'get uninteresting hashes',
- program="""
- import contextlib
- import math
- import socket
- import sys
- import time
- import urllib2
-
- HASHES_URL = 'https://gold.skia.org/_/hashes'
- RETRIES = 5
- TIMEOUT = 60
- WAIT_BASE = 15
-
- socket.setdefaulttimeout(TIMEOUT)
- for retry in range(RETRIES):
- try:
- with contextlib.closing(
- urllib2.urlopen(HASHES_URL, timeout=TIMEOUT)) as w:
- hashes = w.read()
- with open(sys.argv[1], 'w') as f:
- f.write(hashes)
- break
- except Exception as e:
- print 'Failed to get uninteresting hashes from %s:' % HASHES_URL
- print e
- if retry == RETRIES:
- raise
- waittime = WAIT_BASE * math.pow(2, retry)
- print 'Retry in %d seconds.' % waittime
- time.sleep(waittime)
- """,
- args=[host_hashes_file],
- cwd=self.skia_dir,
- abort_on_failure=False,
- fail_build_on_failure=False,
- infra_step=True)
-
- if self.m.path.exists(host_hashes_file):
- self.flavor.copy_file_to_device(host_hashes_file, hashes_file)
- use_hash_file = True
-
- # Run DM.
- properties = [
- 'gitHash', self.got_revision,
- 'master', self.master_name,
- 'builder', self.builder_name,
- 'build_number', self.m.properties['buildnumber'],
- ]
- if self.is_trybot:
- properties.extend([
- 'issue', self.m.properties['issue'],
- 'patchset', self.m.properties['patchset'],
- ])
-
- args = [
- 'dm',
- '--undefok', # This helps branches that may not know new flags.
- '--resourcePath', self.device_dirs.resource_dir,
- '--skps', self.device_dirs.skp_dir,
- '--images', self.flavor.device_path_join(
- self.device_dirs.images_dir, 'dm'),
- '--colorImages', self.flavor.device_path_join(self.device_dirs.images_dir,
- 'colorspace'),
- '--nameByHash',
- '--properties'
- ] + properties
-
- args.append('--key')
- args.extend(self._KeyParams())
- if use_hash_file:
- args.extend(['--uninterestingHashesFile', hashes_file])
- if self.upload_dm_results:
- args.extend(['--writePath', self.device_dirs.dm_dir])
-
- skip_flag = None
- if self.builder_cfg.get('cpu_or_gpu') == 'CPU':
- skip_flag = '--nogpu'
- elif self.builder_cfg.get('cpu_or_gpu') == 'GPU':
- skip_flag = '--nocpu'
- if skip_flag:
- args.append(skip_flag)
- args.extend(self.dm_flags)
-
- self.run(self.flavor.step, 'dm', cmd=args, abort_on_failure=False,
- env=self.default_env)
-
- if self.upload_dm_results:
- # Copy images and JSON to host machine if needed.
- self.flavor.copy_directory_contents_to_host(self.device_dirs.dm_dir,
- self.dm_dir)
-
- # See skia:2789.
- if ('Valgrind' in self.builder_name and
- self.builder_cfg.get('cpu_or_gpu') == 'GPU'):
- abandonGpuContext = list(args)
- abandonGpuContext.append('--abandonGpuContext')
- self.run(self.flavor.step, 'dm --abandonGpuContext',
- cmd=abandonGpuContext, abort_on_failure=False)
- preAbandonGpuContext = list(args)
- preAbandonGpuContext.append('--preAbandonGpuContext')
- self.run(self.flavor.step, 'dm --preAbandonGpuContext',
- cmd=preAbandonGpuContext, abort_on_failure=False,
- env=self.default_env)
-
- def perf_steps(self):
- """Run Skia benchmarks."""
- self._run_once(self.install)
- self._run_once(self._copy_skps)
- self._run_once(self._copy_images)
-
- if self.upload_perf_results:
- self.flavor.create_clean_device_dir(self.device_dirs.perf_data_dir)
-
- # Run nanobench.
- properties = [
- '--properties',
- 'gitHash', self.got_revision,
- 'build_number', self.m.properties['buildnumber'],
- ]
- if self.is_trybot:
- properties.extend([
- 'issue', self.m.properties['issue'],
- 'patchset', self.m.properties['patchset'],
- ])
-
- target = 'nanobench'
- if 'VisualBench' in self.builder_name:
- target = 'visualbench'
- args = [
- target,
- '--undefok', # This helps branches that may not know new flags.
- '-i', self.device_dirs.resource_dir,
- '--skps', self.device_dirs.skp_dir,
- '--images', self.flavor.device_path_join(
- self.device_dirs.images_dir, 'nanobench'),
- ]
-
- skip_flag = None
- if self.builder_cfg.get('cpu_or_gpu') == 'CPU':
- skip_flag = '--nogpu'
- elif self.builder_cfg.get('cpu_or_gpu') == 'GPU':
- skip_flag = '--nocpu'
- if skip_flag:
- args.append(skip_flag)
- args.extend(self.nanobench_flags)
-
- if self.upload_perf_results:
- json_path = self.flavor.device_path_join(
- self.device_dirs.perf_data_dir,
- 'nanobench_%s.json' % self.got_revision)
- args.extend(['--outResultsFile', json_path])
- args.extend(properties)
-
- keys_blacklist = ['configuration', 'role', 'is_trybot']
- args.append('--key')
- for k in sorted(self.builder_cfg.keys()):
- if not k in keys_blacklist:
- args.extend([k, self.builder_cfg[k]])
-
- self.run(self.flavor.step, target, cmd=args, abort_on_failure=False,
- env=self.default_env)
-
- # See skia:2789.
- if ('Valgrind' in self.builder_name and
- self.builder_cfg.get('cpu_or_gpu') == 'GPU'):
- abandonGpuContext = list(args)
- abandonGpuContext.extend(['--abandonGpuContext', '--nocpu'])
- self.run(self.flavor.step, '%s --abandonGpuContext' % target,
- cmd=abandonGpuContext, abort_on_failure=False,
- env=self.default_env)
-
- # Upload results.
- if self.upload_perf_results:
- self.m.file.makedirs('perf_dir', self.perf_data_dir)
- self.flavor.copy_directory_contents_to_host(
- self.device_dirs.perf_data_dir, self.perf_data_dir)
-
- def cleanup_steps(self):
- """Run any cleanup steps."""
- self.flavor.cleanup_steps()
-
- def _KeyParams(self):
- """Build a unique key from the builder name (as a list).
-
- E.g. arch x86 gpu GeForce320M mode MacMini4.1 os Mac10.6
- """
- # Don't bother to include role, which is always Test.
- # TryBots are uploaded elsewhere so they can use the same key.
- blacklist = ['role', 'is_trybot']
-
- flat = []
- for k in sorted(self.builder_cfg.keys()):
- if k not in blacklist:
- flat.append(k)
- flat.append(self.builder_cfg[k])
- return flat
diff --git a/infra/bots/recipe_modules/skia_swarming/__init__.py b/infra/bots/recipe_modules/swarming/__init__.py
index 1232fc2d99..3e05b02b1f 100644
--- a/infra/bots/recipe_modules/skia_swarming/__init__.py
+++ b/infra/bots/recipe_modules/swarming/__init__.py
@@ -14,5 +14,5 @@ DEPS = [
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
- 'skia',
+ 'run',
]
diff --git a/infra/bots/recipe_modules/skia_swarming/api.py b/infra/bots/recipe_modules/swarming/api.py
index f1b0143aa3..ebbce5322a 100644
--- a/infra/bots/recipe_modules/skia_swarming/api.py
+++ b/infra/bots/recipe_modules/swarming/api.py
@@ -61,7 +61,7 @@ class SkiaSwarmingApi(recipe_api.RecipeApi):
env=env)
# Copy binaries to the expected location.
dest = self.m.path['slave_build'].join('luci-go')
- self.m.skia.rmtree(dest)
+ self.m.run.rmtree(dest)
self.m.file.copytree('Copy Go binary',
source=luci_go_dir,
dest=dest)
diff --git a/infra/bots/recipe_modules/vars/__init__.py b/infra/bots/recipe_modules/vars/__init__.py
new file mode 100644
index 0000000000..e18377b7cb
--- /dev/null
+++ b/infra/bots/recipe_modules/vars/__init__.py
@@ -0,0 +1,11 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+DEPS = [
+ 'recipe_engine/json',
+ 'recipe_engine/path',
+ 'recipe_engine/properties',
+ 'recipe_engine/python',
+ 'recipe_engine/step',
+]
diff --git a/infra/bots/recipe_modules/vars/api.py b/infra/bots/recipe_modules/vars/api.py
new file mode 100644
index 0000000000..69842fb37f
--- /dev/null
+++ b/infra/bots/recipe_modules/vars/api.py
@@ -0,0 +1,144 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+# pylint: disable=W0201
+
+
+from recipe_engine import recipe_api
+import os
+
+
+BOTO_CHROMIUM_SKIA_GM = 'chromium-skia-gm.boto'
+
+
+class SkiaVarsApi(recipe_api.RecipeApi):
+
+ def make_path(self, *path):
+ """Return a Path object for the given path."""
+ key = 'custom_%s' % '_'.join(path)
+ self.m.path.c.base_paths[key] = tuple(path)
+ return self.m.path[key]
+
+ def gsutil_env(self, boto_file):
+ """Environment variables for gsutil."""
+ boto_path = None
+ if boto_file:
+ boto_path = self.m.path.join(self.home_dir, boto_file)
+ return {'AWS_CREDENTIAL_FILE': boto_path,
+ 'BOTO_CONFIG': boto_path}
+
+ @property
+ def home_dir(self):
+ """Find the home directory."""
+ home_dir = os.path.expanduser('~')
+ if self._test_data.enabled:
+ home_dir = '[HOME]'
+ return home_dir
+
+ def setup(self):
+ """Prepare the variables."""
+ # Setup
+ self.builder_name = self.m.properties['buildername']
+ self.master_name = self.m.properties['mastername']
+ self.slave_name = self.m.properties['slavename']
+ self.build_number = self.m.properties['buildnumber']
+
+ self.slave_dir = self.m.path['slave_build']
+ self.checkout_root = self.slave_dir
+ self.default_env = {}
+ self.gclient_env = {}
+ self.is_compile_bot = self.builder_name.startswith('Build-')
+
+ self.default_env['CHROME_HEADLESS'] = '1'
+ # The 'depot_tools' directory comes from recipe DEPS and isn't provided by
+ # default. We have to set it manually.
+ self.m.path.c.base_paths['depot_tools'] = (
+ self.m.path.c.base_paths['slave_build'] +
+ ('skia', 'infra', 'bots', '.recipe_deps', 'depot_tools'))
+ if 'Win' in self.builder_name:
+ self.m.path.c.base_paths['depot_tools'] = (
+ 'c:\\', 'Users', 'chrome-bot', 'depot_tools')
+
+ # Compile bots keep a persistent checkout.
+ self.persistent_checkout = (self.is_compile_bot or
+ 'RecreateSKPs' in self.builder_name)
+ if self.persistent_checkout:
+ if 'Win' in self.builder_name:
+ self.checkout_root = self.make_path('C:\\', 'b', 'work')
+ self.gclient_cache = self.make_path('C:\\', 'b', 'cache')
+ else:
+ self.checkout_root = self.make_path('/', 'b', 'work')
+ self.gclient_cache = self.make_path('/', 'b', 'cache')
+
+ # got_revision is filled in after checkout steps.
+ self.got_revision = None
+ else:
+      # If there's no persistent checkout, then we have to assume we got the
+ # correct revision of the files from isolate.
+ self.got_revision = self.m.properties['revision']
+
+ self.skia_dir = self.checkout_root.join('skia')
+ if not self.persistent_checkout:
+ self.m.path['checkout'] = self.skia_dir
+
+ self.infrabots_dir = self.skia_dir.join('infra', 'bots')
+ self.resource_dir = self.skia_dir.join('resources')
+ self.images_dir = self.slave_dir.join('skimage')
+ self.skia_out = self.skia_dir.join('out', self.builder_name)
+ self.swarming_out_dir = self.make_path(self.m.properties['swarm_out_dir'])
+ self.local_skp_dir = self.slave_dir.join('skp')
+ if not self.is_compile_bot:
+ self.skia_out = self.slave_dir.join('out')
+ self.tmp_dir = self.m.path['slave_build'].join('tmp')
+
+ # Some bots also require a checkout of chromium.
+ self.need_chromium_checkout = 'CommandBuffer' in self.builder_name
+ if 'CommandBuffer' in self.builder_name:
+ self.gclient_env['GYP_CHROMIUM_NO_ACTION'] = '0'
+ if ((self.is_compile_bot and
+ 'SAN' in self.builder_name) or
+ 'RecreateSKPs' in self.builder_name):
+ self.need_chromium_checkout = True
+ if 'RecreateSKPs' in self.builder_name:
+ self.gclient_env['CPPFLAGS'] = (
+ '-DSK_ALLOW_CROSSPROCESS_PICTUREIMAGEFILTERS=1')
+
+ # Some bots also require a checkout of PDFium.
+ self.need_pdfium_checkout = 'PDFium' in self.builder_name
+
+
+ def update_with_builder_spec(self, builder_spec):
+ """Set more variables based on the builder_spec."""
+ # Obtain the spec for this builder from the Skia repo. Use it to set more
+ # properties.
+ self.builder_spec = builder_spec
+
+ self.builder_cfg = self.builder_spec['builder_cfg']
+ self.role = self.builder_cfg['role']
+
+ self.configuration = self.builder_spec['configuration']
+ self.default_env.update({'SKIA_OUT': self.skia_out,
+ 'BUILDTYPE': self.configuration})
+ self.default_env.update(self.builder_spec['env'])
+ self.build_targets = [str(t) for t in self.builder_spec['build_targets']]
+ self.do_compile_steps = self.builder_spec.get('do_compile_steps', True)
+ self.do_test_steps = self.builder_spec['do_test_steps']
+ self.do_perf_steps = self.builder_spec['do_perf_steps']
+ self.is_trybot = self.builder_cfg['is_trybot']
+ self.issue = None
+ self.patchset = None
+ self.rietveld = None
+ if self.is_trybot:
+ self.issue = self.m.properties['issue']
+ self.patchset = self.m.properties['patchset']
+ self.rietveld = self.m.properties['rietveld']
+ self.upload_dm_results = self.builder_spec['upload_dm_results']
+ self.upload_perf_results = self.builder_spec['upload_perf_results']
+ self.dm_dir = self.m.path.join(
+ self.swarming_out_dir, 'dm')
+ self.perf_data_dir = self.m.path.join(self.swarming_out_dir,
+ 'perfdata', self.builder_name, 'data')
+ self.dm_flags = self.builder_spec['dm_flags']
+ self.nanobench_flags = self.builder_spec['nanobench_flags']
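A similarly minimal sketch of the intended consumption pattern for the vars module; the echo steps are illustrative only, and core.setup() is assumed to populate these attributes, as the recipe changes below suggest:

    DEPS = [
      'core',
      'recipe_engine/step',
      'vars',
    ]

    def RunSteps(api):
      api.core.setup()
      # Paths, flags, and builder metadata are plain attributes on api.vars.
      api.step('show out dir', cmd=['echo', str(api.vars.skia_out)])
      if api.vars.is_trybot:
        api.step('show issue', cmd=['echo', str(api.vars.issue)])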
diff --git a/infra/bots/recipes/swarm_RecreateSKPs.py b/infra/bots/recipes/swarm_RecreateSKPs.py
index 49f9c11678..38fd8534b3 100644
--- a/infra/bots/recipes/swarm_RecreateSKPs.py
+++ b/infra/bots/recipes/swarm_RecreateSKPs.py
@@ -8,13 +8,14 @@
DEPS = [
'build/file',
+ 'core',
'depot_tools/gclient',
'recipe_engine/path',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
- 'skia',
+ 'vars',
]
@@ -91,9 +92,9 @@ if os.path.isfile(backup_file):
def RunSteps(api):
# Check out Chrome.
- api.skia.setup()
+ api.core.setup()
- src_dir = api.skia.checkout_root.join('src')
+ src_dir = api.vars.checkout_root.join('src')
out_dir = src_dir.join('out', 'Release')
# Call GN.
@@ -150,7 +151,7 @@ with open(dest_path, 'w') as f:
recreate_skps_env = {}
recreate_skps_env.update(env)
recreate_skps_env.update(boto_env)
- asset_dir = api.skia.infrabots_dir.join('assets', 'skp')
+ asset_dir = api.vars.infrabots_dir.join('assets', 'skp')
cmd = ['python', asset_dir.join('create.py'),
'--chrome_src_path', src_dir,
'--browser_executable', src_dir.join('out', 'Release', 'chrome'),
@@ -159,18 +160,18 @@ with open(dest_path, 'w') as f:
cmd.append('--upload_to_partner_bucket')
api.step('Recreate SKPs',
cmd=cmd,
- cwd=api.skia.skia_dir,
+ cwd=api.vars.skia_dir,
env=recreate_skps_env)
# Upload the SKPs.
if 'Canary' not in api.properties['buildername']:
cmd = ['python',
- api.skia.skia_dir.join('infra', 'bots', 'upload_skps.py'),
+ api.vars.skia_dir.join('infra', 'bots', 'upload_skps.py'),
'--target_dir', output_dir]
with depot_tools_auth(api, UPDATE_SKPS_KEY):
api.step('Upload SKPs',
cmd=cmd,
- cwd=api.skia.skia_dir,
+ cwd=api.vars.skia_dir,
env=env)
diff --git a/infra/bots/recipes/swarm_compile.expected/Build-Win-MSVC-x86_64-Release-Vulkan.json b/infra/bots/recipes/swarm_compile.expected/Build-Win-MSVC-x86_64-Release-Vulkan.json
index 62fa93c855..3209f21012 100644
--- a/infra/bots/recipes/swarm_compile.expected/Build-Win-MSVC-x86_64-Release-Vulkan.json
+++ b/infra/bots/recipes/swarm_compile.expected/Build-Win-MSVC-x86_64-Release-Vulkan.json
@@ -138,7 +138,7 @@
"GYP_DEFINES": "qt_sdk=C:/Qt/4.8.5/ skia_arch_type=x86_64 skia_vulkan=1 skia_vulkan_debug_layers=0 skia_warnings_as_errors=1 skia_win_debuggers_path=c:/DbgHelp skia_win_ltcg=0",
"PATH": "%(PATH)s;RECIPE_PACKAGE_REPO[depot_tools];RECIPE_PACKAGE_REPO[depot_tools]",
"SKIA_OUT": "[CUSTOM_C:\\_B_WORK]\\skia\\out\\Build-Win-MSVC-x86_64-Release-Vulkan",
- "VK_SDK_PATH": "[SLAVE_BUILD]\\vulkan_1.0.17.0"
+ "VK_SDK_PATH": "[SLAVE_BUILD]\\win_vulkan_sdk"
},
"name": "build most"
},
diff --git a/infra/bots/recipes/swarm_compile.py b/infra/bots/recipes/swarm_compile.py
index 50bbcbe0fe..4a1b134de6 100644
--- a/infra/bots/recipes/swarm_compile.py
+++ b/infra/bots/recipes/swarm_compile.py
@@ -7,11 +7,15 @@
DEPS = [
+ 'core',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/properties',
- 'skia',
+ 'recipe_engine/python',
+ 'flavor',
+ 'run',
+ 'vars',
]
@@ -42,10 +46,32 @@ TEST_BUILDERS = {
def RunSteps(api):
- api.skia.setup()
- api.skia.compile_steps()
- api.skia.cleanup_steps()
- api.skia.check_failure()
+ api.core.setup()
+
+ try:
+ for target in api.vars.build_targets:
+ api.flavor.compile(target)
+ api.run.copy_build_products(
+ api.flavor.out_dir,
+ api.vars.swarming_out_dir.join(
+ 'out', api.vars.configuration))
+ api.flavor.copy_extra_build_products(api.vars.swarming_out_dir)
+ finally:
+ if 'Win' in api.vars.builder_cfg.get('os', ''):
+ api.python.inline(
+ name='cleanup',
+ program='''import psutil
+for p in psutil.process_iter():
+ try:
+ if p.name in ('mspdbsrv.exe', 'vctip.exe', 'cl.exe', 'link.exe'):
+ p.kill()
+ except psutil._error.AccessDenied:
+ pass
+''',
+ infra_step=True)
+
+ api.core.cleanup_steps()
+ api.run.check_failure()
def GenTests(api):
diff --git a/infra/bots/recipes/swarm_housekeeper.expected/Housekeeper-PerCommit-Trybot.json b/infra/bots/recipes/swarm_housekeeper.expected/Housekeeper-PerCommit-Trybot.json
index e73056dd75..9e5375620c 100644
--- a/infra/bots/recipes/swarm_housekeeper.expected/Housekeeper-PerCommit-Trybot.json
+++ b/infra/bots/recipes/swarm_housekeeper.expected/Housekeeper-PerCommit-Trybot.json
@@ -76,7 +76,7 @@
{
"cmd": [
"python",
- "RECIPE_MODULE[skia::skia]/resources/run_binary_size_analysis.py",
+ "RECIPE_MODULE[skia::core]/resources/run_binary_size_analysis.py",
"--library",
"[SLAVE_BUILD]/out/Release/lib/libskia.so",
"--githash",
diff --git a/infra/bots/recipes/swarm_housekeeper.expected/Housekeeper-PerCommit.json b/infra/bots/recipes/swarm_housekeeper.expected/Housekeeper-PerCommit.json
index 5c68f034d4..d08d8cbe08 100644
--- a/infra/bots/recipes/swarm_housekeeper.expected/Housekeeper-PerCommit.json
+++ b/infra/bots/recipes/swarm_housekeeper.expected/Housekeeper-PerCommit.json
@@ -76,7 +76,7 @@
{
"cmd": [
"python",
- "RECIPE_MODULE[skia::skia]/resources/generate_and_upload_doxygen.py",
+ "RECIPE_MODULE[skia::core]/resources/generate_and_upload_doxygen.py",
"[DEPOT_TOOLS]/third_party/gsutil/gsutil"
],
"cwd": "[SLAVE_BUILD]/skia",
@@ -91,7 +91,7 @@
{
"cmd": [
"python",
- "RECIPE_MODULE[skia::skia]/resources/run_binary_size_analysis.py",
+ "RECIPE_MODULE[skia::core]/resources/run_binary_size_analysis.py",
"--library",
"[SLAVE_BUILD]/out/Release/lib/libskia.so",
"--githash",
diff --git a/infra/bots/recipes/swarm_housekeeper.py b/infra/bots/recipes/swarm_housekeeper.py
index bf9cabbbf9..ae31611b1e 100644
--- a/infra/bots/recipes/swarm_housekeeper.py
+++ b/infra/bots/recipes/swarm_housekeeper.py
@@ -7,11 +7,13 @@
DEPS = [
+ 'core',
'recipe_engine/path',
'recipe_engine/properties',
'recipe_engine/python',
- 'skia',
'recipe_engine/step',
+ 'run',
+ 'vars',
]
@@ -27,11 +29,11 @@ TEST_BUILDERS = {
def RunSteps(api):
# Checkout, compile, etc.
- api.skia.setup()
+ api.core.setup()
cwd = api.path['checkout']
- api.skia.run(
+ api.run(
api.step,
'android platform self-tests',
cmd=['python',
@@ -43,22 +45,23 @@ def RunSteps(api):
gsutil_path = api.path['depot_tools'].join('third_party', 'gsutil',
'gsutil')
- if not api.skia.is_trybot:
- api.skia.run(
+ if not api.vars.is_trybot:
+ api.run(
api.step,
'generate and upload doxygen',
- cmd=['python', api.skia.resource('generate_and_upload_doxygen.py'),
+ cmd=['python', api.core.resource('generate_and_upload_doxygen.py'),
gsutil_path],
cwd=cwd,
abort_on_failure=False)
- cmd = ['python', api.skia.resource('run_binary_size_analysis.py'),
- '--library', api.skia.skia_out.join('Release', 'lib', 'libskia.so'),
+ cmd = ['python', api.core.resource('run_binary_size_analysis.py'),
+ '--library', api.vars.skia_out.join(
+ 'Release', 'lib', 'libskia.so'),
'--githash', api.properties['revision'],
'--gsutil_path', gsutil_path]
- if api.skia.is_trybot:
- cmd.extend(['--issue_number', str(api.skia.m.properties['issue'])])
- api.skia.run(
+ if api.vars.is_trybot:
+ cmd.extend(['--issue_number', str(api.properties['issue'])])
+ api.run(
api.step,
'generate and upload binary size data',
cmd=cmd,
@@ -82,4 +85,6 @@ def GenTests(api):
)
if 'Trybot' in buildername:
test.properties['issue'] = '500'
+ test.properties['patchset'] = '1'
+ test.properties['rietveld'] = 'https://codereview.chromium.org'
yield test
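
The swarm_housekeeper hunks above replace the monolithic api.skia entry points with the new modules: setup lives in core, step execution goes through the callable run module, and trybot state comes from vars. A minimal usage sketch of that call pattern (step name and command are illustrative only, not taken from this CL):

def RunSteps(api):
  # Checkout, compile, etc. are now handled by the core module.
  api.core.setup()
  # Steps are driven through the callable run module instead of api.skia.run.
  api.run(
      api.step,
      'example self-test',             # illustrative step name
      cmd=['python', 'run_tests.py'],  # illustrative command
      abort_on_failure=False)
  # Trybot detection moved from api.skia.is_trybot to api.vars.is_trybot.
  if api.vars.is_trybot:
    pass  # trybot-only work, e.g. adding --issue_number, would go here
  api.run.check_failure()
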
diff --git a/infra/bots/recipes/swarm_perf.py b/infra/bots/recipes/swarm_perf.py
index a6c5e27a8a..93332b707f 100644
--- a/infra/bots/recipes/swarm_perf.py
+++ b/infra/bots/recipes/swarm_perf.py
@@ -7,12 +7,13 @@
DEPS = [
+ 'core',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/properties',
'recipe_engine/raw_io',
- 'skia',
+ 'run',
]
@@ -31,10 +32,10 @@ TEST_BUILDERS = {
def RunSteps(api):
- api.skia.setup()
- api.skia.perf_steps()
- api.skia.cleanup_steps()
- api.skia.check_failure()
+ api.core.setup()
+ api.core.perf_steps()
+ api.core.cleanup_steps()
+ api.run.check_failure()
def GenTests(api):
diff --git a/infra/bots/recipes/swarm_test.expected/legacy_skimage_version.json b/infra/bots/recipes/swarm_test.expected/legacy_skimage_version.json
deleted file mode 100644
index 3aa30b4bb9..0000000000
--- a/infra/bots/recipes/swarm_test.expected/legacy_skimage_version.json
+++ /dev/null
@@ -1,249 +0,0 @@
-[
- {
- "cmd": [
- "python",
- "-u",
- "[SLAVE_BUILD]/skia/tools/buildbot_spec.py",
- "/path/to/tmp/json",
- "Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug"
- ],
- "cwd": "[SLAVE_BUILD]/skia",
- "name": "exec buildbot_spec.py",
- "~followup_annotations": [
- "@@@STEP_LOG_LINE@json.output@{@@@",
- "@@@STEP_LOG_LINE@json.output@ \"build_targets\": [@@@",
- "@@@STEP_LOG_LINE@json.output@ \"dm\"@@@",
- "@@@STEP_LOG_LINE@json.output@ ], @@@",
- "@@@STEP_LOG_LINE@json.output@ \"builder_cfg\": {@@@",
- "@@@STEP_LOG_LINE@json.output@ \"arch\": \"x86_64\", @@@",
- "@@@STEP_LOG_LINE@json.output@ \"compiler\": \"GCC\", @@@",
- "@@@STEP_LOG_LINE@json.output@ \"configuration\": \"Debug\", @@@",
- "@@@STEP_LOG_LINE@json.output@ \"cpu_or_gpu\": \"CPU\", @@@",
- "@@@STEP_LOG_LINE@json.output@ \"cpu_or_gpu_value\": \"AVX2\", @@@",
- "@@@STEP_LOG_LINE@json.output@ \"is_trybot\": false, @@@",
- "@@@STEP_LOG_LINE@json.output@ \"model\": \"GCE\", @@@",
- "@@@STEP_LOG_LINE@json.output@ \"os\": \"Ubuntu\", @@@",
- "@@@STEP_LOG_LINE@json.output@ \"role\": \"Test\"@@@",
- "@@@STEP_LOG_LINE@json.output@ }, @@@",
- "@@@STEP_LOG_LINE@json.output@ \"configuration\": \"Debug\", @@@",
- "@@@STEP_LOG_LINE@json.output@ \"dm_flags\": [@@@",
- "@@@STEP_LOG_LINE@json.output@ \"--dummy-flags\"@@@",
- "@@@STEP_LOG_LINE@json.output@ ], @@@",
- "@@@STEP_LOG_LINE@json.output@ \"do_perf_steps\": false, @@@",
- "@@@STEP_LOG_LINE@json.output@ \"do_test_steps\": true, @@@",
- "@@@STEP_LOG_LINE@json.output@ \"env\": {@@@",
- "@@@STEP_LOG_LINE@json.output@ \"GYP_DEFINES\": \"skia_arch_type=x86_64 skia_gpu=0 skia_warnings_as_errors=0\"@@@",
- "@@@STEP_LOG_LINE@json.output@ }, @@@",
- "@@@STEP_LOG_LINE@json.output@ \"nanobench_flags\": [@@@",
- "@@@STEP_LOG_LINE@json.output@ \"--dummy-flags\"@@@",
- "@@@STEP_LOG_LINE@json.output@ ], @@@",
- "@@@STEP_LOG_LINE@json.output@ \"upload_dm_results\": true, @@@",
- "@@@STEP_LOG_LINE@json.output@ \"upload_perf_results\": false@@@",
- "@@@STEP_LOG_LINE@json.output@}@@@",
- "@@@STEP_LOG_END@json.output@@@"
- ]
- },
- {
- "cmd": [
- "python",
- "-u",
- "\nimport shutil\nimport sys\nshutil.copy(sys.argv[1], sys.argv[2])\n",
- "[SLAVE_BUILD]/skia/infra/bots/assets/skp/VERSION",
- "/path/to/tmp/"
- ],
- "name": "Get downloaded SKP VERSION"
- },
- {
- "cmd": [
- "python",
- "-u",
- "\nimport shutil\nimport sys\nshutil.copy(sys.argv[1], sys.argv[2])\n",
- "42",
- "[SLAVE_BUILD]/tmp/SKP_VERSION"
- ],
- "name": "write SKP_VERSION"
- },
- {
- "cmd": [
- "python",
- "-u",
- "\nimport shutil\nimport sys\nshutil.copy(sys.argv[1], sys.argv[2])\n",
- "[SLAVE_BUILD]/tmp/SK_IMAGE_VERSION",
- "/path/to/tmp/"
- ],
- "name": "Get downloaded SK_IMAGE_VERSION"
- },
- {
- "cmd": [
- "python",
- "-u",
- "\nimport os, sys\nfrom common import chromium_utils # Error? See https://crbug.com/584783.\n\n\nif os.path.exists(sys.argv[1]):\n chromium_utils.RemoveDirectory(sys.argv[1])\n",
- "[CUSTOM_[SWARM_OUT_DIR]]/dm"
- ],
- "env": {
- "PYTHONPATH": "[SLAVE_BUILD]/skia/infra/bots/.recipe_deps/build/scripts"
- },
- "name": "rmtree dm",
- "~followup_annotations": [
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@import os, sys@@@",
- "@@@STEP_LOG_LINE@python.inline@from common import chromium_utils # Error? See https://crbug.com/584783.@@@",
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@if os.path.exists(sys.argv[1]):@@@",
- "@@@STEP_LOG_LINE@python.inline@ chromium_utils.RemoveDirectory(sys.argv[1])@@@",
- "@@@STEP_LOG_END@python.inline@@@"
- ]
- },
- {
- "cmd": [
- "python",
- "-u",
- "\nimport sys, os\npath = sys.argv[1]\nmode = int(sys.argv[2])\nif not os.path.isdir(path):\n if os.path.exists(path):\n print \"%s exists but is not a dir\" % path\n sys.exit(1)\n os.makedirs(path, mode)\n",
- "[CUSTOM_[SWARM_OUT_DIR]]/dm",
- "511"
- ],
- "name": "makedirs dm",
- "~followup_annotations": [
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@import sys, os@@@",
- "@@@STEP_LOG_LINE@python.inline@path = sys.argv[1]@@@",
- "@@@STEP_LOG_LINE@python.inline@mode = int(sys.argv[2])@@@",
- "@@@STEP_LOG_LINE@python.inline@if not os.path.isdir(path):@@@",
- "@@@STEP_LOG_LINE@python.inline@ if os.path.exists(path):@@@",
- "@@@STEP_LOG_LINE@python.inline@ print \"%s exists but is not a dir\" % path@@@",
- "@@@STEP_LOG_LINE@python.inline@ sys.exit(1)@@@",
- "@@@STEP_LOG_LINE@python.inline@ os.makedirs(path, mode)@@@",
- "@@@STEP_LOG_END@python.inline@@@"
- ]
- },
- {
- "cmd": [
- "python",
- "-u",
- "\nimport sys, os\npath = sys.argv[1]\nmode = int(sys.argv[2])\nif not os.path.isdir(path):\n if os.path.exists(path):\n print \"%s exists but is not a dir\" % path\n sys.exit(1)\n os.makedirs(path, mode)\n",
- "[SLAVE_BUILD]/tmp",
- "511"
- ],
- "name": "makedirs tmp_dir",
- "~followup_annotations": [
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@import sys, os@@@",
- "@@@STEP_LOG_LINE@python.inline@path = sys.argv[1]@@@",
- "@@@STEP_LOG_LINE@python.inline@mode = int(sys.argv[2])@@@",
- "@@@STEP_LOG_LINE@python.inline@if not os.path.isdir(path):@@@",
- "@@@STEP_LOG_LINE@python.inline@ if os.path.exists(path):@@@",
- "@@@STEP_LOG_LINE@python.inline@ print \"%s exists but is not a dir\" % path@@@",
- "@@@STEP_LOG_LINE@python.inline@ sys.exit(1)@@@",
- "@@@STEP_LOG_LINE@python.inline@ os.makedirs(path, mode)@@@",
- "@@@STEP_LOG_END@python.inline@@@"
- ]
- },
- {
- "cmd": [
- "python",
- "-u",
- "\nimport contextlib\nimport math\nimport socket\nimport sys\nimport time\nimport urllib2\n\nHASHES_URL = 'https://gold.skia.org/_/hashes'\nRETRIES = 5\nTIMEOUT = 60\nWAIT_BASE = 15\n\nsocket.setdefaulttimeout(TIMEOUT)\nfor retry in range(RETRIES):\n try:\n with contextlib.closing(\n urllib2.urlopen(HASHES_URL, timeout=TIMEOUT)) as w:\n hashes = w.read()\n with open(sys.argv[1], 'w') as f:\n f.write(hashes)\n break\n except Exception as e:\n print 'Failed to get uninteresting hashes from %s:' % HASHES_URL\n print e\n if retry == RETRIES:\n raise\n waittime = WAIT_BASE * math.pow(2, retry)\n print 'Retry in %d seconds.' % waittime\n time.sleep(waittime)\n",
- "[SLAVE_BUILD]/tmp/uninteresting_hashes.txt"
- ],
- "cwd": "[SLAVE_BUILD]/skia",
- "env": {
- "BUILDTYPE": "Debug",
- "CHROME_HEADLESS": "1",
- "GYP_DEFINES": "skia_arch_type=x86_64 skia_gpu=0 skia_warnings_as_errors=0",
- "SKIA_OUT": "[SLAVE_BUILD]/out"
- },
- "name": "get uninteresting hashes",
- "~followup_annotations": [
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@import contextlib@@@",
- "@@@STEP_LOG_LINE@python.inline@import math@@@",
- "@@@STEP_LOG_LINE@python.inline@import socket@@@",
- "@@@STEP_LOG_LINE@python.inline@import sys@@@",
- "@@@STEP_LOG_LINE@python.inline@import time@@@",
- "@@@STEP_LOG_LINE@python.inline@import urllib2@@@",
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@HASHES_URL = 'https://gold.skia.org/_/hashes'@@@",
- "@@@STEP_LOG_LINE@python.inline@RETRIES = 5@@@",
- "@@@STEP_LOG_LINE@python.inline@TIMEOUT = 60@@@",
- "@@@STEP_LOG_LINE@python.inline@WAIT_BASE = 15@@@",
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@socket.setdefaulttimeout(TIMEOUT)@@@",
- "@@@STEP_LOG_LINE@python.inline@for retry in range(RETRIES):@@@",
- "@@@STEP_LOG_LINE@python.inline@ try:@@@",
- "@@@STEP_LOG_LINE@python.inline@ with contextlib.closing(@@@",
- "@@@STEP_LOG_LINE@python.inline@ urllib2.urlopen(HASHES_URL, timeout=TIMEOUT)) as w:@@@",
- "@@@STEP_LOG_LINE@python.inline@ hashes = w.read()@@@",
- "@@@STEP_LOG_LINE@python.inline@ with open(sys.argv[1], 'w') as f:@@@",
- "@@@STEP_LOG_LINE@python.inline@ f.write(hashes)@@@",
- "@@@STEP_LOG_LINE@python.inline@ break@@@",
- "@@@STEP_LOG_LINE@python.inline@ except Exception as e:@@@",
- "@@@STEP_LOG_LINE@python.inline@ print 'Failed to get uninteresting hashes from %s:' % HASHES_URL@@@",
- "@@@STEP_LOG_LINE@python.inline@ print e@@@",
- "@@@STEP_LOG_LINE@python.inline@ if retry == RETRIES:@@@",
- "@@@STEP_LOG_LINE@python.inline@ raise@@@",
- "@@@STEP_LOG_LINE@python.inline@ waittime = WAIT_BASE * math.pow(2, retry)@@@",
- "@@@STEP_LOG_LINE@python.inline@ print 'Retry in %d seconds.' % waittime@@@",
- "@@@STEP_LOG_LINE@python.inline@ time.sleep(waittime)@@@",
- "@@@STEP_LOG_END@python.inline@@@"
- ]
- },
- {
- "cmd": [
- "catchsegv",
- "[SLAVE_BUILD]/out/Debug/dm",
- "--undefok",
- "--resourcePath",
- "[SLAVE_BUILD]/skia/resources",
- "--skps",
- "[SLAVE_BUILD]/skp",
- "--images",
- "[SLAVE_BUILD]/images/dm",
- "--colorImages",
- "[SLAVE_BUILD]/images/colorspace",
- "--nameByHash",
- "--properties",
- "gitHash",
- "abc123",
- "master",
- "client.skia",
- "builder",
- "Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug",
- "build_number",
- "6",
- "--key",
- "arch",
- "x86_64",
- "compiler",
- "GCC",
- "configuration",
- "Debug",
- "cpu_or_gpu",
- "CPU",
- "cpu_or_gpu_value",
- "AVX2",
- "model",
- "GCE",
- "os",
- "Ubuntu",
- "--uninterestingHashesFile",
- "[SLAVE_BUILD]/tmp/uninteresting_hashes.txt",
- "--writePath",
- "[CUSTOM_[SWARM_OUT_DIR]]/dm",
- "--nogpu",
- "--dummy-flags"
- ],
- "env": {
- "BUILDTYPE": "Debug",
- "CHROME_HEADLESS": "1",
- "GYP_DEFINES": "skia_arch_type=x86_64 skia_gpu=0 skia_warnings_as_errors=0",
- "SKIA_OUT": "[SLAVE_BUILD]/out"
- },
- "name": "dm"
- },
- {
- "name": "$result",
- "recipe_result": null,
- "status_code": 0
- }
-]
\ No newline at end of file
diff --git a/infra/bots/recipes/swarm_test.expected/legacy_skp_version.json b/infra/bots/recipes/swarm_test.expected/legacy_skp_version.json
deleted file mode 100644
index d281205a03..0000000000
--- a/infra/bots/recipes/swarm_test.expected/legacy_skp_version.json
+++ /dev/null
@@ -1,249 +0,0 @@
-[
- {
- "cmd": [
- "python",
- "-u",
- "[SLAVE_BUILD]/skia/tools/buildbot_spec.py",
- "/path/to/tmp/json",
- "Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug"
- ],
- "cwd": "[SLAVE_BUILD]/skia",
- "name": "exec buildbot_spec.py",
- "~followup_annotations": [
- "@@@STEP_LOG_LINE@json.output@{@@@",
- "@@@STEP_LOG_LINE@json.output@ \"build_targets\": [@@@",
- "@@@STEP_LOG_LINE@json.output@ \"dm\"@@@",
- "@@@STEP_LOG_LINE@json.output@ ], @@@",
- "@@@STEP_LOG_LINE@json.output@ \"builder_cfg\": {@@@",
- "@@@STEP_LOG_LINE@json.output@ \"arch\": \"x86_64\", @@@",
- "@@@STEP_LOG_LINE@json.output@ \"compiler\": \"GCC\", @@@",
- "@@@STEP_LOG_LINE@json.output@ \"configuration\": \"Debug\", @@@",
- "@@@STEP_LOG_LINE@json.output@ \"cpu_or_gpu\": \"CPU\", @@@",
- "@@@STEP_LOG_LINE@json.output@ \"cpu_or_gpu_value\": \"AVX2\", @@@",
- "@@@STEP_LOG_LINE@json.output@ \"is_trybot\": false, @@@",
- "@@@STEP_LOG_LINE@json.output@ \"model\": \"GCE\", @@@",
- "@@@STEP_LOG_LINE@json.output@ \"os\": \"Ubuntu\", @@@",
- "@@@STEP_LOG_LINE@json.output@ \"role\": \"Test\"@@@",
- "@@@STEP_LOG_LINE@json.output@ }, @@@",
- "@@@STEP_LOG_LINE@json.output@ \"configuration\": \"Debug\", @@@",
- "@@@STEP_LOG_LINE@json.output@ \"dm_flags\": [@@@",
- "@@@STEP_LOG_LINE@json.output@ \"--dummy-flags\"@@@",
- "@@@STEP_LOG_LINE@json.output@ ], @@@",
- "@@@STEP_LOG_LINE@json.output@ \"do_perf_steps\": false, @@@",
- "@@@STEP_LOG_LINE@json.output@ \"do_test_steps\": true, @@@",
- "@@@STEP_LOG_LINE@json.output@ \"env\": {@@@",
- "@@@STEP_LOG_LINE@json.output@ \"GYP_DEFINES\": \"skia_arch_type=x86_64 skia_gpu=0 skia_warnings_as_errors=0\"@@@",
- "@@@STEP_LOG_LINE@json.output@ }, @@@",
- "@@@STEP_LOG_LINE@json.output@ \"nanobench_flags\": [@@@",
- "@@@STEP_LOG_LINE@json.output@ \"--dummy-flags\"@@@",
- "@@@STEP_LOG_LINE@json.output@ ], @@@",
- "@@@STEP_LOG_LINE@json.output@ \"upload_dm_results\": true, @@@",
- "@@@STEP_LOG_LINE@json.output@ \"upload_perf_results\": false@@@",
- "@@@STEP_LOG_LINE@json.output@}@@@",
- "@@@STEP_LOG_END@json.output@@@"
- ]
- },
- {
- "cmd": [
- "python",
- "-u",
- "\nimport shutil\nimport sys\nshutil.copy(sys.argv[1], sys.argv[2])\n",
- "[SLAVE_BUILD]/tmp/SKP_VERSION",
- "/path/to/tmp/"
- ],
- "name": "Get downloaded SKP_VERSION"
- },
- {
- "cmd": [
- "python",
- "-u",
- "\nimport shutil\nimport sys\nshutil.copy(sys.argv[1], sys.argv[2])\n",
- "[SLAVE_BUILD]/skia/infra/bots/assets/skimage/VERSION",
- "/path/to/tmp/"
- ],
- "name": "Get downloaded skimage VERSION"
- },
- {
- "cmd": [
- "python",
- "-u",
- "\nimport shutil\nimport sys\nshutil.copy(sys.argv[1], sys.argv[2])\n",
- "42",
- "[SLAVE_BUILD]/tmp/SK_IMAGE_VERSION"
- ],
- "name": "write SK_IMAGE_VERSION"
- },
- {
- "cmd": [
- "python",
- "-u",
- "\nimport os, sys\nfrom common import chromium_utils # Error? See https://crbug.com/584783.\n\n\nif os.path.exists(sys.argv[1]):\n chromium_utils.RemoveDirectory(sys.argv[1])\n",
- "[CUSTOM_[SWARM_OUT_DIR]]/dm"
- ],
- "env": {
- "PYTHONPATH": "[SLAVE_BUILD]/skia/infra/bots/.recipe_deps/build/scripts"
- },
- "name": "rmtree dm",
- "~followup_annotations": [
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@import os, sys@@@",
- "@@@STEP_LOG_LINE@python.inline@from common import chromium_utils # Error? See https://crbug.com/584783.@@@",
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@if os.path.exists(sys.argv[1]):@@@",
- "@@@STEP_LOG_LINE@python.inline@ chromium_utils.RemoveDirectory(sys.argv[1])@@@",
- "@@@STEP_LOG_END@python.inline@@@"
- ]
- },
- {
- "cmd": [
- "python",
- "-u",
- "\nimport sys, os\npath = sys.argv[1]\nmode = int(sys.argv[2])\nif not os.path.isdir(path):\n if os.path.exists(path):\n print \"%s exists but is not a dir\" % path\n sys.exit(1)\n os.makedirs(path, mode)\n",
- "[CUSTOM_[SWARM_OUT_DIR]]/dm",
- "511"
- ],
- "name": "makedirs dm",
- "~followup_annotations": [
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@import sys, os@@@",
- "@@@STEP_LOG_LINE@python.inline@path = sys.argv[1]@@@",
- "@@@STEP_LOG_LINE@python.inline@mode = int(sys.argv[2])@@@",
- "@@@STEP_LOG_LINE@python.inline@if not os.path.isdir(path):@@@",
- "@@@STEP_LOG_LINE@python.inline@ if os.path.exists(path):@@@",
- "@@@STEP_LOG_LINE@python.inline@ print \"%s exists but is not a dir\" % path@@@",
- "@@@STEP_LOG_LINE@python.inline@ sys.exit(1)@@@",
- "@@@STEP_LOG_LINE@python.inline@ os.makedirs(path, mode)@@@",
- "@@@STEP_LOG_END@python.inline@@@"
- ]
- },
- {
- "cmd": [
- "python",
- "-u",
- "\nimport sys, os\npath = sys.argv[1]\nmode = int(sys.argv[2])\nif not os.path.isdir(path):\n if os.path.exists(path):\n print \"%s exists but is not a dir\" % path\n sys.exit(1)\n os.makedirs(path, mode)\n",
- "[SLAVE_BUILD]/tmp",
- "511"
- ],
- "name": "makedirs tmp_dir",
- "~followup_annotations": [
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@import sys, os@@@",
- "@@@STEP_LOG_LINE@python.inline@path = sys.argv[1]@@@",
- "@@@STEP_LOG_LINE@python.inline@mode = int(sys.argv[2])@@@",
- "@@@STEP_LOG_LINE@python.inline@if not os.path.isdir(path):@@@",
- "@@@STEP_LOG_LINE@python.inline@ if os.path.exists(path):@@@",
- "@@@STEP_LOG_LINE@python.inline@ print \"%s exists but is not a dir\" % path@@@",
- "@@@STEP_LOG_LINE@python.inline@ sys.exit(1)@@@",
- "@@@STEP_LOG_LINE@python.inline@ os.makedirs(path, mode)@@@",
- "@@@STEP_LOG_END@python.inline@@@"
- ]
- },
- {
- "cmd": [
- "python",
- "-u",
- "\nimport contextlib\nimport math\nimport socket\nimport sys\nimport time\nimport urllib2\n\nHASHES_URL = 'https://gold.skia.org/_/hashes'\nRETRIES = 5\nTIMEOUT = 60\nWAIT_BASE = 15\n\nsocket.setdefaulttimeout(TIMEOUT)\nfor retry in range(RETRIES):\n try:\n with contextlib.closing(\n urllib2.urlopen(HASHES_URL, timeout=TIMEOUT)) as w:\n hashes = w.read()\n with open(sys.argv[1], 'w') as f:\n f.write(hashes)\n break\n except Exception as e:\n print 'Failed to get uninteresting hashes from %s:' % HASHES_URL\n print e\n if retry == RETRIES:\n raise\n waittime = WAIT_BASE * math.pow(2, retry)\n print 'Retry in %d seconds.' % waittime\n time.sleep(waittime)\n",
- "[SLAVE_BUILD]/tmp/uninteresting_hashes.txt"
- ],
- "cwd": "[SLAVE_BUILD]/skia",
- "env": {
- "BUILDTYPE": "Debug",
- "CHROME_HEADLESS": "1",
- "GYP_DEFINES": "skia_arch_type=x86_64 skia_gpu=0 skia_warnings_as_errors=0",
- "SKIA_OUT": "[SLAVE_BUILD]/out"
- },
- "name": "get uninteresting hashes",
- "~followup_annotations": [
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@import contextlib@@@",
- "@@@STEP_LOG_LINE@python.inline@import math@@@",
- "@@@STEP_LOG_LINE@python.inline@import socket@@@",
- "@@@STEP_LOG_LINE@python.inline@import sys@@@",
- "@@@STEP_LOG_LINE@python.inline@import time@@@",
- "@@@STEP_LOG_LINE@python.inline@import urllib2@@@",
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@HASHES_URL = 'https://gold.skia.org/_/hashes'@@@",
- "@@@STEP_LOG_LINE@python.inline@RETRIES = 5@@@",
- "@@@STEP_LOG_LINE@python.inline@TIMEOUT = 60@@@",
- "@@@STEP_LOG_LINE@python.inline@WAIT_BASE = 15@@@",
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@socket.setdefaulttimeout(TIMEOUT)@@@",
- "@@@STEP_LOG_LINE@python.inline@for retry in range(RETRIES):@@@",
- "@@@STEP_LOG_LINE@python.inline@ try:@@@",
- "@@@STEP_LOG_LINE@python.inline@ with contextlib.closing(@@@",
- "@@@STEP_LOG_LINE@python.inline@ urllib2.urlopen(HASHES_URL, timeout=TIMEOUT)) as w:@@@",
- "@@@STEP_LOG_LINE@python.inline@ hashes = w.read()@@@",
- "@@@STEP_LOG_LINE@python.inline@ with open(sys.argv[1], 'w') as f:@@@",
- "@@@STEP_LOG_LINE@python.inline@ f.write(hashes)@@@",
- "@@@STEP_LOG_LINE@python.inline@ break@@@",
- "@@@STEP_LOG_LINE@python.inline@ except Exception as e:@@@",
- "@@@STEP_LOG_LINE@python.inline@ print 'Failed to get uninteresting hashes from %s:' % HASHES_URL@@@",
- "@@@STEP_LOG_LINE@python.inline@ print e@@@",
- "@@@STEP_LOG_LINE@python.inline@ if retry == RETRIES:@@@",
- "@@@STEP_LOG_LINE@python.inline@ raise@@@",
- "@@@STEP_LOG_LINE@python.inline@ waittime = WAIT_BASE * math.pow(2, retry)@@@",
- "@@@STEP_LOG_LINE@python.inline@ print 'Retry in %d seconds.' % waittime@@@",
- "@@@STEP_LOG_LINE@python.inline@ time.sleep(waittime)@@@",
- "@@@STEP_LOG_END@python.inline@@@"
- ]
- },
- {
- "cmd": [
- "catchsegv",
- "[SLAVE_BUILD]/out/Debug/dm",
- "--undefok",
- "--resourcePath",
- "[SLAVE_BUILD]/skia/resources",
- "--skps",
- "[SLAVE_BUILD]/skps",
- "--images",
- "[SLAVE_BUILD]/skimage/dm",
- "--colorImages",
- "[SLAVE_BUILD]/skimage/colorspace",
- "--nameByHash",
- "--properties",
- "gitHash",
- "abc123",
- "master",
- "client.skia",
- "builder",
- "Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug",
- "build_number",
- "6",
- "--key",
- "arch",
- "x86_64",
- "compiler",
- "GCC",
- "configuration",
- "Debug",
- "cpu_or_gpu",
- "CPU",
- "cpu_or_gpu_value",
- "AVX2",
- "model",
- "GCE",
- "os",
- "Ubuntu",
- "--uninterestingHashesFile",
- "[SLAVE_BUILD]/tmp/uninteresting_hashes.txt",
- "--writePath",
- "[CUSTOM_[SWARM_OUT_DIR]]/dm",
- "--nogpu",
- "--dummy-flags"
- ],
- "env": {
- "BUILDTYPE": "Debug",
- "CHROME_HEADLESS": "1",
- "GYP_DEFINES": "skia_arch_type=x86_64 skia_gpu=0 skia_warnings_as_errors=0",
- "SKIA_OUT": "[SLAVE_BUILD]/out"
- },
- "name": "dm"
- },
- {
- "name": "$result",
- "recipe_result": null,
- "status_code": 0
- }
-]
\ No newline at end of file
diff --git a/infra/bots/recipes/swarm_test.py b/infra/bots/recipes/swarm_test.py
index a56da1ea5d..c42a7901ba 100644
--- a/infra/bots/recipes/swarm_test.py
+++ b/infra/bots/recipes/swarm_test.py
@@ -7,12 +7,13 @@
DEPS = [
+ 'core',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/properties',
'recipe_engine/raw_io',
- 'skia',
+ 'run',
]
@@ -33,10 +34,10 @@ TEST_BUILDERS = {
def RunSteps(api):
- api.skia.setup()
- api.skia.test_steps()
- api.skia.cleanup_steps()
- api.skia.check_failure()
+ api.core.setup()
+ api.core.test_steps()
+ api.core.cleanup_steps()
+ api.run.check_failure()
def GenTests(api):
@@ -307,38 +308,3 @@ def GenTests(api):
) +
api.platform('win', 64)
)
-
- builder = 'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug'
- yield (
- api.test('legacy_skimage_version') +
- api.properties(buildername=builder,
- mastername='client.skia',
- slavename='skiabot-linux-swarm-000',
- buildnumber=6,
- revision='abc123',
- path_config='kitchen',
- swarm_out_dir='[SWARM_OUT_DIR]') +
- api.path.exists(
- api.path['slave_build'].join('skia'),
- api.path['slave_build'].join('skia', 'infra', 'bots', 'assets',
- 'skp', 'VERSION'),
- api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt')
- )
- )
-
- yield (
- api.test('legacy_skp_version') +
- api.properties(buildername=builder,
- mastername='client.skia',
- slavename='skiabot-linux-swarm-000',
- buildnumber=6,
- revision='abc123',
- path_config='kitchen',
- swarm_out_dir='[SWARM_OUT_DIR]') +
- api.path.exists(
- api.path['slave_build'].join('skia'),
- api.path['slave_build'].join('skia', 'infra', 'bots', 'assets',
- 'skimage', 'VERSION'),
- api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt')
- )
- )
diff --git a/infra/bots/recipes/swarm_trigger.expected/Perf-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Release-Trybot.json b/infra/bots/recipes/swarm_trigger.expected/Perf-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Release-Trybot.json
index 5d68baa509..75359f6583 100644
--- a/infra/bots/recipes/swarm_trigger.expected/Perf-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Release-Trybot.json
+++ b/infra/bots/recipes/swarm_trigger.expected/Perf-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Release-Trybot.json
@@ -907,7 +907,7 @@
"cmd": [
"python",
"-u",
- "RECIPE_MODULE[skia::skia]/resources/upload_bench_results.py",
+ "RECIPE_MODULE[skia::core]/resources/upload_bench_results.py",
"Perf-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Release-Trybot",
"5",
"[SLAVE_BUILD]/perfdata/Perf-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Release-Trybot/data",
diff --git a/infra/bots/recipes/swarm_trigger.expected/Test-Android-GCC-NVIDIA_Shield-GPU-TegraX1-Arm64-Debug-Vulkan.json b/infra/bots/recipes/swarm_trigger.expected/Test-Android-GCC-NVIDIA_Shield-GPU-TegraX1-Arm64-Debug-Vulkan.json
index 9a8507fe14..cd02903df2 100644
--- a/infra/bots/recipes/swarm_trigger.expected/Test-Android-GCC-NVIDIA_Shield-GPU-TegraX1-Arm64-Debug-Vulkan.json
+++ b/infra/bots/recipes/swarm_trigger.expected/Test-Android-GCC-NVIDIA_Shield-GPU-TegraX1-Arm64-Debug-Vulkan.json
@@ -878,7 +878,7 @@
"cmd": [
"python",
"-u",
- "RECIPE_MODULE[skia::skia]/resources/upload_dm_results.py",
+ "RECIPE_MODULE[skia::core]/resources/upload_dm_results.py",
"[SLAVE_BUILD]/dm",
"abc123",
"Test-Android-GCC-NVIDIA_Shield-GPU-TegraX1-Arm64-Debug-Vulkan",
diff --git a/infra/bots/recipes/swarm_trigger.expected/Test-Android-GCC-Nexus7v2-GPU-Tegra3-Arm7-Release.json b/infra/bots/recipes/swarm_trigger.expected/Test-Android-GCC-Nexus7v2-GPU-Tegra3-Arm7-Release.json
index 953feb3a18..dcbb9bbaca 100644
--- a/infra/bots/recipes/swarm_trigger.expected/Test-Android-GCC-Nexus7v2-GPU-Tegra3-Arm7-Release.json
+++ b/infra/bots/recipes/swarm_trigger.expected/Test-Android-GCC-Nexus7v2-GPU-Tegra3-Arm7-Release.json
@@ -877,7 +877,7 @@
"cmd": [
"python",
"-u",
- "RECIPE_MODULE[skia::skia]/resources/upload_dm_results.py",
+ "RECIPE_MODULE[skia::core]/resources/upload_dm_results.py",
"[SLAVE_BUILD]/dm",
"abc123",
"Test-Android-GCC-Nexus7v2-GPU-Tegra3-Arm7-Release",
diff --git a/infra/bots/recipes/swarm_trigger.expected/Test-Mac-Clang-MacMini6.2-CPU-AVX-x86_64-Release.json b/infra/bots/recipes/swarm_trigger.expected/Test-Mac-Clang-MacMini6.2-CPU-AVX-x86_64-Release.json
index 409aadf20c..e266e8bf9a 100644
--- a/infra/bots/recipes/swarm_trigger.expected/Test-Mac-Clang-MacMini6.2-CPU-AVX-x86_64-Release.json
+++ b/infra/bots/recipes/swarm_trigger.expected/Test-Mac-Clang-MacMini6.2-CPU-AVX-x86_64-Release.json
@@ -868,7 +868,7 @@
"cmd": [
"python",
"-u",
- "RECIPE_MODULE[skia::skia]/resources/upload_dm_results.py",
+ "RECIPE_MODULE[skia::core]/resources/upload_dm_results.py",
"[SLAVE_BUILD]/dm",
"abc123",
"Test-Mac-Clang-MacMini6.2-CPU-AVX-x86_64-Release",
diff --git a/infra/bots/recipes/swarm_trigger.expected/Test-Ubuntu-Clang-GCE-CPU-AVX2-x86_64-Coverage-Trybot.json b/infra/bots/recipes/swarm_trigger.expected/Test-Ubuntu-Clang-GCE-CPU-AVX2-x86_64-Coverage-Trybot.json
index c6d06e5edd..84d2ef59b7 100644
--- a/infra/bots/recipes/swarm_trigger.expected/Test-Ubuntu-Clang-GCE-CPU-AVX2-x86_64-Coverage-Trybot.json
+++ b/infra/bots/recipes/swarm_trigger.expected/Test-Ubuntu-Clang-GCE-CPU-AVX2-x86_64-Coverage-Trybot.json
@@ -621,7 +621,7 @@
"cmd": [
"python",
"-u",
- "RECIPE_MODULE[skia::skia]/resources/upload_bench_results.py",
+ "RECIPE_MODULE[skia::core]/resources/upload_bench_results.py",
"Test-Ubuntu-Clang-GCE-CPU-AVX2-x86_64-Coverage-Trybot",
"5",
"[SLAVE_BUILD]/swarming_temp_dir/outputs/test_skia/0",
diff --git a/infra/bots/recipes/swarm_trigger.expected/Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug.json b/infra/bots/recipes/swarm_trigger.expected/Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug.json
index 97a49aa7c7..fbe10be824 100644
--- a/infra/bots/recipes/swarm_trigger.expected/Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug.json
+++ b/infra/bots/recipes/swarm_trigger.expected/Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug.json
@@ -864,7 +864,7 @@
"cmd": [
"python",
"-u",
- "RECIPE_MODULE[skia::skia]/resources/upload_dm_results.py",
+ "RECIPE_MODULE[skia::core]/resources/upload_dm_results.py",
"[SLAVE_BUILD]/dm",
"abc123",
"Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug",
diff --git a/infra/bots/recipes/swarm_trigger.expected/Test-Win8-MSVC-ShuttleA-GPU-HD7770-x86_64-Release.json b/infra/bots/recipes/swarm_trigger.expected/Test-Win8-MSVC-ShuttleA-GPU-HD7770-x86_64-Release.json
index 97c64a6501..97be43115a 100644
--- a/infra/bots/recipes/swarm_trigger.expected/Test-Win8-MSVC-ShuttleA-GPU-HD7770-x86_64-Release.json
+++ b/infra/bots/recipes/swarm_trigger.expected/Test-Win8-MSVC-ShuttleA-GPU-HD7770-x86_64-Release.json
@@ -873,7 +873,7 @@
"cmd": [
"python",
"-u",
- "RECIPE_MODULE[skia::skia]/resources/upload_dm_results.py",
+ "RECIPE_MODULE[skia::core]/resources/upload_dm_results.py",
"[SLAVE_BUILD]/dm",
"abc123",
"Test-Win8-MSVC-ShuttleA-GPU-HD7770-x86_64-Release",
diff --git a/infra/bots/recipes/swarm_trigger.expected/Test-Win8-MSVC-ShuttleB-CPU-AVX2-x86_64-Release.json b/infra/bots/recipes/swarm_trigger.expected/Test-Win8-MSVC-ShuttleB-CPU-AVX2-x86_64-Release.json
index d3ad377f13..fae8ed8aae 100644
--- a/infra/bots/recipes/swarm_trigger.expected/Test-Win8-MSVC-ShuttleB-CPU-AVX2-x86_64-Release.json
+++ b/infra/bots/recipes/swarm_trigger.expected/Test-Win8-MSVC-ShuttleB-CPU-AVX2-x86_64-Release.json
@@ -876,7 +876,7 @@
"cmd": [
"python",
"-u",
- "RECIPE_MODULE[skia::skia]/resources/upload_dm_results.py",
+ "RECIPE_MODULE[skia::core]/resources/upload_dm_results.py",
"[SLAVE_BUILD]/dm",
"abc123",
"Test-Win8-MSVC-ShuttleB-CPU-AVX2-x86_64-Release",
diff --git a/infra/bots/recipes/swarm_trigger.expected/Test-iOS-Clang-iPad4-GPU-SGX554-Arm7-Release.json b/infra/bots/recipes/swarm_trigger.expected/Test-iOS-Clang-iPad4-GPU-SGX554-Arm7-Release.json
index bdaf37a74d..88eb30b16d 100644
--- a/infra/bots/recipes/swarm_trigger.expected/Test-iOS-Clang-iPad4-GPU-SGX554-Arm7-Release.json
+++ b/infra/bots/recipes/swarm_trigger.expected/Test-iOS-Clang-iPad4-GPU-SGX554-Arm7-Release.json
@@ -867,7 +867,7 @@
"cmd": [
"python",
"-u",
- "RECIPE_MODULE[skia::skia]/resources/upload_dm_results.py",
+ "RECIPE_MODULE[skia::core]/resources/upload_dm_results.py",
"[SLAVE_BUILD]/dm",
"abc123",
"Test-iOS-Clang-iPad4-GPU-SGX554-Arm7-Release",
diff --git a/infra/bots/recipes/swarm_trigger.py b/infra/bots/recipes/swarm_trigger.py
index 344a26922a..d3dbbc08a2 100644
--- a/infra/bots/recipes/swarm_trigger.py
+++ b/infra/bots/recipes/swarm_trigger.py
@@ -10,6 +10,7 @@ import json
DEPS = [
+ 'core',
'build/file',
'build/gsutil',
'depot_tools/depot_tools',
@@ -22,8 +23,9 @@ DEPS = [
'recipe_engine/raw_io',
'recipe_engine/step',
'recipe_engine/time',
- 'skia',
- 'skia_swarming',
+ 'run',
+ 'swarming',
+ 'vars',
]
@@ -196,7 +198,7 @@ def trigger_task(api, task_name, builder, master, slave, buildnumber,
isolate_file = 'coverage_skia.isolate'
if 'RecreateSKPs' in builder:
isolate_file = 'compile_skia.isolate'
- return api.skia_swarming.isolate_and_trigger_task(
+ return api.swarming.isolate_and_trigger_task(
infrabots_dir.join(isolate_file),
isolate_base_dir,
'%s_skia' % task_name,
@@ -231,7 +233,7 @@ def checkout_steps(api):
# Write a fake .gclient file if none exists. This is required by .isolates.
dot_gclient = api.path['slave_build'].join('.gclient')
if not api.path.exists(dot_gclient):
- api.skia._writefile(dot_gclient, '')
+ api.run.writefile(dot_gclient, '')
fix_filemodes(api, api.path['checkout'])
return got_revision
@@ -252,7 +254,7 @@ def housekeeper_swarm(api, builder_spec, got_revision, infrabots_dir,
idempotent=False,
store_output=False,
extra_isolate_hashes=extra_isolate_hashes)
- return api.skia_swarming.collect_swarming_task(task)
+ return api.swarming.collect_swarming_task(task)
def recreate_skps_swarm(api, builder_spec, got_revision, infrabots_dir,
@@ -270,7 +272,7 @@ def recreate_skps_swarm(api, builder_spec, got_revision, infrabots_dir,
idempotent=False,
store_output=False,
extra_isolate_hashes=extra_isolate_hashes)
- return api.skia_swarming.collect_swarming_task(task)
+ return api.swarming.collect_swarming_task(task)
def infra_swarm(api, got_revision, infrabots_dir, extra_isolate_hashes):
@@ -294,7 +296,7 @@ def infra_swarm(api, got_revision, infrabots_dir, extra_isolate_hashes):
idempotent=False,
store_output=False,
extra_isolate_hashes=extra_isolate_hashes)
- return api.skia_swarming.collect_swarming_task(task)
+ return api.swarming.collect_swarming_task(task)
def compile_steps_swarm(api, builder_spec, got_revision, infrabots_dir,
@@ -303,7 +305,7 @@ def compile_steps_swarm(api, builder_spec, got_revision, infrabots_dir,
builder_spec)
compile_builder_spec = builder_spec
if builder_name != api.properties['buildername']:
- compile_builder_spec = api.skia.get_builder_spec(
+ compile_builder_spec = api.core.get_builder_spec(
api.path['slave_build'].join('skia'), builder_name)
extra_hashes = extra_isolate_hashes[:]
@@ -311,7 +313,7 @@ def compile_steps_swarm(api, builder_spec, got_revision, infrabots_dir,
# Windows bots require a toolchain.
if 'Win' in builder_name:
version_file = infrabots_dir.join('assets', 'win_toolchain', 'VERSION')
- version = api.skia._readfile(version_file,
+ version = api.run.readfile(version_file,
name='read win_toolchain VERSION',
test_data='0').rstrip()
version = 'version:%s' % version
@@ -342,7 +344,7 @@ def compile_steps_swarm(api, builder_spec, got_revision, infrabots_dir,
cipd_packages=cipd_packages)
# Wait for compile to finish, record the results hash.
- return api.skia_swarming.collect_swarming_task_isolate_hash(task)
+ return api.swarming.collect_swarming_task_isolate_hash(task)
def get_timeouts(builder_cfg):
@@ -388,8 +390,8 @@ def perf_steps_collect(api, task, upload_perf_results, got_revision,
is_trybot):
"""Wait for perf steps to finish and upload results."""
# Wait for nanobench to finish, download the results.
- api.skia.rmtree(task.task_output_dir)
- api.skia_swarming.collect_swarming_task(task)
+ api.run.rmtree(task.task_output_dir)
+ api.swarming.collect_swarming_task(task)
# Upload the results.
if upload_perf_results:
@@ -397,7 +399,7 @@ def perf_steps_collect(api, task, upload_perf_results, got_revision,
'perfdata', api.properties['buildername'], 'data')
git_timestamp = api.git.get_timestamp(test_data='1408633190',
infra_step=True)
- api.skia.rmtree(perf_data_dir)
+ api.run.rmtree(perf_data_dir)
api.file.makedirs('perf_dir', perf_data_dir, infra_step=True)
src_results_file = task.task_output_dir.join(
'0', 'perfdata', api.properties['buildername'], 'data',
@@ -416,7 +418,7 @@ def perf_steps_collect(api, task, upload_perf_results, got_revision,
upload_args.append(api.properties['issue'])
api.python(
'Upload perf results',
- script=api.skia.resource('upload_bench_results.py'),
+ script=api.core.resource('upload_bench_results.py'),
args=upload_args,
cwd=api.path['checkout'],
infra_step=True)
@@ -448,20 +450,20 @@ def test_steps_collect(api, task, upload_dm_results, got_revision, is_trybot,
builder_cfg):
"""Collect the test results from Swarming."""
# Wait for tests to finish, download the results.
- api.skia.rmtree(task.task_output_dir)
- api.skia_swarming.collect_swarming_task(task)
+ api.run.rmtree(task.task_output_dir)
+ api.swarming.collect_swarming_task(task)
# Upload the results.
if upload_dm_results:
dm_dir = api.path['slave_build'].join('dm')
dm_src = task.task_output_dir.join('0', 'dm')
- api.skia.rmtree(dm_dir)
+ api.run.rmtree(dm_dir)
api.file.copytree('dm_dir', dm_src, dm_dir, infra_step=True)
# Upload them to Google Storage.
api.python(
'Upload DM Results',
- script=api.skia.resource('upload_dm_results.py'),
+ script=api.core.resource('upload_dm_results.py'),
args=[
dm_dir,
got_revision,
@@ -471,7 +473,7 @@ def test_steps_collect(api, task, upload_dm_results, got_revision, is_trybot,
api.path['slave_build'].join('skia', 'common', 'py', 'utils'),
],
cwd=api.path['checkout'],
- env=api.skia.gsutil_env('chromium-skia-gm.boto'),
+ env=api.vars.gsutil_env('chromium-skia-gm.boto'),
infra_step=True)
if builder_cfg['configuration'] == 'Coverage':
@@ -522,10 +524,10 @@ def upload_coverage_results(api, task, got_revision, is_trybot):
upload_args.append(api.properties['issue'])
api.python(
'upload nanobench coverage results',
- script=api.skia.resource('upload_bench_results.py'),
+ script=api.core.resource('upload_bench_results.py'),
args=upload_args,
cwd=api.path['checkout'],
- env=api.skia.gsutil_env('chromium-skia-gm.boto'),
+ env=api.vars.gsutil_env('chromium-skia-gm.boto'),
infra_step=True)
# Transform the coverage_by_line_${git_hash}.json file received from
@@ -552,7 +554,7 @@ def upload_coverage_results(api, task, got_revision, is_trybot):
def cipd_pkg(api, infrabots_dir, asset_name):
"""Find and return the CIPD package info for the given asset."""
version_file = infrabots_dir.join('assets', asset_name, 'VERSION')
- version = api.skia._readfile(version_file,
+ version = api.run.readfile(version_file,
name='read %s VERSION' % asset_name,
test_data='0').rstrip()
version = 'version:%s' % version
@@ -584,7 +586,7 @@ def RunSteps(api):
got_revision = checkout_steps(api)
infrabots_dir = api.path['checkout'].join('infra', 'bots')
- api.skia_swarming.setup(
+ api.swarming.setup(
infrabots_dir.join('tools', 'luci-go'),
swarming_rev='')
@@ -601,7 +603,7 @@ def RunSteps(api):
if 'Infra' in api.properties['buildername']:
return infra_swarm(api, got_revision, infrabots_dir, extra_hashes)
- builder_spec = api.skia.get_builder_spec(api.path['checkout'],
+ builder_spec = api.core.get_builder_spec(api.path['checkout'],
api.properties['buildername'])
builder_cfg = builder_spec['builder_cfg']
@@ -609,8 +611,6 @@ def RunSteps(api):
recreate_skps_swarm(api, builder_spec, got_revision, infrabots_dir,
extra_hashes)
return
-
- # Android bots require an SDK.
if 'Android' in api.properties['buildername']:
compile_cipd_deps.append(cipd_pkg(api, infrabots_dir, 'android_sdk'))