aboutsummaryrefslogtreecommitdiffhomepage
path: root/infra/bots/recipe_modules/swarming/resources
diff options
context:
space:
mode:
Diffstat (limited to 'infra/bots/recipe_modules/swarming/resources')
-rwxr-xr-xinfra/bots/recipe_modules/swarming/resources/collect_task.py159
-rwxr-xr-xinfra/bots/recipe_modules/swarming/resources/noop_merge.py46
-rwxr-xr-xinfra/bots/recipe_modules/swarming/resources/results_merger.py278
-rwxr-xr-xinfra/bots/recipe_modules/swarming/resources/standard_gtest_merge.py198
-rwxr-xr-xinfra/bots/recipe_modules/swarming/resources/standard_isolated_script_merge.py45
5 files changed, 726 insertions, 0 deletions
diff --git a/infra/bots/recipe_modules/swarming/resources/collect_task.py b/infra/bots/recipe_modules/swarming/resources/collect_task.py
new file mode 100755
index 0000000000..cb4f15e96a
--- /dev/null
+++ b/infra/bots/recipe_modules/swarming/resources/collect_task.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import json
+import logging
+import os
+import subprocess
+import sys
+
+
def collect_task(
    collect_cmd, merge_script, build_properties, merge_arguments,
    task_output_dir, output_json):
  """Collect and merge the results of a task.

  This is a relatively thin wrapper script around a `swarming.py collect`
  command and a subsequent results merge to ensure that the recipe system
  treats them as a single step. The results merge can either be the default
  one provided by results_merger or a python script provided as merge_script.

  Args:
    collect_cmd: The `swarming.py collect` command to run. Should not contain
      a --task-output-dir argument.
    merge_script: A merge/postprocessing script that should be run to
      merge the results. This script will be invoked as

        <merge_script> \
            [--build-properties <string JSON>] \
            [merge arguments...] \
            --summary-json <summary json> \
            -o <merged json path> \
            <shard json>...

      where the merge arguments are the contents of merge_arguments_json.
    build_properties: A string containing build information to
      pass to the merge script in JSON form.
    merge_arguments: A string containing additional arguments to pass to
      the merge script in JSON form.
    task_output_dir: A path to a directory in which swarming will write the
      output of the task, including a summary JSON and all of the individual
      shard results.
    output_json: A path to a JSON file to which the merged results should be
      written. The merged results should be in the JSON Results File Format
      (https://www.chromium.org/developers/the-json-test-results-format)
      and may optionally contain a top level "links" field that may contain a
      dict mapping link text to URLs, for a set of links that will be included
      in the buildbot output.
  Returns:
    The exit code of collect_cmd or merge_cmd.
  """
  logging.debug('Using task_output_dir: %r', task_output_dir)
  if os.path.exists(task_output_dir):
    # logging.warn is deprecated; logging.warning is the supported spelling
    # on both Python 2.7 and Python 3.
    logging.warning('task_output_dir %r already exists!', task_output_dir)
    existing_contents = []
    try:
      for p in os.listdir(task_output_dir):
        existing_contents.append(os.path.join(task_output_dir, p))
    except (OSError, IOError) as e:
      # Best-effort listing: a failure here only degrades the log message.
      logging.error('Error while examining existing task_output_dir: %s', e)

    logging.warning('task_output_dir existing content: %r', existing_contents)

  collect_cmd.extend(['--task-output-dir', task_output_dir])

  logging.info('collect_cmd: %s', ' '.join(collect_cmd))
  collect_result = subprocess.call(collect_cmd)
  if collect_result != 0:
    logging.warning('collect_cmd had non-zero return code: %s', collect_result)

  task_output_dir_contents = []
  try:
    task_output_dir_contents.extend(
        os.path.join(task_output_dir, p)
        for p in os.listdir(task_output_dir))
  except (OSError, IOError) as e:
    logging.error('Error while processing task_output_dir: %s', e)

  logging.debug('Contents of task_output_dir: %r', task_output_dir_contents)
  if not task_output_dir_contents:
    logging.warning(
        'No files found in task_output_dir: %r',
        task_output_dir)

  # Each shard writes its results to <task_output_dir>/<shard>/output.json.
  task_output_subdirs = (
      p for p in task_output_dir_contents
      if os.path.isdir(p))
  shard_json_files = [
      os.path.join(subdir, 'output.json')
      for subdir in task_output_subdirs]
  extant_shard_json_files = [
      f for f in shard_json_files if os.path.exists(f)]

  if shard_json_files != extant_shard_json_files:
    logging.warning(
        'Expected output.json file missing: %r\nFound: %r\nExpected: %r\n',
        set(shard_json_files) - set(extant_shard_json_files),
        extant_shard_json_files,
        shard_json_files)

  if not extant_shard_json_files:
    logging.warning(
        'No shard json files found in task_output_dir: %r\nFound %r',
        task_output_dir, task_output_dir_contents)

  logging.debug('Found shard_json_files: %r', shard_json_files)

  # summary.json is written by swarming.py collect itself.
  summary_json_file = os.path.join(task_output_dir, 'summary.json')

  merge_cmd = [sys.executable, merge_script]
  if build_properties:
    merge_cmd.extend(('--build-properties', build_properties))
  if os.path.exists(summary_json_file):
    merge_cmd.extend(('--summary-json', summary_json_file))
  else:
    logging.warning('Summary json file missing: %r', summary_json_file)
  if merge_arguments:
    merge_cmd.extend(json.loads(merge_arguments))
  merge_cmd.extend(('-o', output_json))
  merge_cmd.extend(extant_shard_json_files)

  logging.info('merge_cmd: %s', ' '.join(merge_cmd))
  merge_result = subprocess.call(merge_cmd)
  if merge_result != 0:
    logging.warning('merge_cmd had non-zero return code: %s', merge_result)

  if not os.path.exists(output_json):
    logging.warning(
        'merge_cmd did not create output_json file: %r', output_json)

  # Report the first failure: collect errors take precedence over merge ones.
  return collect_result or merge_result
+
+
def main():
  """Parses command-line flags and runs the collect-and-merge step.

  Returns:
    The exit code of the collect or merge command.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--build-properties')
  parser.add_argument('--merge-additional-args')
  parser.add_argument('--merge-script', required=True)
  parser.add_argument('--task-output-dir', required=True)
  parser.add_argument('-o', '--output-json', required=True)
  parser.add_argument('--verbose', action='store_true')
  parser.add_argument('collect_cmd', nargs='+')

  opts = parser.parse_args()
  if opts.verbose:
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)

  return collect_task(
      opts.collect_cmd,
      opts.merge_script,
      opts.build_properties,
      opts.merge_additional_args,
      opts.task_output_dir,
      opts.output_json)


if __name__ == '__main__':
  sys.exit(main())
diff --git a/infra/bots/recipe_modules/swarming/resources/noop_merge.py b/infra/bots/recipe_modules/swarming/resources/noop_merge.py
new file mode 100755
index 0000000000..740e0d3b32
--- /dev/null
+++ b/infra/bots/recipe_modules/swarming/resources/noop_merge.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import json
+import shutil
+import sys
+
+
def noop_merge(output_json, jsons_to_merge):
  """Use the first supplied JSON as the output JSON.

  Primarily intended for unsharded tasks.

  Args:
    output_json: A path to a JSON file to which the results should be written.
    jsons_to_merge: A list of paths to JSON files.

  Returns:
    0 on success, 1 if more than one JSON was provided.
  """
  if len(jsons_to_merge) > 1:
    # sys.stderr.write instead of the Python 2-only "print >> sys.stderr"
    # statement, so this script also runs under Python 3.
    sys.stderr.write(
        'Multiple JSONs provided: %s\n' % ','.join(jsons_to_merge))
    return 1
  if jsons_to_merge:
    shutil.copyfile(jsons_to_merge[0], output_json)
  else:
    # No shard output at all: still emit a valid (empty) JSON object.
    with open(output_json, 'w') as f:
      json.dump({}, f)
  return 0
+
+
def main(raw_args):
  """Command-line entry point for the no-op merge.

  Args:
    raw_args: Command-line arguments, excluding the program name.

  Returns:
    The exit code from noop_merge.
  """
  arg_parser = argparse.ArgumentParser()
  arg_parser.add_argument('--build-properties', help=argparse.SUPPRESS)
  arg_parser.add_argument('--summary-json', help=argparse.SUPPRESS)
  arg_parser.add_argument('-o', '--output-json', required=True)
  arg_parser.add_argument('jsons_to_merge', nargs='*')

  parsed = arg_parser.parse_args(raw_args)
  return noop_merge(parsed.output_json, parsed.jsons_to_merge)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/infra/bots/recipe_modules/swarming/resources/results_merger.py b/infra/bots/recipe_modules/swarming/resources/results_merger.py
new file mode 100755
index 0000000000..3ea045345e
--- /dev/null
+++ b/infra/bots/recipe_modules/swarming/resources/results_merger.py
@@ -0,0 +1,278 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import copy
+import json
+import sys
+
# These fields must appear in the test result output of every shard.
REQUIRED = {
    'interrupted',
    'num_failures_by_type',
    'seconds_since_epoch',
    'tests',
    }

# These fields are optional, but must have the same value on all shards.
OPTIONAL_MATCHING = (
    'builder_name',
    'build_number',
    'chromium_revision',
    'has_pretty_patch',
    'has_wdiff',
    'path_delimiter',
    'pixel_tests_enabled',
    'random_order_seed',
    )

# These fields are optional and are not required to match across shards;
# the value from the last shard that supplies one wins.
OPTIONAL_IGNORED = (
    'layout_tests_dir',
    )

# These fields are optional and will be summed together across shards.
OPTIONAL_COUNTS = (
    'fixable',
    'num_flaky',
    'num_passes',
    'num_regressions',
    'skipped',
    'skips',
    )
+
+
class MergeException(Exception):
  """Raised when shard results cannot be merged."""
  pass
+
+
def merge_test_results(shard_results_list):
  """Merge a list of shard results into one result dictionary.

  Args:
    shard_results_list: list of results to merge. All the results must have the
      same format. Supported format are simplified JSON format & Chromium JSON
      test results format version 3 (see
      https://www.chromium.org/developers/the-json-test-results-format)

  Returns:
    a dictionary that represent the merged results. Its format follow the same
    format of all results in |shard_results_list|.
  """
  if not shard_results_list:
    return {}

  # The full JSON test results format always carries 'seconds_since_epoch';
  # its presence in the first shard selects the merge strategy.
  first_shard = shard_results_list[0]
  if 'seconds_since_epoch' in first_shard:
    return _merge_json_test_result_format(shard_results_list)
  return _merge_simplified_json_format(shard_results_list)
+
+
+def _merge_simplified_json_format(shard_results_list):
+ # This code is specialized to the "simplified" JSON format that used to be
+ # the standard for recipes.
+
+ # These are the only keys we pay attention to in the output JSON.
+ merged_results = {
+ 'successes': [],
+ 'failures': [],
+ 'valid': True,
+ }
+
+ for result_json in shard_results_list:
+ successes = result_json.get('successes', [])
+ failures = result_json.get('failures', [])
+ valid = result_json.get('valid', True)
+
+ if (not isinstance(successes, list) or not isinstance(failures, list) or
+ not isinstance(valid, bool)):
+ raise MergeException(
+ 'Unexpected value type in %s' % result_json) # pragma: no cover
+
+ merged_results['successes'].extend(successes)
+ merged_results['failures'].extend(failures)
+ merged_results['valid'] = merged_results['valid'] and valid
+ return merged_results
+
+
def _merge_json_test_result_format(shard_results_list):
  """Merges shard results in the Chromium JSON test results format version 3.

  Each shard's keys are popped as they are merged; any keys left over at the
  end are considered unmergable and raise.

  Raises:
    MergeException: on an unsupported version, missing required keys,
      mismatched OPTIONAL_MATCHING values, or leftover unknown keys.
  """
  # This code is specialized to the Chromium JSON test results format version 3:
  # https://www.chromium.org/developers/the-json-test-results-format

  # These are required fields for the JSON test result format version 3.
  merged_results = {
    'tests': {},
    'interrupted': False,
    'version': 3,
    # +inf so that min() below always picks a real shard timestamp.
    'seconds_since_epoch': float('inf'),
    'num_failures_by_type': {
    }
  }

  # To make sure that we don't mutate existing shard_results_list.
  shard_results_list = copy.deepcopy(shard_results_list)
  for result_json in shard_results_list:
    # TODO(tansell): check whether this deepcopy is actually neccessary.
    result_json = copy.deepcopy(result_json)

    # Check the version first
    version = result_json.pop('version', -1)
    if version != 3:
      raise MergeException(  # pragma: no cover (covered by
          # results_merger_unittest).
          'Unsupported version %s. Only version 3 is supported' % version)

    # Check the results for each shard have the required keys
    missing = REQUIRED - set(result_json)
    if missing:
      raise MergeException(  # pragma: no cover (covered by
          # results_merger_unittest).
          'Invalid json test results (missing %s)' % missing)

    # Curry merge_values for this result_json. Note merge_value also deletes
    # the key from result_json, so unmerged leftovers can be detected below.
    merge = lambda key, merge_func: merge_value(
        result_json, merged_results, key, merge_func)

    # Traverse the result_json's test trie & merged_results's test tries in
    # DFS order & add the n to merged['tests'].
    merge('tests', merge_tries)

    # If any were interrupted, we are interrupted.
    merge('interrupted', lambda x,y: x|y)

    # Use the earliest seconds_since_epoch value
    merge('seconds_since_epoch', min)

    # Sum the number of failure types
    merge('num_failures_by_type', sum_dicts)

    # Optional values must match
    for optional_key in OPTIONAL_MATCHING:
      if optional_key not in result_json:
        continue

      if optional_key not in merged_results:
        # Set this value to None, then blindly copy over it.
        merged_results[optional_key] = None
        merge(optional_key, lambda src, dst: src)
      else:
        merge(optional_key, ensure_match)

    # Optional values ignored: copied through as-is (last shard wins).
    for optional_key in OPTIONAL_IGNORED:
      if optional_key in result_json:
        merged_results[optional_key] = result_json.pop(
            # pragma: no cover (covered by
            # results_merger_unittest).
            optional_key)

    # Sum optional value counts
    for count_key in OPTIONAL_COUNTS:
      if count_key in result_json:  # pragma: no cover
        # TODO(mcgreevy): add coverage.
        merged_results.setdefault(count_key, 0)
        merge(count_key, lambda a, b: a+b)

    # Anything still left in result_json was not recognized by any of the
    # merge rules above.
    if result_json:
      raise MergeException(  # pragma: no cover (covered by
          # results_merger_unittest).
          'Unmergable values %s' % result_json.keys())

  return merged_results
+
+
def merge_tries(source, dest):
  """Merges test tries.

  This is intended for use as a merge_func parameter to merge_value.

  Args:
    source: A result json test trie.
    dest: A json test trie merge destination.

  Returns:
    dest, with source merged into it.

  Raises:
    MergeException: if a key present in both tries maps to a non-dict value
      in source (the tries conflict at that point).
  """
  # merge_tries merges source into dest by performing a lock-step depth-first
  # traversal of dest and source.
  # pending_nodes contains a list of all sub-tries which have been reached but
  # need further merging.
  # Each element consists of a trie prefix, and a sub-trie from each of dest
  # and source which is reached via that prefix.
  pending_nodes = [('', dest, source)]
  while pending_nodes:
    prefix, dest_node, curr_node = pending_nodes.pop()
    # items() rather than the Python 2-only iteritems(), so this module also
    # works under Python 3.
    for k, v in curr_node.items():
      if k in dest_node:
        if not isinstance(v, dict):
          raise MergeException(
              "%s:%s: %r not mergable, curr_node: %r\ndest_node: %r" % (
                  prefix, k, v, curr_node, dest_node))
        pending_nodes.append(("%s:%s" % (prefix, k), dest_node[k], v))
      else:
        dest_node[k] = v
  return dest
+
+
def ensure_match(source, dest):
  """Returns source if it matches dest.

  This is intended for use as a merge_func parameter to merge_value.

  Raises:
    MergeException if source != dest
  """
  if source == dest:
    return source
  raise MergeException(  # pragma: no cover (covered by
      # results_merger_unittest).
      "Values don't match: %s, %s" % (source, dest))
+
+
def sum_dicts(source, dest):
  """Adds values from source to corresponding values in dest.

  This is intended for use as a merge_func parameter to merge_value.

  Args:
    source: A dict of addable values.
    dest: The dict accumulated into; missing keys are treated as 0.

  Returns:
    dest, after accumulation.
  """
  # items() rather than the Python 2-only iteritems(), so this module also
  # works under Python 3.
  for k, v in source.items():
    dest.setdefault(k, 0)
    dest[k] += v

  return dest
+
+
def merge_value(source, dest, key, merge_func):
  """Merges a value from source to dest.

  The value is deleted from source.

  Args:
    source: A dictionary from which to pull a value, identified by key.
    dest: The dictionary into to which the value is to be merged.
    key: The key which identifies the value to be merged.
    merge_func(src, dst): A function which merges its src into dst,
      and returns the result. May modify dst. May raise a MergeException.

  Raises:
    MergeException if the values can not be merged.
  """
  try:
    dest[key] = merge_func(source[key], dest[key])
  except MergeException as e:
    # Prefix the failing key onto the exception message. Read args[0] instead
    # of the Python 2-only Exception.message attribute so this also works
    # under Python 3; e.message is still assigned for Python 2 callers.
    message = "MergeFailure for %s\n%s" % (
        key, e.args[0] if e.args else '')
    e.message = message
    e.args = tuple([message] + list(e.args[1:]))
    raise
  del source[key]
+
+
def main(files):
  """Merges the given JSON result files and prints the merged result.

  Args:
    files: Paths of two or more JSON result files to merge.

  Returns:
    0 on success, 1 if fewer than two files were supplied.
  """
  if len(files) < 2:
    sys.stderr.write("Not enough JSON files to merge.\n")
    return 1
  sys.stderr.write('Starting with %s\n' % files[0])
  result = json.load(open(files[0]))
  for f in files[1:]:
    sys.stderr.write('Merging %s\n' % f)
    result = merge_test_results([result, json.load(open(f))])
  # print(...) instead of the Python 2-only print statement, so this file is
  # at least importable and runnable under Python 3 as well.
  print(json.dumps(result))
  return 0


if __name__ == "__main__":
  sys.exit(main(sys.argv[1:]))
diff --git a/infra/bots/recipe_modules/swarming/resources/standard_gtest_merge.py b/infra/bots/recipe_modules/swarming/resources/standard_gtest_merge.py
new file mode 100755
index 0000000000..ca3abcf004
--- /dev/null
+++ b/infra/bots/recipe_modules/swarming/resources/standard_gtest_merge.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import json
+import os
+import shutil
+import sys
+import tempfile
+import traceback
+
+from common import gtest_utils
+from slave import annotation_utils
+from slave import slave_utils
+
+
# Emitted (via emit_warning) when one or more shards produced no results;
# the %s placeholder receives the comma-separated list of shard indices.
MISSING_SHARDS_MSG = r"""Missing results from the following shard(s): %s

This can happen in following cases:
  * Test failed to start (missing *.dll/*.so dependency for example)
  * Test crashed or hung
  * Task expired because there are not enough bots available and are all used
  * Swarming service experienced problems

Please examine logs to figure out what happened.
"""
+
+
def emit_warning(title, log=None):
  """Emits a buildbot warning annotation, optionally with an attached log.

  Args:
    title: Short warning text, also used as the attached log's name.
    log: Optional multi-line string to attach as a log.
  """
  # print(...) with a single parenthesized argument behaves identically under
  # Python 2 (expression statement) and Python 3 (function call), unlike the
  # bare Python 2 print statement used previously.
  print('@@@STEP_WARNINGS@@@')
  print(title)
  if log:
    slave_utils.WriteLogLines(title, log.split('\n'))
+
+
def merge_shard_results(summary_json, jsons_to_merge):
  """Reads JSON test output from all shards and combines them into one.

  Args:
    summary_json: Path to swarming's summary.json.
    jsons_to_merge: Paths of the individual shard output.json files.

  Returns:
    Dict with merged test output on success or None on failure. Emits
    annotations.
  """
  # summary.json is produced by swarming.py itself. We are mostly interested
  # in the number of shards.
  try:
    with open(summary_json) as f:
      summary = json.load(f)
  except (IOError, ValueError):
    emit_warning(
        'summary.json is missing or can not be read',
        'Something is seriously wrong with swarming_client/ or the bot.')
    return None

  # Merge all JSON files together. Keep track of missing shards.
  merged = {
      'all_tests': set(),
      'disabled_tests': set(),
      'global_tags': set(),
      'missing_shards': [],
      'per_iteration_data': [],
      'swarming_summary': summary,
  }
  for index, result in enumerate(summary['shards']):
    if result is not None:
      # Author note: this code path doesn't trigger convert_to_old_format() in
      # client/swarming.py, which means the state enum is saved in its string
      # name form, not in the number form.
      state = result.get('state')
      if state == u'BOT_DIED':
        emit_warning('Shard #%d had a Swarming internal failure' % index)
      elif state == u'EXPIRED':
        emit_warning('There wasn\'t enough capacity to run your test')
      elif state == u'TIMED_OUT':
        emit_warning(
            'Test runtime exceeded allocated time',
            'Either it ran for too long (hard timeout) or it didn\'t produce '
            'I/O for an extended period of time (I/O timeout)')
      elif state == u'COMPLETED':
        json_data, err_msg = load_shard_json(index, jsons_to_merge)
        if json_data:
          # Set-like fields.
          for key in ('all_tests', 'disabled_tests', 'global_tags'):
            # Bug fix: the [] default must be inside .get(). Previously it was
            # passed as a second iterable to set.update(), so an absent key
            # produced set.update(None, []) and raised TypeError.
            merged[key].update(json_data.get(key, []))

          # 'per_iteration_data' is a list of dicts. Dicts should be merged
          # together, not the 'per_iteration_data' list itself.
          merged['per_iteration_data'] = merge_list_of_dicts(
              merged['per_iteration_data'],
              json_data.get('per_iteration_data', []))
          continue
        else:
          emit_warning('Task ran but no result was found: %s' % err_msg)
      else:
        emit_warning('Invalid Swarming task state: %s' % state)
    # Reached on any path that did not 'continue' above: this shard produced
    # no usable results.
    merged['missing_shards'].append(index)

  # If some shards are missing, make it known. Continue parsing anyway. Step
  # should be red anyway, since swarming.py return non-zero exit code in that
  # case.
  if merged['missing_shards']:
    as_str = ', '.join(map(str, merged['missing_shards']))
    emit_warning(
        'some shards did not complete: %s' % as_str,
        MISSING_SHARDS_MSG % as_str)
    # Not all tests run, combined JSON summary can not be trusted.
    merged['global_tags'].add('UNRELIABLE_RESULTS')

  # Convert to jsonish dict: sets are not JSON-serializable.
  for key in ('all_tests', 'disabled_tests', 'global_tags'):
    merged[key] = sorted(merged[key])
  return merged
+
+
# Refuse to load a shard output bigger than this; a pathologically large
# output.json would otherwise exhaust memory here.
OUTPUT_JSON_SIZE_LIMIT = 100 * 1024 * 1024  # 100 MB


def load_shard_json(index, jsons_to_merge):
  """Reads JSON output of the specified shard.

  Args:
    index: The index of the shard to load data for.
    jsons_to_merge: Candidate JSON file paths; the one matching
        '<index>/output.json' is loaded.

  Returns: A tuple containing:
    * The contents of path, deserialized into a python object.
    * An error string.
    (exactly one of the tuple elements will be non-None).
  """
  # 'output.json' is set in swarming/api.py, gtest_task method.
  matching_json_files = [
      j for j in jsons_to_merge
      if (os.path.basename(j) == 'output.json'
          and os.path.basename(os.path.dirname(j)) == str(index))]

  # sys.stderr.write instead of the Python 2-only "print >> sys.stderr"
  # statement, so this module also works under Python 3.
  if not matching_json_files:
    sys.stderr.write('shard %s test output missing\n' % index)
    return (None, 'shard %s test output was missing' % index)
  elif len(matching_json_files) > 1:
    sys.stderr.write('duplicate test output for shard %s\n' % index)
    return (None, 'shard %s test output was duplicated' % index)

  path = matching_json_files[0]

  try:
    filesize = os.stat(path).st_size
    if filesize > OUTPUT_JSON_SIZE_LIMIT:
      sys.stderr.write('output.json is %d bytes. Max size is %d\n' % (
          filesize, OUTPUT_JSON_SIZE_LIMIT))
      return (None, 'shard %s test output exceeded the size limit' % index)

    with open(path) as f:
      return (json.load(f), None)
  except (IOError, ValueError, OSError) as e:
    sys.stderr.write('Missing or invalid gtest JSON file: %s\n' % path)
    sys.stderr.write('%s: %s\n' % (type(e).__name__, e))

    return (None, 'shard %s test output was missing or invalid' % index)
+
+
def merge_list_of_dicts(left, right):
  """Merges dicts left[0] with right[0], left[1] with right[1], etc.

  Missing positions are treated as empty dicts; on key collisions the value
  from `right` wins.

  Args:
    left: A list of dicts.
    right: A list of dicts.

  Returns:
    A new list of merged dicts, of length max(len(left), len(right)).
  """
  output = []
  # range() instead of the Python 2-only xrange(); the lists here hold one
  # entry per shard, so materializing the range under Python 2 is harmless.
  for i in range(max(len(left), len(right))):
    left_dict = left[i] if i < len(left) else {}
    right_dict = right[i] if i < len(right) else {}
    merged_dict = left_dict.copy()
    merged_dict.update(right_dict)
    output.append(merged_dict)
  return output
+
+
def standard_gtest_merge(
    output_json, summary_json, jsons_to_merge):
  """Merges gtest shard results and writes the merged JSON to output_json.

  Args:
    output_json: Path the merged JSON is written to.
    summary_json: Path of swarming's summary.json.
    jsons_to_merge: Paths of the individual shard output.json files.

  Returns:
    0; a merge failure is written out as JSON null rather than reported as a
    non-zero exit code.
  """
  output = merge_shard_results(summary_json, jsons_to_merge)
  # Open in text mode ('w', not 'wb'): json.dump writes str, which cannot be
  # written to a binary-mode file under Python 3.
  with open(output_json, 'w') as f:
    json.dump(output, f)

  return 0
+
+
def main(raw_args):
  """Parses arguments and runs the standard gtest merge.

  Args:
    raw_args: Command-line arguments, excluding the program name.

  Returns:
    The exit code from standard_gtest_merge.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--build-properties')
  parser.add_argument('--summary-json')
  parser.add_argument('-o', '--output-json', required=True)
  parser.add_argument('jsons_to_merge', nargs='*')
  parsed = parser.parse_args(raw_args)

  return standard_gtest_merge(
      parsed.output_json, parsed.summary_json, parsed.jsons_to_merge)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/infra/bots/recipe_modules/swarming/resources/standard_isolated_script_merge.py b/infra/bots/recipe_modules/swarming/resources/standard_isolated_script_merge.py
new file mode 100755
index 0000000000..e3c860f433
--- /dev/null
+++ b/infra/bots/recipe_modules/swarming/resources/standard_isolated_script_merge.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import json
+import sys
+
+import results_merger
+
+
def StandardIsolatedScriptMerge(output_json, jsons_to_merge):
  """Merge the contents of one or more results JSONs into a single JSON.

  Args:
    output_json: A path to a JSON file to which the merged results should be
      written.
    jsons_to_merge: A list of paths to JSON files that should be merged.

  Returns:
    0, always.
  """
  # Load every shard result up front, then hand the whole list to the shared
  # merger in results_merger.
  shard_results = []
  for json_path in jsons_to_merge:
    with open(json_path) as json_file:
      shard_results.append(json.load(json_file))
  merged = results_merger.merge_test_results(shard_results)

  with open(output_json, 'w') as out_file:
    json.dump(merged, out_file)

  return 0
+
+
def main():
  """Parses command-line flags and runs the standard isolated-script merge."""
  parser = argparse.ArgumentParser()
  parser.add_argument('-o', '--output-json', required=True)
  parser.add_argument('--build-properties', help=argparse.SUPPRESS)
  parser.add_argument('--summary-json', help=argparse.SUPPRESS)
  parser.add_argument('jsons_to_merge', nargs='*')
  options = parser.parse_args()

  return StandardIsolatedScriptMerge(
      options.output_json, options.jsons_to_merge)


if __name__ == '__main__':
  sys.exit(main())