aboutsummaryrefslogtreecommitdiffhomepage
diff options
context:
space:
mode:
authorGravatar Oliver Chang <oliverchang@users.noreply.github.com>2020-08-05 11:20:48 +1000
committerGravatar GitHub <noreply@github.com>2020-08-05 11:20:48 +1000
commit3f1d43115ff363ca07994b15ef2fead5d9a9d92e (patch)
tree7b6606f09d0a8c8d5b25e2c9feb8b6f282419ce4
parente15b72d833dd5c30f4cd4aed2ba06e64011ce73d (diff)
Clean up build infra. (#4251)
- Remove gcb folder. Move the files that we still need into the build/functions dir. - Remove badge_images. They are now stored on GCS. - Remove jenkins configs.
-rw-r--r--.pylintrc5
-rw-r--r--infra/build/functions/__init__.py15
-rw-r--r--[l---------]infra/build/functions/build_and_run_coverage.py276
-rwxr-xr-x[l---------]infra/build/functions/build_base_images.py99
-rw-r--r--[l---------]infra/build/functions/build_lib.py240
-rwxr-xr-x[l---------]infra/build/functions/build_msan_libs.py95
-rw-r--r--[l---------]infra/build/functions/build_project.py450
-rwxr-xr-x[l---------]infra/build/functions/builds_status.py283
-rw-r--r--infra/build/status/bower.json (renamed from infra/gcb/templates/bower.json)0
-rwxr-xr-xinfra/build/status/deploy.sh18
-rw-r--r--infra/build/status/index.html (renamed from infra/gcb/templates/index.html)11
-rw-r--r--infra/build/status/manifest.json (renamed from infra/gcb/templates/manifest.json)0
-rw-r--r--infra/build/status/polymer.json (renamed from infra/gcb/templates/polymer.json)0
-rw-r--r--infra/build/status/src/build-status/build-status.html (renamed from infra/gcb/templates/src/build-status/build-status.html)0
-rw-r--r--infra/gcb/.gitignore1
-rw-r--r--infra/gcb/badge_images/building.pngbin3020 -> 0 bytes
-rw-r--r--infra/gcb/badge_images/building.svg1
-rw-r--r--infra/gcb/badge_images/coverage_failing.pngbin4160 -> 0 bytes
-rw-r--r--infra/gcb/badge_images/coverage_failing.svg1
-rw-r--r--infra/gcb/badge_images/failing.pngbin3465 -> 0 bytes
-rw-r--r--infra/gcb/badge_images/failing.svg1
-rw-r--r--infra/gcb/build_and_run_coverage.py275
-rwxr-xr-xinfra/gcb/build_base_images.py98
-rw-r--r--infra/gcb/build_lib.py239
-rwxr-xr-xinfra/gcb/build_msan_libs.py94
-rw-r--r--infra/gcb/build_project.py449
-rwxr-xr-xinfra/gcb/builds_status.py282
-rwxr-xr-xinfra/gcb/cancel.py40
-rw-r--r--infra/gcb/jenkins_config/base_job.xml62
-rw-r--r--infra/gcb/jenkins_config/coverage_job.xml60
-rw-r--r--infra/gcb/requirements.txt36
-rwxr-xr-xinfra/gcb/sync.py105
-rwxr-xr-xinfra/gcb/templates/deploy.sh3
-rwxr-xr-xinfra/gcb/wait_for_build.py82
-rw-r--r--infra/jenkins-cluster/deployment-jenkins.yaml60
-rw-r--r--infra/jenkins-cluster/ingress-jenkins-https.yaml10
-rw-r--r--infra/jenkins-cluster/server/Dockerfile19
-rw-r--r--infra/jenkins-cluster/service-jenkins-master.yaml16
38 files changed, 1484 insertions, 1942 deletions
diff --git a/.pylintrc b/.pylintrc
index 00878f72..b551b213 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -139,7 +139,8 @@ disable=print-statement,
deprecated-sys-function,
exception-escape,
comprehension-escape,
- fixme
+ fixme,
+ import-error
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
@@ -582,4 +583,4 @@ overgeneral-exceptions=BaseException,
Exception
# Maximum number of characters on a single line.
-max-line-length=80 \ No newline at end of file
+max-line-length=80
diff --git a/infra/build/functions/__init__.py b/infra/build/functions/__init__.py
new file mode 100644
index 00000000..9f76bf68
--- /dev/null
+++ b/infra/build/functions/__init__.py
@@ -0,0 +1,15 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
diff --git a/infra/build/functions/build_and_run_coverage.py b/infra/build/functions/build_and_run_coverage.py
index 4fe4b575..dcb2f1a0 120000..100644
--- a/infra/build/functions/build_and_run_coverage.py
+++ b/infra/build/functions/build_and_run_coverage.py
@@ -1 +1,275 @@
-../../gcb/build_and_run_coverage.py \ No newline at end of file
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+#!/usr/bin/python2
+"""Starts and runs coverage build on Google Cloud Builder.
+Usage: build_and_run_coverage.py <project_dir>
+"""
+import datetime
+import json
+import logging
+import os
+import sys
+
+import build_lib
+import build_project
+
+SANITIZER = 'coverage'
+CONFIGURATION = ['FUZZING_ENGINE=libfuzzer', 'SANITIZER=%s' % SANITIZER]
+PLATFORM = 'linux'
+
+COVERAGE_BUILD_TAG = 'coverage'
+
+# Where code coverage reports need to be uploaded to.
+COVERAGE_BUCKET_NAME = 'oss-fuzz-coverage'
+
+# Link to the code coverage report in HTML format.
+HTML_REPORT_URL_FORMAT = (build_lib.GCS_URL_BASENAME + COVERAGE_BUCKET_NAME +
+ '/{project}/reports/{date}/{platform}/index.html')
+
+# This is needed for ClusterFuzz to pick up the most recent reports data.
+LATEST_REPORT_INFO_URL = ('/' + COVERAGE_BUCKET_NAME +
+ '/latest_report_info/{project}.json')
+LATEST_REPORT_INFO_CONTENT_TYPE = 'application/json'
+
+# Link where to upload code coverage report files to.
+UPLOAD_URL_FORMAT = 'gs://' + COVERAGE_BUCKET_NAME + '/{project}/{type}/{date}'
+
+# Languages from project.yaml that have code coverage support.
+LANGUAGES_WITH_COVERAGE_SUPPORT = ['c', 'c++']
+
+
+def usage():
+ """Exit with code 1 and display syntax to use this file."""
+ sys.stderr.write("Usage: " + sys.argv[0] + " <project_dir>\n")
+ sys.exit(1)
+
+
+# pylint: disable=too-many-locals
+def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
+ image_project, base_images_project):
+ """Returns build steps for project."""
+ project_yaml = build_project.load_project_yaml(project_name,
+ project_yaml_file,
+ image_project)
+ if project_yaml['disabled']:
+ logging.info('Project "%s" is disabled.', project_name)
+ return []
+
+ if project_yaml['language'] not in LANGUAGES_WITH_COVERAGE_SUPPORT:
+ logging.info(
+ 'Project "%s" is written in "%s", coverage is not supported yet.',
+ project_name, project_yaml['language'])
+ return []
+
+ name = project_yaml['name']
+ image = project_yaml['image']
+ language = project_yaml['language']
+ report_date = datetime.datetime.now().strftime('%Y%m%d')
+
+ build_steps = build_lib.project_image_steps(name, image, language)
+
+ env = CONFIGURATION[:]
+ out = '/workspace/out/' + SANITIZER
+ env.append('OUT=' + out)
+ env.append('FUZZING_LANGUAGE=' + language)
+
+ workdir = build_project.workdir_from_dockerfile(dockerfile_lines)
+ if not workdir:
+ workdir = '/src'
+
+ failure_msg = ('*' * 80 + '\nCoverage build failed.\nTo reproduce, run:\n'
+ 'python infra/helper.py build_image {name}\n'
+ 'python infra/helper.py build_fuzzers --sanitizer coverage '
+ '{name}\n' + '*' * 80).format(name=name)
+
+ # Compilation step.
+ build_steps.append({
+ 'name':
+ image,
+ 'env':
+ env,
+ 'args': [
+ 'bash',
+ '-c',
+ # Remove /out to make sure there are non instrumented binaries.
+ # `cd /src && cd {workdir}` (where {workdir} is parsed from the
+ # Dockerfile). Container Builder overrides our workdir so we need
+ # to add this step to set it back.
+ ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} && '
+ 'compile || (echo "{failure_msg}" && false)'
+ ).format(workdir=workdir, out=out, failure_msg=failure_msg),
+ ],
+ })
+
+ download_corpora_steps = build_lib.download_corpora_steps(project_name)
+ if not download_corpora_steps:
+ logging.info('Skipping code coverage build for %s.', project_name)
+ return []
+
+ build_steps.extend(download_corpora_steps)
+
+ failure_msg = ('*' * 80 + '\nCode coverage report generation failed.\n'
+ 'To reproduce, run:\n'
+ 'python infra/helper.py build_image {name}\n'
+ 'python infra/helper.py build_fuzzers --sanitizer coverage '
+ '{name}\n'
+ 'python infra/helper.py coverage {name}\n' +
+ '*' * 80).format(name=name)
+
+ # Unpack the corpus and run coverage script.
+ coverage_env = env + [
+ 'HTTP_PORT=',
+ 'COVERAGE_EXTRA_ARGS=%s' % project_yaml['coverage_extra_args'].strip(),
+ ]
+ if 'dataflow' in project_yaml['fuzzing_engines']:
+ coverage_env.append('FULL_SUMMARY_PER_TARGET=1')
+
+ build_steps.append({
+ 'name': 'gcr.io/{0}/base-runner'.format(base_images_project),
+ 'env': coverage_env,
+ 'args': [
+ 'bash', '-c',
+ ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*} || ('
+ 'echo "Failed to unpack the corpus for $(basename ${f%%.*}). '
+ 'This usually means that corpus backup for a particular fuzz '
+ 'target does not exist. If a fuzz target was added in the last '
+ '24 hours, please wait one more day. Otherwise, something is '
+ 'wrong with the fuzz target or the infrastructure, and corpus '
+ 'pruning task does not finish successfully." && exit 1'
+ '); done && coverage || (echo "' + failure_msg + '" && false)')
+ ],
+ 'volumes': [{
+ 'name': 'corpus',
+ 'path': '/corpus'
+ }],
+ })
+
+ # Upload the report.
+ upload_report_url = UPLOAD_URL_FORMAT.format(project=project_name,
+ type='reports',
+ date=report_date)
+
+ # Delete the existing report as gsutil cannot overwrite it in a sane way due
+ # to the lack of `-T` option (it creates a subdir in the destination dir).
+ build_steps.append(build_lib.gsutil_rm_rf_step(upload_report_url))
+ build_steps.append({
+ 'name':
+ 'gcr.io/cloud-builders/gsutil',
+ 'args': [
+ '-m',
+ 'cp',
+ '-r',
+ os.path.join(out, 'report'),
+ upload_report_url,
+ ],
+ })
+
+ # Upload the fuzzer stats. Delete the old ones just in case.
+ upload_fuzzer_stats_url = UPLOAD_URL_FORMAT.format(project=project_name,
+ type='fuzzer_stats',
+ date=report_date)
+ build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_stats_url))
+ build_steps.append({
+ 'name':
+ 'gcr.io/cloud-builders/gsutil',
+ 'args': [
+ '-m',
+ 'cp',
+ '-r',
+ os.path.join(out, 'fuzzer_stats'),
+ upload_fuzzer_stats_url,
+ ],
+ })
+
+ # Upload the fuzzer logs. Delete the old ones just in case
+ upload_fuzzer_logs_url = UPLOAD_URL_FORMAT.format(project=project_name,
+ type='logs',
+ date=report_date)
+ build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_logs_url))
+ build_steps.append({
+ 'name':
+ 'gcr.io/cloud-builders/gsutil',
+ 'args': [
+ '-m',
+ 'cp',
+ '-r',
+ os.path.join(out, 'logs'),
+ upload_fuzzer_logs_url,
+ ],
+ })
+
+ # Upload srcmap.
+ srcmap_upload_url = UPLOAD_URL_FORMAT.format(project=project_name,
+ type='srcmap',
+ date=report_date)
+ srcmap_upload_url = srcmap_upload_url.rstrip('/') + '.json'
+ build_steps.append({
+ 'name': 'gcr.io/cloud-builders/gsutil',
+ 'args': [
+ 'cp',
+ '/workspace/srcmap.json',
+ srcmap_upload_url,
+ ],
+ })
+
+ # Update the latest report information file for ClusterFuzz.
+ latest_report_info_url = build_lib.get_signed_url(
+ LATEST_REPORT_INFO_URL.format(project=project_name),
+ content_type=LATEST_REPORT_INFO_CONTENT_TYPE)
+ latest_report_info_body = json.dumps({
+ 'fuzzer_stats_dir':
+ upload_fuzzer_stats_url,
+ 'html_report_url':
+ HTML_REPORT_URL_FORMAT.format(project=project_name,
+ date=report_date,
+ platform=PLATFORM),
+ 'report_date':
+ report_date,
+ 'report_summary_path':
+ os.path.join(upload_report_url, PLATFORM, 'summary.json'),
+ })
+
+ build_steps.append(
+ build_lib.http_upload_step(latest_report_info_body,
+ latest_report_info_url,
+ LATEST_REPORT_INFO_CONTENT_TYPE))
+ return build_steps
+
+
+def main():
+ """Build and run coverage for projects."""
+ if len(sys.argv) != 2:
+ usage()
+
+ image_project = 'oss-fuzz'
+ base_images_project = 'oss-fuzz-base'
+ project_dir = sys.argv[1].rstrip(os.path.sep)
+ project_name = os.path.basename(project_dir)
+ dockerfile_path = os.path.join(project_dir, 'Dockerfile')
+ project_yaml_path = os.path.join(project_dir, 'project.yaml')
+
+ with open(dockerfile_path) as docker_file:
+ dockerfile_lines = docker_file.readlines()
+
+ with open(project_yaml_path) as project_yaml_file:
+ steps = get_build_steps(project_name, project_yaml_file, dockerfile_lines,
+ image_project, base_images_project)
+
+ build_project.run_build(steps, project_name, COVERAGE_BUILD_TAG)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/infra/build/functions/build_base_images.py b/infra/build/functions/build_base_images.py
index b048e4dc..daadb9ef 120000..100755
--- a/infra/build/functions/build_base_images.py
+++ b/infra/build/functions/build_base_images.py
@@ -1 +1,98 @@
-../../gcb/build_base_images.py \ No newline at end of file
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+#!/usr/bin/python2
+"""Build base images on Google Cloud Builder.
+
+Usage: build_base_images.py
+"""
+from __future__ import print_function
+
+import os
+import sys
+import yaml
+
+from googleapiclient.discovery import build
+from oauth2client.client import GoogleCredentials
+
+BASE_IMAGES = [
+ 'base-image',
+ 'base-clang',
+ 'base-builder',
+ 'base-runner',
+ 'base-runner-debug',
+ 'base-sanitizer-libs-builder',
+]
+
+TAG_PREFIX = 'gcr.io/oss-fuzz-base/'
+
+
+def get_steps(images, tag_prefix=TAG_PREFIX):
+ """Returns build steps for given images."""
+ steps = [{
+ 'args': [
+ 'clone',
+ 'https://github.com/google/oss-fuzz.git',
+ ],
+ 'name': 'gcr.io/cloud-builders/git',
+ }]
+
+ for base_image in images:
+ steps.append({
+ 'args': [
+ 'build',
+ '-t',
+ tag_prefix + base_image,
+ '.',
+ ],
+ 'dir': 'oss-fuzz/infra/base-images/' + base_image,
+ 'name': 'gcr.io/cloud-builders/docker',
+ })
+
+ return steps
+
+
+def get_logs_url(build_id, project_id='oss-fuzz-base'):
+ """Returns url that displays the build logs."""
+ url_format = ('https://console.developers.google.com/logs/viewer?'
+ 'resource=build%2Fbuild_id%2F{0}&project={1}')
+ return url_format.format(build_id, project_id)
+
+
+# pylint: disable=no-member, missing-function-docstring
+def main():
+ options = {}
+ if 'GCB_OPTIONS' in os.environ:
+ options = yaml.safe_load(os.environ['GCB_OPTIONS'])
+
+ build_body = {
+ 'steps': get_steps(BASE_IMAGES),
+ 'timeout': str(4 * 3600) + 's',
+ 'options': options,
+ 'images': [TAG_PREFIX + base_image for base_image in BASE_IMAGES],
+ }
+
+ credentials = GoogleCredentials.get_application_default()
+ cloudbuild = build('cloudbuild', 'v1', credentials=credentials)
+ build_info = cloudbuild.projects().builds().create(projectId='oss-fuzz-base',
+ body=build_body).execute()
+ build_id = build_info['metadata']['build']['id']
+
+ print('Logs:', get_logs_url(build_id), file=sys.stderr)
+ print(build_id)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/infra/build/functions/build_lib.py b/infra/build/functions/build_lib.py
index 3a075303..007579ef 120000..100644
--- a/infra/build/functions/build_lib.py
+++ b/infra/build/functions/build_lib.py
@@ -1 +1,239 @@
-../../gcb/build_lib.py \ No newline at end of file
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+"""Utility module for Google Cloud Build scripts."""
+import base64
+import collections
+import os
+import six.moves.urllib.parse as urlparse
+import sys
+import time
+
+import requests
+
+import google.auth
+import googleapiclient.discovery
+from oauth2client.service_account import ServiceAccountCredentials
+
+BUILD_TIMEOUT = 12 * 60 * 60
+
+# Needed for reading public target.list.* files.
+GCS_URL_BASENAME = 'https://storage.googleapis.com/'
+
+GCS_UPLOAD_URL_FORMAT = '/{0}/{1}/{2}'
+
+# Where corpus backups can be downloaded from.
+CORPUS_BACKUP_URL = ('/{project}-backup.clusterfuzz-external.appspot.com/'
+ 'corpus/libFuzzer/{fuzzer}/latest.zip')
+
+# Cloud Builder has a limit of 100 build steps and 100 arguments for each step.
+CORPUS_DOWNLOAD_BATCH_SIZE = 100
+
+TARGETS_LIST_BASENAME = 'targets.list'
+
+EngineInfo = collections.namedtuple(
+ 'EngineInfo',
+ ['upload_bucket', 'supported_sanitizers', 'supported_architectures'])
+
+ENGINE_INFO = {
+ 'libfuzzer':
+ EngineInfo(upload_bucket='clusterfuzz-builds',
+ supported_sanitizers=['address', 'memory', 'undefined'],
+ supported_architectures=['x86_64', 'i386']),
+ 'afl':
+ EngineInfo(upload_bucket='clusterfuzz-builds-afl',
+ supported_sanitizers=['address'],
+ supported_architectures=['x86_64']),
+ 'honggfuzz':
+ EngineInfo(upload_bucket='clusterfuzz-builds-honggfuzz',
+ supported_sanitizers=['address'],
+ supported_architectures=['x86_64']),
+ 'dataflow':
+ EngineInfo(upload_bucket='clusterfuzz-builds-dataflow',
+ supported_sanitizers=['dataflow'],
+ supported_architectures=['x86_64']),
+ 'none':
+ EngineInfo(upload_bucket='clusterfuzz-builds-no-engine',
+ supported_sanitizers=['address'],
+ supported_architectures=['x86_64']),
+}
+
+
+def get_targets_list_filename(sanitizer):
+ """Returns target list filename."""
+ return TARGETS_LIST_BASENAME + '.' + sanitizer
+
+
+def get_targets_list_url(bucket, project, sanitizer):
+ """Returns target list url."""
+ filename = get_targets_list_filename(sanitizer)
+ url = GCS_UPLOAD_URL_FORMAT.format(bucket, project, filename)
+ return url
+
+
+def _get_targets_list(project_name):
+ """Returns target list."""
+ # libFuzzer ASan is the default configuration, get list of targets from it.
+ url = get_targets_list_url(ENGINE_INFO['libfuzzer'].upload_bucket,
+ project_name, 'address')
+
+ url = urlparse.urljoin(GCS_URL_BASENAME, url)
+ response = requests.get(url)
+ if not response.status_code == 200:
+ sys.stderr.write('Failed to get list of targets from "%s".\n' % url)
+ sys.stderr.write('Status code: %d \t\tText:\n%s\n' %
+ (response.status_code, response.text))
+ return None
+
+ return response.text.split()
+
+
+# pylint: disable=no-member
+def get_signed_url(path, method='PUT', content_type=''):
+ """Returns signed url."""
+ timestamp = int(time.time() + BUILD_TIMEOUT)
+ blob = '{0}\n\n{1}\n{2}\n{3}'.format(method, content_type, timestamp, path)
+
+ service_account_path = os.environ.get('GOOGLE_APPLICATION_CREDENTIALS')
+ if service_account_path:
+ creds = ServiceAccountCredentials.from_json_keyfile_name(
+ os.environ['GOOGLE_APPLICATION_CREDENTIALS'])
+ client_id = creds.service_account_email
+ signature = base64.b64encode(creds.sign_blob(blob)[1])
+ else:
+ credentials, project = google.auth.default()
+ iam = googleapiclient.discovery.build('iamcredentials',
+ 'v1',
+ credentials=credentials,
+ cache_discovery=False)
+ client_id = project + '@appspot.gserviceaccount.com'
+ service_account = 'projects/-/serviceAccounts/{0}'.format(client_id)
+ response = iam.projects().serviceAccounts().signBlob(
+ name=service_account,
+ body={
+ 'delegates': [],
+ 'payload': base64.b64encode(blob.encode('utf-8')).decode('utf-8'),
+ }).execute()
+ signature = response['signedBlob']
+
+ values = {
+ 'GoogleAccessId': client_id,
+ 'Expires': timestamp,
+ 'Signature': signature,
+ }
+ return ('https://storage.googleapis.com{0}?'.format(path) +
+ urlparse.urlencode(values))
+
+
+def download_corpora_steps(project_name):
+ """Returns GCB steps for downloading corpora backups for the given project.
+ """
+ fuzz_targets = _get_targets_list(project_name)
+ if not fuzz_targets:
+ sys.stderr.write('No fuzz targets found for project "%s".\n' % project_name)
+ return None
+
+ steps = []
+ # Split fuzz targets into batches of CORPUS_DOWNLOAD_BATCH_SIZE.
+ for i in range(0, len(fuzz_targets), CORPUS_DOWNLOAD_BATCH_SIZE):
+ download_corpus_args = []
+ for binary_name in fuzz_targets[i:i + CORPUS_DOWNLOAD_BATCH_SIZE]:
+ qualified_name = binary_name
+ qualified_name_prefix = '%s_' % project_name
+ if not binary_name.startswith(qualified_name_prefix):
+ qualified_name = qualified_name_prefix + binary_name
+
+ url = get_signed_url(CORPUS_BACKUP_URL.format(project=project_name,
+ fuzzer=qualified_name),
+ method='GET')
+
+ corpus_archive_path = os.path.join('/corpus', binary_name + '.zip')
+ download_corpus_args.append('%s %s' % (corpus_archive_path, url))
+
+ steps.append({
+ 'name': 'gcr.io/oss-fuzz-base/base-runner',
+ 'entrypoint': 'download_corpus',
+ 'args': download_corpus_args,
+ 'volumes': [{
+ 'name': 'corpus',
+ 'path': '/corpus'
+ }],
+ })
+
+ return steps
+
+
+def http_upload_step(data, signed_url, content_type):
+ """Returns a GCB step to upload data to the given URL via GCS HTTP API."""
+ step = {
+ 'name':
+ 'gcr.io/cloud-builders/curl',
+ 'args': [
+ '-H',
+ 'Content-Type: ' + content_type,
+ '-X',
+ 'PUT',
+ '-d',
+ data,
+ signed_url,
+ ],
+ }
+ return step
+
+
+def gsutil_rm_rf_step(url):
+ """Returns a GCB step to recursively delete the object with given GCS url."""
+ step = {
+ 'name': 'gcr.io/cloud-builders/gsutil',
+ 'entrypoint': 'sh',
+ 'args': [
+ '-c',
+ 'gsutil -m rm -rf %s || exit 0' % url,
+ ],
+ }
+ return step
+
+
+def project_image_steps(name, image, language):
+ """Returns GCB steps to build OSS-Fuzz project image."""
+ steps = [{
+ 'args': [
+ 'clone',
+ 'https://github.com/google/oss-fuzz.git',
+ ],
+ 'name': 'gcr.io/cloud-builders/git',
+ }, {
+ 'name': 'gcr.io/cloud-builders/docker',
+ 'args': [
+ 'build',
+ '-t',
+ image,
+ '.',
+ ],
+ 'dir': 'oss-fuzz/projects/' + name,
+ }, {
+ 'name':
+ image,
+ 'args': [
+ 'bash', '-c',
+ 'srcmap > /workspace/srcmap.json && cat /workspace/srcmap.json'
+ ],
+ 'env': [
+ 'OSSFUZZ_REVISION=$REVISION_ID',
+ 'FUZZING_LANGUAGE=%s' % language,
+ ],
+ }]
+
+ return steps
diff --git a/infra/build/functions/build_msan_libs.py b/infra/build/functions/build_msan_libs.py
index 28043a00..33526247 120000..100755
--- a/infra/build/functions/build_msan_libs.py
+++ b/infra/build/functions/build_msan_libs.py
@@ -1 +1,94 @@
-../../gcb/build_msan_libs.py \ No newline at end of file
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+#!/usr/bin/python2
+"""Build MSan libraries on Google Cloud Builder.
+
+Usage: build_msan_libs.py
+"""
+from __future__ import print_function
+
+import datetime
+import os
+import sys
+import yaml
+
+from oauth2client.client import GoogleCredentials
+from googleapiclient.discovery import build
+
+import build_base_images
+
+
+def get_steps(image):
+ """Get build steps for msan-libs-builder."""
+
+ timestamp = datetime.datetime.utcnow().strftime('%Y%m%d%H%M')
+ upload_name = 'msan-libs-' + timestamp + '.zip'
+
+ steps = build_base_images.get_steps([
+ 'base-sanitizer-libs-builder',
+ 'msan-libs-builder',
+ ])
+
+ steps.extend([{
+ 'name': image,
+ 'args': [
+ 'bash',
+ '-c',
+ 'cd /msan && zip -r /workspace/libs.zip .',
+ ],
+ }, {
+ 'name':
+ 'gcr.io/cloud-builders/gsutil',
+ 'args': [
+ 'cp',
+ '/workspace/libs.zip',
+ 'gs://oss-fuzz-msan-libs/' + upload_name,
+ ],
+ }])
+
+ return steps
+
+
+# pylint: disable=no-member
+def main():
+ """Build msan libs."""
+ options = {}
+ if 'GCB_OPTIONS' in os.environ:
+ options = yaml.safe_load(os.environ['GCB_OPTIONS'])
+
+ image = 'gcr.io/oss-fuzz-base/msan-libs-builder'
+ steps = get_steps(image)
+ build_body = {
+ 'steps': steps,
+ 'timeout': str(6 * 3600) + 's',
+ 'options': options,
+ 'images': [
+ 'gcr.io/oss-fuzz-base/base-sanitizer-libs-builder',
+ image,
+ ],
+ }
+ credentials = GoogleCredentials.get_application_default()
+ cloudbuild = build('cloudbuild', 'v1', credentials=credentials)
+ build_info = cloudbuild.projects().builds().create(projectId='oss-fuzz-base',
+ body=build_body).execute()
+ build_id = build_info['metadata']['build']['id']
+
+ print('Logs:', build_base_images.get_logs_url(build_id), file=sys.stderr)
+ print(build_id)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/infra/build/functions/build_project.py b/infra/build/functions/build_project.py
index 451e86b3..84cd7d56 120000..100644
--- a/infra/build/functions/build_project.py
+++ b/infra/build/functions/build_project.py
@@ -1 +1,449 @@
-../../gcb/build_project.py \ No newline at end of file
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+#!/usr/bin/python2
+"""Starts project build on Google Cloud Builder.
+
+Usage: build_project.py <project_dir>
+"""
+
+from __future__ import print_function
+
+import datetime
+import json
+import logging
+import os
+import re
+import sys
+
+import six
+import yaml
+
+from oauth2client.client import GoogleCredentials
+from googleapiclient.discovery import build
+
+import build_lib
+
# Tag suffix applied to fuzzing builds; builds_status.py filters on it.
FUZZING_BUILD_TAG = 'fuzzing'

# Bucket where Cloud Build writes the raw build logs.
GCB_LOGS_BUCKET = 'oss-fuzz-gcb-logs'

# Environment variables that enable each engine/sanitizer configuration.
CONFIGURATIONS = {
    'sanitizer-address': ['SANITIZER=address'],
    'sanitizer-dataflow': ['SANITIZER=dataflow'],
    'sanitizer-memory': ['SANITIZER=memory'],
    'sanitizer-undefined': ['SANITIZER=undefined'],
    'engine-libfuzzer': ['FUZZING_ENGINE=libfuzzer'],
    'engine-afl': ['FUZZING_ENGINE=afl'],
    'engine-honggfuzz': ['FUZZING_ENGINE=honggfuzz'],
    'engine-dataflow': ['FUZZING_ENGINE=dataflow'],
    'engine-none': ['FUZZING_ENGINE=none'],
}

# Defaults applied when project.yaml omits the corresponding key
# (see set_yaml_defaults).
DEFAULT_ARCHITECTURES = ['x86_64']
DEFAULT_ENGINES = ['libfuzzer', 'afl', 'honggfuzz']
DEFAULT_SANITIZERS = ['address', 'undefined']

# Per-sanitizer file recording the name of the most recent build archive.
LATEST_VERSION_FILENAME = 'latest.version'
LATEST_VERSION_CONTENT_TYPE = 'text/plain'

QUEUE_TTL_SECONDS = 60 * 60 * 24  # 24 hours.
+
+
def usage():
  """Print the command-line syntax to stderr and exit with status 1."""
  message = 'Usage: ' + sys.argv[0] + ' <project_dir>\n'
  sys.stderr.write(message)
  sys.exit(1)
+
+
def set_yaml_defaults(project_name, project_yaml, image_project):
  """Fill in default values for any project.yaml keys that are missing.

  Args:
    project_name: Name of the project (directory name).
    project_yaml: Parsed project.yaml dict; mutated in place.
    image_project: GCP project hosting the per-project Docker images.
  """
  defaults = {
      'disabled': False,
      'name': project_name,
      'image': 'gcr.io/{0}/{1}'.format(image_project, project_name),
      'architectures': DEFAULT_ARCHITECTURES,
      'sanitizers': DEFAULT_SANITIZERS,
      'fuzzing_engines': DEFAULT_ENGINES,
      'run_tests': True,
      'coverage_extra_args': '',
      'labels': {},
  }
  for key, value in defaults.items():
    project_yaml.setdefault(key, value)
+
+
def is_supported_configuration(fuzzing_engine, sanitizer, architecture):
  """Return True if the engine/sanitizer/architecture combo can be built."""
  engine_info = build_lib.ENGINE_INFO[fuzzing_engine]
  # i386 builds are only supported with ASan.
  if architecture == 'i386' and sanitizer != 'address':
    return False
  supported = (sanitizer in engine_info.supported_sanitizers and
               architecture in engine_info.supported_architectures)
  return supported
+
+
def get_sanitizers(project_yaml):
  """Flatten the sanitizers list from project.yaml into plain names.

  Entries may be plain strings, or single-key dicts carrying extra
  per-sanitizer options; only the names are returned.
  """
  sanitizers = project_yaml['sanitizers']
  assert isinstance(sanitizers, list)

  names = []
  for entry in sanitizers:
    if isinstance(entry, dict):
      names.extend(entry.keys())
    elif isinstance(entry, six.string_types):
      names.append(entry)

  return names
+
+
def workdir_from_dockerfile(dockerfile_lines):
  """Parse the WORKDIR directive from a Dockerfile.

  Args:
    dockerfile_lines: Iterable of Dockerfile lines.

  Returns:
    The first WORKDIR path found, with '$' escaped as '$$', or None when no
    WORKDIR directive is present.
  """
  workdir_regex = re.compile(r'\s*WORKDIR\s*([^\s]+)')
  for line in dockerfile_lines:
    # Use the compiled pattern's own match() instead of re-wrapping it
    # in re.match().
    match = workdir_regex.match(line)
    if match:
      # We need to escape '$' since they're used for substitutions in
      # Container Builder builds.
      return match.group(1).replace('$', '$$')

  return None
+
+
def load_project_yaml(project_name, project_yaml_file, image_project):
  """Parse project.yaml from a file object and apply default values."""
  parsed = yaml.safe_load(project_yaml_file)
  set_yaml_defaults(project_name, parsed, image_project)
  return parsed
+
+
# pylint: disable=too-many-locals, too-many-statements, too-many-branches
def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
                    image_project, base_images_project):
  """Returns build steps for project.

  Builds one compile/test/upload pipeline per supported
  (engine, sanitizer, architecture) combination.

  Args:
    project_name: Name of the OSS-Fuzz project.
    project_yaml_file: File object for the project's project.yaml.
    dockerfile_lines: Lines of the project's Dockerfile (WORKDIR is parsed
        from them).
    image_project: GCP project hosting the per-project images.
    base_images_project: GCP project hosting the base images.

  Returns:
    A list of Cloud Build step dicts; empty if the project is disabled.
  """
  project_yaml = load_project_yaml(project_name, project_yaml_file,
                                   image_project)

  if project_yaml['disabled']:
    logging.info('Project "%s" is disabled.', project_name)
    return []

  name = project_yaml['name']
  image = project_yaml['image']
  # NOTE(review): 'language' gets no default in set_yaml_defaults, so a
  # project.yaml without it raises KeyError here — confirm this is intended.
  language = project_yaml['language']
  run_tests = project_yaml['run_tests']
  time_stamp = datetime.datetime.now().strftime('%Y%m%d%H%M')

  build_steps = build_lib.project_image_steps(name, image, language)
  # Copy over MSan instrumented libraries.
  build_steps.append({
      'name': 'gcr.io/{0}/msan-libs-builder'.format(base_images_project),
      'args': [
          'bash',
          '-c',
          'cp -r /msan /workspace',
      ],
  })

  # One pipeline per supported (engine, sanitizer, architecture) combo.
  for fuzzing_engine in project_yaml['fuzzing_engines']:
    for sanitizer in get_sanitizers(project_yaml):
      for architecture in project_yaml['architectures']:
        if not is_supported_configuration(fuzzing_engine, sanitizer,
                                          architecture):
          continue

        env = CONFIGURATIONS['engine-' + fuzzing_engine][:]
        env.extend(CONFIGURATIONS['sanitizer-' + sanitizer])
        out = '/workspace/out/' + sanitizer
        stamped_name = '-'.join([name, sanitizer, time_stamp])
        latest_version_file = '-'.join(
            [name, sanitizer, LATEST_VERSION_FILENAME])
        zip_file = stamped_name + '.zip'
        stamped_srcmap_file = stamped_name + '.srcmap.json'
        bucket = build_lib.ENGINE_INFO[fuzzing_engine].upload_bucket
        # Non-x86_64 builds go to architecture-suffixed buckets.
        if architecture != 'x86_64':
          bucket += '-' + architecture

        # Pre-signed URLs so the uploader steps need no credentials.
        upload_url = build_lib.get_signed_url(
            build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name, zip_file))
        srcmap_url = build_lib.get_signed_url(
            build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name,
                                                   stamped_srcmap_file))
        latest_version_url = build_lib.GCS_UPLOAD_URL_FORMAT.format(
            bucket, name, latest_version_file)
        latest_version_url = build_lib.get_signed_url(
            latest_version_url, content_type=LATEST_VERSION_CONTENT_TYPE)

        targets_list_filename = build_lib.get_targets_list_filename(sanitizer)
        targets_list_url = build_lib.get_signed_url(
            build_lib.get_targets_list_url(bucket, name, sanitizer))

        env.append('OUT=' + out)
        env.append('MSAN_LIBS_PATH=/workspace/msan')
        env.append('ARCHITECTURE=' + architecture)
        env.append('FUZZING_LANGUAGE=' + language)

        workdir = workdir_from_dockerfile(dockerfile_lines)
        if not workdir:
          workdir = '/src'

        failure_msg = ('*' * 80 + '\nFailed to build.\nTo reproduce, run:\n'
                       'python infra/helper.py build_image {name}\n'
                       'python infra/helper.py build_fuzzers --sanitizer '
                       '{sanitizer} --engine {engine} --architecture '
                       '{architecture} {name}\n' + '*' * 80).format(
                           name=name,
                           sanitizer=sanitizer,
                           engine=fuzzing_engine,
                           architecture=architecture)

        build_steps.append(
            # compile
            {
                'name':
                    image,
                'env':
                    env,
                'args': [
                    'bash',
                    '-c',
                    # Remove /out to break loudly when a build script
                    # incorrectly uses /out instead of $OUT.
                    # `cd /src && cd {workdir}` (where {workdir} is parsed from
                    # the Dockerfile). Container Builder overrides our workdir
                    # so we need to add this step to set it back.
                    ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} '
                     '&& compile || (echo "{failure_msg}" && false)'
                    ).format(workdir=workdir, out=out, failure_msg=failure_msg),
                ],
            })

        if sanitizer == 'memory':
          # Patch dynamic libraries to use instrumented ones.
          build_steps.append({
              'name':
                  'gcr.io/{0}/msan-libs-builder'.format(base_images_project),
              'args': [
                  'bash',
                  '-c',
                  # TODO(ochang): Replace with just patch_build.py once
                  # permission in image is fixed.
                  'python /usr/local/bin/patch_build.py {0}'.format(out),
              ],
          })

        if run_tests:
          failure_msg = ('*' * 80 + '\nBuild checks failed.\n'
                         'To reproduce, run:\n'
                         'python infra/helper.py build_image {name}\n'
                         'python infra/helper.py build_fuzzers --sanitizer '
                         '{sanitizer} --engine {engine} --architecture '
                         '{architecture} {name}\n'
                         'python infra/helper.py check_build --sanitizer '
                         '{sanitizer} --engine {engine} --architecture '
                         '{architecture} {name}\n' + '*' * 80).format(
                             name=name,
                             sanitizer=sanitizer,
                             engine=fuzzing_engine,
                             architecture=architecture)

          build_steps.append(
              # test binaries
              {
                  'name':
                      'gcr.io/{0}/base-runner'.format(base_images_project),
                  'env':
                      env,
                  'args': [
                      'bash', '-c',
                      'test_all || (echo "{0}" && false)'.format(failure_msg)
                  ],
              })

        if project_yaml['labels']:
          # write target labels
          build_steps.append({
              'name':
                  image,
              'env':
                  env,
              'args': [
                  '/usr/local/bin/write_labels.py',
                  json.dumps(project_yaml['labels']),
                  out,
              ],
          })

        if sanitizer == 'dataflow' and fuzzing_engine == 'dataflow':
          dataflow_steps = dataflow_post_build_steps(name, env,
                                                     base_images_project)
          if dataflow_steps:
            build_steps.extend(dataflow_steps)
          else:
            sys.stderr.write('Skipping dataflow post build steps.\n')

        build_steps.extend([
            # generate targets list
            {
                'name':
                    'gcr.io/{0}/base-runner'.format(base_images_project),
                'env':
                    env,
                'args': [
                    'bash',
                    '-c',
                    'targets_list > /workspace/{0}'.format(
                        targets_list_filename),
                ],
            },
            # zip binaries
            {
                'name':
                    image,
                'args': [
                    'bash', '-c',
                    'cd {out} && zip -r {zip_file} *'.format(out=out,
                                                             zip_file=zip_file)
                ],
            },
            # upload srcmap
            {
                'name': 'gcr.io/{0}/uploader'.format(base_images_project),
                'args': [
                    '/workspace/srcmap.json',
                    srcmap_url,
                ],
            },
            # upload binaries
            {
                'name': 'gcr.io/{0}/uploader'.format(base_images_project),
                'args': [
                    os.path.join(out, zip_file),
                    upload_url,
                ],
            },
            # upload targets list
            {
                'name':
                    'gcr.io/{0}/uploader'.format(base_images_project),
                'args': [
                    '/workspace/{0}'.format(targets_list_filename),
                    targets_list_url,
                ],
            },
            # upload the latest.version file
            build_lib.http_upload_step(zip_file, latest_version_url,
                                       LATEST_VERSION_CONTENT_TYPE),
            # cleanup
            {
                'name': image,
                'args': [
                    'bash',
                    '-c',
                    'rm -r ' + out,
                ],
            },
        ])

  return build_steps
+
+
def dataflow_post_build_steps(project_name, env, base_images_project):
  """Return post-build steps that collect dataflow traces.

  Returns:
    A list of steps (corpora download followed by DFT collection), or None
    when corpora download steps cannot be constructed.
  """
  steps = build_lib.download_corpora_steps(project_name)
  if not steps:
    return None

  dft_env = env + [
      'COLLECT_DFT_TIMEOUT=2h',
      'DFT_FILE_SIZE_LIMIT=65535',
      'DFT_MIN_TIMEOUT=2.0',
      'DFT_TIMEOUT_RANGE=6.0',
  ]
  collect_step = {
      'name':
          'gcr.io/{0}/base-runner'.format(base_images_project),
      'env':
          dft_env,
      'args': [
          'bash', '-c',
          ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*}; done && '
           'collect_dft || (echo "DFT collection failed." && false)')
      ],
      'volumes': [{
          'name': 'corpus',
          'path': '/corpus'
      }],
  }
  steps.append(collect_step)
  return steps
+
+
def get_logs_url(build_id, image_project='oss-fuzz'):
  """Return the Cloud Console URL displaying logs for the given build."""
  base = ('https://console.developers.google.com/logs/viewer?'
          'resource=build%2Fbuild_id%2F{0}&project={1}')
  return base.format(build_id, image_project)
+
+
# pylint: disable=no-member
def run_build(build_steps, project_name, tag):
  """Submit the given steps to Google Cloud Build and print the build id.

  Args:
    build_steps: List of Cloud Build step dicts.
    project_name: OSS-Fuzz project name; used to tag the build.
    tag: Build type tag suffix (e.g. 'fuzzing').
  """
  # Optional extra build options (e.g. machine type) come from the env.
  gcb_options = os.environ.get('GCB_OPTIONS')
  if gcb_options is not None:
    options = yaml.safe_load(gcb_options)
  else:
    options = {}

  build_body = {
      'steps': build_steps,
      'timeout': str(build_lib.BUILD_TIMEOUT) + 's',
      'options': options,
      'logsBucket': GCB_LOGS_BUCKET,
      'tags': [project_name + '-' + tag,],
      'queueTtl': str(QUEUE_TTL_SECONDS) + 's',
  }

  credentials = GoogleCredentials.get_application_default()
  cloudbuild = build('cloudbuild',
                     'v1',
                     credentials=credentials,
                     cache_discovery=False)
  build_info = cloudbuild.projects().builds().create(
      projectId='oss-fuzz', body=build_body).execute()
  build_id = build_info['metadata']['build']['id']

  # Logs URL to stderr so stdout carries only the build id.
  print('Logs:', get_logs_url(build_id), file=sys.stderr)
  print(build_id)
+
+
def main():
  """Build and run projects."""
  if len(sys.argv) != 2:
    usage()

  image_project = 'oss-fuzz'
  base_images_project = 'oss-fuzz-base'
  project_dir = sys.argv[1].rstrip(os.path.sep)
  project_name = os.path.basename(project_dir)

  with open(os.path.join(project_dir, 'Dockerfile')) as dockerfile:
    dockerfile_lines = dockerfile.readlines()

  with open(os.path.join(project_dir, 'project.yaml')) as project_yaml_file:
    steps = get_build_steps(project_name, project_yaml_file, dockerfile_lines,
                            image_project, base_images_project)

  run_build(steps, project_name, FUZZING_BUILD_TAG)


if __name__ == '__main__':
  main()
diff --git a/infra/build/functions/builds_status.py b/infra/build/functions/builds_status.py
index a773a7e5..12bcf672 120000..100755
--- a/infra/build/functions/builds_status.py
+++ b/infra/build/functions/builds_status.py
@@ -1 +1,282 @@
-../../gcb/builds_status.py \ No newline at end of file
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+#!/usr/bin/env python2
+"""Upload log files and update build statuses, badges for projects."""
+
+from __future__ import print_function
+from builtins import range
+
+import datetime
+import os
+import sys
+import json
+import time
+
+import dateutil.parser
+from oauth2client.client import GoogleCredentials
+from googleapiclient.discovery import build as gcb_build
+from google.cloud import storage
+
+import build_and_run_coverage
+import build_project
+
# Bucket serving build statuses, copied logs and badges.
STATUS_BUCKET = 'oss-fuzz-build-logs'
# Directory containing this script; badge images are read relative to it.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# Sub-path under STATUS_BUCKET where per-project badge blobs are uploaded.
BADGE_DIR = 'badges'
# Retry policy for Cloud Build API calls (see execute_with_retries).
RETRY_COUNT = 3
RETRY_WAIT = 5  # Seconds between retries.
# Cap on how many recent builds to fetch, and the API page size per request.
MAX_BUILD_RESULTS = 2000
BUILDS_PAGE_SIZE = 256
# Badge image formats uploaded for each project.
BADGE_IMAGE_TYPES = {'svg': 'image/svg+xml', 'png': 'image/png'}

# pylint: disable=invalid-name
# Lazily-initialized GCS client; created on first use by get_storage_client.
_client = None
+
+
# pylint: disable=global-statement
def get_storage_client():
  """Return the shared GCS client, creating it on first use."""
  global _client
  client = _client
  if not client:
    client = storage.Client()
    _client = client
  return client
+
+
def usage():
  """Print the command-line syntax to stderr and exit with status 1."""
  message = 'Usage: ' + sys.argv[0] + ' <projects_dir>\n'
  sys.stderr.write(message)
  sys.exit(1)
+
+
def scan_project_names(projects_dir):
  """Scan a directory tree and return the sorted list of project names.

  A project is any directory containing a file named 'Dockerfile'.

  Args:
    projects_dir: Root directory holding one subdirectory per project.

  Returns:
    Alphabetically sorted list of project (directory base) names.
  """
  projects = []
  # `_` for the unused dirs entry; avoid shadowing the `file` builtin.
  for root, _, files in os.walk(projects_dir):
    if 'Dockerfile' in files:
      projects.append(os.path.basename(root))
  return sorted(projects)
+
+
def upload_status(successes, failures, status_filename):
  """Upload the main status page JSON to the status bucket.

  Args:
    successes: List of status dicts for projects that built successfully.
    failures: List of status dicts for projects that failed.
    status_filename: Destination blob name (e.g. 'status.json').
  """
  status = {
      'projects': failures + successes,
      'failures': failures,
      'successes': successes,
      'last_updated': datetime.datetime.utcnow().ctime()
  }

  blob = get_storage_client().get_bucket(STATUS_BUCKET).blob(status_filename)
  # no-cache so the dashboard always sees the latest statuses.
  blob.cache_control = 'no-cache'
  blob.upload_from_string(json.dumps(status), content_type='application/json')
+
+
def is_build_successful(build):
  """Return True iff the given build dict finished with status SUCCESS."""
  status = build['status']
  return status == 'SUCCESS'
+
+
def upload_log(build_id):
  """Copy a build's log from the GCB logs bucket into the status bucket.

  Returns:
    True if the log was copied (or already exists at the destination);
    False if the source log could not be found.
  """
  client = get_storage_client()
  status_bucket = client.get_bucket(STATUS_BUCKET)
  gcb_bucket = client.get_bucket(build_project.GCB_LOGS_BUCKET)
  log_name = 'log-{0}.txt'.format(build_id)
  source_log = gcb_bucket.blob(log_name)
  dest_log = status_bucket.blob(log_name)

  if not source_log.exists():
    print('Failed to find build log {0}'.format(log_name), file=sys.stderr)
    return False

  # Already copied on a previous run.
  if dest_log.exists():
    return True

  gcb_bucket.copy_blob(source_log, status_bucket)
  return True
+
+
def find_last_build(builds, project, build_tag_suffix):
  """Find the most recent finished build of a project.

  Only builds that finished at least 40 minutes ago (so logs are final)
  and whose log could be uploaded are considered.

  Args:
    builds: Dict mapping tag -> list of build dicts (most recent first).
    project: Project name.
    build_tag_suffix: Tag suffix, e.g. 'fuzzing' or 'coverage'.

  Returns:
    The matching build dict, or None if no suitable build was found.
  """
  delay_minutes = 40
  tag = project + '-' + build_tag_suffix

  builds = builds.get(tag)
  if not builds:
    print('Failed to find builds with tag {0}'.format(tag), file=sys.stderr)
    return None

  for build in builds:
    if build['status'] == 'WORKING':
      continue

    if tag not in build['tags']:
      continue

    # Fixed membership idiom (`x not in y`, not `not x in y`).
    if 'finishTime' not in build:
      continue

    finish_time = dateutil.parser.parse(build['finishTime'], ignoretz=True)
    if (datetime.datetime.utcnow() - finish_time >=
        datetime.timedelta(minutes=delay_minutes)):

      if not upload_log(build['id']):
        continue

      return build

  return None
+
+
def execute_with_retries(request):
  """Execute an API request, retrying transient failures.

  Retries up to RETRY_COUNT times, sleeping RETRY_WAIT seconds between
  attempts; the last error is re-raised once retries are exhausted.
  """
  for attempt in range(RETRY_COUNT + 1):
    try:
      return request.execute()
    except Exception as error:  # pylint: disable=broad-except
      print('request failed with {0}, retrying...'.format(str(error)))
      if attempt >= RETRY_COUNT:
        raise
      time.sleep(RETRY_WAIT)
+
+
def get_builds(cloudbuild):
  """Get a batch of the latest builds (up to MAX_BUILD_RESULTS), grouped by
  tag.

  Args:
    cloudbuild: Cloud Build API client.

  Returns:
    Dict mapping tag -> list of build dicts, or None on a bad API response.
  """
  ungrouped_builds = []
  next_page_token = None

  while True:
    page_size = min(BUILDS_PAGE_SIZE, MAX_BUILD_RESULTS - len(ungrouped_builds))
    response = execute_with_retries(cloudbuild.projects().builds().list(
        projectId='oss-fuzz', pageSize=page_size, pageToken=next_page_token))

    # Fixed membership idiom (`x not in y`, not `not x in y`).
    if 'builds' not in response:
      print('Invalid response from builds list: {0}'.format(response),
            file=sys.stderr)
      return None

    ungrouped_builds.extend(response['builds'])
    if len(ungrouped_builds) >= MAX_BUILD_RESULTS:
      break

    next_page_token = response.get('nextPageToken')
    # Bug fix: previously a missing nextPageToken restarted listing from the
    # first page, accumulating duplicates until MAX_BUILD_RESULTS was hit.
    if not next_page_token:
      break

  builds = {}
  for build in ungrouped_builds:
    for tag in build['tags']:
      builds.setdefault(tag, []).append(build)

  return builds
+
+
def update_build_status(builds, projects, build_tag_suffix, status_filename):
  """Upload per-project build statuses as JSON to cloud storage.

  Args:
    builds: Dict mapping tag -> list of build dicts.
    projects: List of project names.
    build_tag_suffix: Tag suffix selecting the build type.
    status_filename: Destination blob name for the status JSON.
  """
  successes = []
  failures = []

  for project in projects:
    print(project)

    last_build = find_last_build(builds, project, build_tag_suffix)
    if not last_build:
      print('Failed to get build for {0}'.format(project), file=sys.stderr)
      continue

    print(last_build['startTime'], last_build['status'], last_build['id'])
    succeeded = is_build_successful(last_build)
    entry = {
        'name': project,
        'build_id': last_build['id'],
        'finish_time': last_build['finishTime'],
        'success': succeeded,
    }
    if succeeded:
      successes.append(entry)
    else:
      failures.append(entry)

  upload_status(successes, failures, status_filename)
+
+
def update_build_badges(builds, projects, build_tag, coverage_tag):
  """Upload a status badge image (svg + png) for each project.

  NOTE(review): badge files are read from a local 'badge_images' directory
  next to this script; the old infra/gcb/badge_images copy was removed in
  this cleanup — confirm the directory exists alongside this module.
  """
  for project in projects:
    last_build = find_last_build(builds, project, build_tag)
    last_coverage_build = find_last_build(builds, project, coverage_tag)
    if not last_build or not last_coverage_build:
      continue

    # Badge precedence: failing > coverage_failing > building.
    badge = 'building'
    if not is_build_successful(last_coverage_build):
      badge = 'coverage_failing'
    if not is_build_successful(last_build):
      badge = 'failing'

    print("[badge] {}: {}".format(project, badge))

    for extension, mime_type in BADGE_IMAGE_TYPES.items():
      # Retrieve the image relative to this script's location.
      badge_name = '{badge}.{extension}'.format(badge=badge,
                                                extension=extension)
      badge_file = os.path.join(SCRIPT_DIR, 'badge_images', badge_name)

      # The uploaded blob name looks like `badges/project.png`.
      blob_name = '{badge_dir}/{project_name}.{extension}'.format(
          badge_dir=BADGE_DIR, project_name=project, extension=extension)

      bucket = get_storage_client().get_bucket(STATUS_BUCKET)
      bucket.blob(blob_name).upload_from_filename(badge_file,
                                                  content_type=mime_type)
+
+
def main():
  """Update build statuses and badges for all projects."""
  if len(sys.argv) != 2:
    usage()

  projects = scan_project_names(sys.argv[1])

  credentials = GoogleCredentials.get_application_default()
  cloudbuild = gcb_build('cloudbuild', 'v1', credentials=credentials)
  builds = get_builds(cloudbuild)

  # Fuzzing and coverage statuses go to separate JSON files.
  update_build_status(builds,
                      projects,
                      build_project.FUZZING_BUILD_TAG,
                      status_filename='status.json')
  update_build_status(builds,
                      projects,
                      build_and_run_coverage.COVERAGE_BUILD_TAG,
                      status_filename='status-coverage.json')

  update_build_badges(builds,
                      projects,
                      build_tag=build_project.FUZZING_BUILD_TAG,
                      coverage_tag=build_and_run_coverage.COVERAGE_BUILD_TAG)


if __name__ == '__main__':
  main()
diff --git a/infra/gcb/templates/bower.json b/infra/build/status/bower.json
index b6b6d765..b6b6d765 100644
--- a/infra/gcb/templates/bower.json
+++ b/infra/build/status/bower.json
diff --git a/infra/build/status/deploy.sh b/infra/build/status/deploy.sh
new file mode 100755
index 00000000..060eba94
--- /dev/null
+++ b/infra/build/status/deploy.sh
@@ -0,0 +1,18 @@
#!/bin/bash -ex
# Copyright 2020 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Deploy the static build-status frontend to the GCS bucket it is served from.
# Cache-Control is disabled so status updates become visible immediately.
gsutil -h "Cache-Control:no-cache,max-age=0" -m cp -r bower_components index.html src manifest.json gs://oss-fuzz-build-logs
diff --git a/infra/gcb/templates/index.html b/infra/build/status/index.html
index 36dd15b3..25a3d8ed 100644
--- a/infra/gcb/templates/index.html
+++ b/infra/build/status/index.html
@@ -1,3 +1,14 @@
+<!-- Copyright 2020 Google Inc.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License. -->
+
<!doctype html>
<html lang="en">
<head>
diff --git a/infra/gcb/templates/manifest.json b/infra/build/status/manifest.json
index c2d45e4f..c2d45e4f 100644
--- a/infra/gcb/templates/manifest.json
+++ b/infra/build/status/manifest.json
diff --git a/infra/gcb/templates/polymer.json b/infra/build/status/polymer.json
index 2bd10b64..2bd10b64 100644
--- a/infra/gcb/templates/polymer.json
+++ b/infra/build/status/polymer.json
diff --git a/infra/gcb/templates/src/build-status/build-status.html b/infra/build/status/src/build-status/build-status.html
index 1170e578..1170e578 100644
--- a/infra/gcb/templates/src/build-status/build-status.html
+++ b/infra/build/status/src/build-status/build-status.html
diff --git a/infra/gcb/.gitignore b/infra/gcb/.gitignore
deleted file mode 100644
index 4d3dae74..00000000
--- a/infra/gcb/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-default/
diff --git a/infra/gcb/badge_images/building.png b/infra/gcb/badge_images/building.png
deleted file mode 100644
index 9e7e7344..00000000
--- a/infra/gcb/badge_images/building.png
+++ /dev/null
Binary files differ
diff --git a/infra/gcb/badge_images/building.svg b/infra/gcb/badge_images/building.svg
deleted file mode 100644
index 19f28d7f..00000000
--- a/infra/gcb/badge_images/building.svg
+++ /dev/null
@@ -1 +0,0 @@
-<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="104" height="20"><linearGradient id="b" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><clipPath id="a"><rect width="104" height="20" rx="3" fill="#fff"/></clipPath><g clip-path="url(#a)"><path fill="#555" d="M0 0h55v20H0z"/><path fill="#4c1" d="M55 0h49v20H55z"/><path fill="url(#b)" d="M0 0h104v20H0z"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="110"> <text x="285" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="450">oss-fuzz</text><text x="285" y="140" transform="scale(.1)" textLength="450">oss-fuzz</text><text x="785" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="390">fuzzing</text><text x="785" y="140" transform="scale(.1)" textLength="390">fuzzing</text></g> </svg> \ No newline at end of file
diff --git a/infra/gcb/badge_images/coverage_failing.png b/infra/gcb/badge_images/coverage_failing.png
deleted file mode 100644
index 85abe235..00000000
--- a/infra/gcb/badge_images/coverage_failing.png
+++ /dev/null
Binary files differ
diff --git a/infra/gcb/badge_images/coverage_failing.svg b/infra/gcb/badge_images/coverage_failing.svg
deleted file mode 100644
index dc7b72e3..00000000
--- a/infra/gcb/badge_images/coverage_failing.svg
+++ /dev/null
@@ -1 +0,0 @@
-<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="152" height="20"><linearGradient id="b" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><clipPath id="a"><rect width="152" height="20" rx="3" fill="#fff"/></clipPath><g clip-path="url(#a)"><path fill="#555" d="M0 0h55v20H0z"/><path fill="#dfb317" d="M55 0h97v20H55z"/><path fill="url(#b)" d="M0 0h152v20H0z"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="110"> <text x="285" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="450">oss-fuzz</text><text x="285" y="140" transform="scale(.1)" textLength="450">oss-fuzz</text><text x="1025" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="870">coverage failing</text><text x="1025" y="140" transform="scale(.1)" textLength="870">coverage failing</text></g> </svg> \ No newline at end of file
diff --git a/infra/gcb/badge_images/failing.png b/infra/gcb/badge_images/failing.png
deleted file mode 100644
index 0d2bb470..00000000
--- a/infra/gcb/badge_images/failing.png
+++ /dev/null
Binary files differ
diff --git a/infra/gcb/badge_images/failing.svg b/infra/gcb/badge_images/failing.svg
deleted file mode 100644
index ed0f8621..00000000
--- a/infra/gcb/badge_images/failing.svg
+++ /dev/null
@@ -1 +0,0 @@
-<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="128" height="20"><linearGradient id="b" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><clipPath id="a"><rect width="128" height="20" rx="3" fill="#fff"/></clipPath><g clip-path="url(#a)"><path fill="#555" d="M0 0h55v20H0z"/><path fill="#e05d44" d="M55 0h73v20H55z"/><path fill="url(#b)" d="M0 0h128v20H0z"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="110"> <text x="285" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="450">oss-fuzz</text><text x="285" y="140" transform="scale(.1)" textLength="450">oss-fuzz</text><text x="905" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="630">build failing</text><text x="905" y="140" transform="scale(.1)" textLength="630">build failing</text></g> </svg> \ No newline at end of file
diff --git a/infra/gcb/build_and_run_coverage.py b/infra/gcb/build_and_run_coverage.py
deleted file mode 100644
index dcb2f1a0..00000000
--- a/infra/gcb/build_and_run_coverage.py
+++ /dev/null
@@ -1,275 +0,0 @@
-# Copyright 2020 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-#!/usr/bin/python2
-"""Starts and runs coverage build on Google Cloud Builder.
-Usage: build_and_run_coverage.py <project_dir>
-"""
-import datetime
-import json
-import logging
-import os
-import sys
-
-import build_lib
-import build_project
-
-SANITIZER = 'coverage'
-CONFIGURATION = ['FUZZING_ENGINE=libfuzzer', 'SANITIZER=%s' % SANITIZER]
-PLATFORM = 'linux'
-
-COVERAGE_BUILD_TAG = 'coverage'
-
-# Where code coverage reports need to be uploaded to.
-COVERAGE_BUCKET_NAME = 'oss-fuzz-coverage'
-
-# Link to the code coverage report in HTML format.
-HTML_REPORT_URL_FORMAT = (build_lib.GCS_URL_BASENAME + COVERAGE_BUCKET_NAME +
- '/{project}/reports/{date}/{platform}/index.html')
-
-# This is needed for ClusterFuzz to pick up the most recent reports data.
-LATEST_REPORT_INFO_URL = ('/' + COVERAGE_BUCKET_NAME +
- '/latest_report_info/{project}.json')
-LATEST_REPORT_INFO_CONTENT_TYPE = 'application/json'
-
-# Link where to upload code coverage report files to.
-UPLOAD_URL_FORMAT = 'gs://' + COVERAGE_BUCKET_NAME + '/{project}/{type}/{date}'
-
-# Languages from project.yaml that have code coverage support.
-LANGUAGES_WITH_COVERAGE_SUPPORT = ['c', 'c++']
-
-
-def usage():
- """Exit with code 1 and display syntax to use this file."""
- sys.stderr.write("Usage: " + sys.argv[0] + " <project_dir>\n")
- sys.exit(1)
-
-
-# pylint: disable=too-many-locals
-def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
- image_project, base_images_project):
- """Returns build steps for project."""
- project_yaml = build_project.load_project_yaml(project_name,
- project_yaml_file,
- image_project)
- if project_yaml['disabled']:
- logging.info('Project "%s" is disabled.', project_name)
- return []
-
- if project_yaml['language'] not in LANGUAGES_WITH_COVERAGE_SUPPORT:
- logging.info(
- 'Project "%s" is written in "%s", coverage is not supported yet.',
- project_name, project_yaml['language'])
- return []
-
- name = project_yaml['name']
- image = project_yaml['image']
- language = project_yaml['language']
- report_date = datetime.datetime.now().strftime('%Y%m%d')
-
- build_steps = build_lib.project_image_steps(name, image, language)
-
- env = CONFIGURATION[:]
- out = '/workspace/out/' + SANITIZER
- env.append('OUT=' + out)
- env.append('FUZZING_LANGUAGE=' + language)
-
- workdir = build_project.workdir_from_dockerfile(dockerfile_lines)
- if not workdir:
- workdir = '/src'
-
- failure_msg = ('*' * 80 + '\nCoverage build failed.\nTo reproduce, run:\n'
- 'python infra/helper.py build_image {name}\n'
- 'python infra/helper.py build_fuzzers --sanitizer coverage '
- '{name}\n' + '*' * 80).format(name=name)
-
- # Compilation step.
- build_steps.append({
- 'name':
- image,
- 'env':
- env,
- 'args': [
- 'bash',
- '-c',
- # Remove /out to make sure there are non instrumented binaries.
- # `cd /src && cd {workdir}` (where {workdir} is parsed from the
- # Dockerfile). Container Builder overrides our workdir so we need
- # to add this step to set it back.
- ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} && '
- 'compile || (echo "{failure_msg}" && false)'
- ).format(workdir=workdir, out=out, failure_msg=failure_msg),
- ],
- })
-
- download_corpora_steps = build_lib.download_corpora_steps(project_name)
- if not download_corpora_steps:
- logging.info('Skipping code coverage build for %s.', project_name)
- return []
-
- build_steps.extend(download_corpora_steps)
-
- failure_msg = ('*' * 80 + '\nCode coverage report generation failed.\n'
- 'To reproduce, run:\n'
- 'python infra/helper.py build_image {name}\n'
- 'python infra/helper.py build_fuzzers --sanitizer coverage '
- '{name}\n'
- 'python infra/helper.py coverage {name}\n' +
- '*' * 80).format(name=name)
-
- # Unpack the corpus and run coverage script.
- coverage_env = env + [
- 'HTTP_PORT=',
- 'COVERAGE_EXTRA_ARGS=%s' % project_yaml['coverage_extra_args'].strip(),
- ]
- if 'dataflow' in project_yaml['fuzzing_engines']:
- coverage_env.append('FULL_SUMMARY_PER_TARGET=1')
-
- build_steps.append({
- 'name': 'gcr.io/{0}/base-runner'.format(base_images_project),
- 'env': coverage_env,
- 'args': [
- 'bash', '-c',
- ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*} || ('
- 'echo "Failed to unpack the corpus for $(basename ${f%%.*}). '
- 'This usually means that corpus backup for a particular fuzz '
- 'target does not exist. If a fuzz target was added in the last '
- '24 hours, please wait one more day. Otherwise, something is '
- 'wrong with the fuzz target or the infrastructure, and corpus '
- 'pruning task does not finish successfully." && exit 1'
- '); done && coverage || (echo "' + failure_msg + '" && false)')
- ],
- 'volumes': [{
- 'name': 'corpus',
- 'path': '/corpus'
- }],
- })
-
- # Upload the report.
- upload_report_url = UPLOAD_URL_FORMAT.format(project=project_name,
- type='reports',
- date=report_date)
-
- # Delete the existing report as gsutil cannot overwrite it in a sane way due
- # to the lack of `-T` option (it creates a subdir in the destination dir).
- build_steps.append(build_lib.gsutil_rm_rf_step(upload_report_url))
- build_steps.append({
- 'name':
- 'gcr.io/cloud-builders/gsutil',
- 'args': [
- '-m',
- 'cp',
- '-r',
- os.path.join(out, 'report'),
- upload_report_url,
- ],
- })
-
- # Upload the fuzzer stats. Delete the old ones just in case.
- upload_fuzzer_stats_url = UPLOAD_URL_FORMAT.format(project=project_name,
- type='fuzzer_stats',
- date=report_date)
- build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_stats_url))
- build_steps.append({
- 'name':
- 'gcr.io/cloud-builders/gsutil',
- 'args': [
- '-m',
- 'cp',
- '-r',
- os.path.join(out, 'fuzzer_stats'),
- upload_fuzzer_stats_url,
- ],
- })
-
- # Upload the fuzzer logs. Delete the old ones just in case
- upload_fuzzer_logs_url = UPLOAD_URL_FORMAT.format(project=project_name,
- type='logs',
- date=report_date)
- build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_logs_url))
- build_steps.append({
- 'name':
- 'gcr.io/cloud-builders/gsutil',
- 'args': [
- '-m',
- 'cp',
- '-r',
- os.path.join(out, 'logs'),
- upload_fuzzer_logs_url,
- ],
- })
-
- # Upload srcmap.
- srcmap_upload_url = UPLOAD_URL_FORMAT.format(project=project_name,
- type='srcmap',
- date=report_date)
- srcmap_upload_url = srcmap_upload_url.rstrip('/') + '.json'
- build_steps.append({
- 'name': 'gcr.io/cloud-builders/gsutil',
- 'args': [
- 'cp',
- '/workspace/srcmap.json',
- srcmap_upload_url,
- ],
- })
-
- # Update the latest report information file for ClusterFuzz.
- latest_report_info_url = build_lib.get_signed_url(
- LATEST_REPORT_INFO_URL.format(project=project_name),
- content_type=LATEST_REPORT_INFO_CONTENT_TYPE)
- latest_report_info_body = json.dumps({
- 'fuzzer_stats_dir':
- upload_fuzzer_stats_url,
- 'html_report_url':
- HTML_REPORT_URL_FORMAT.format(project=project_name,
- date=report_date,
- platform=PLATFORM),
- 'report_date':
- report_date,
- 'report_summary_path':
- os.path.join(upload_report_url, PLATFORM, 'summary.json'),
- })
-
- build_steps.append(
- build_lib.http_upload_step(latest_report_info_body,
- latest_report_info_url,
- LATEST_REPORT_INFO_CONTENT_TYPE))
- return build_steps
-
-
-def main():
- """Build and run coverage for projects."""
- if len(sys.argv) != 2:
- usage()
-
- image_project = 'oss-fuzz'
- base_images_project = 'oss-fuzz-base'
- project_dir = sys.argv[1].rstrip(os.path.sep)
- project_name = os.path.basename(project_dir)
- dockerfile_path = os.path.join(project_dir, 'Dockerfile')
- project_yaml_path = os.path.join(project_dir, 'project.yaml')
-
- with open(dockerfile_path) as docker_file:
- dockerfile_lines = docker_file.readlines()
-
- with open(project_yaml_path) as project_yaml_file:
- steps = get_build_steps(project_name, project_yaml_file, dockerfile_lines,
- image_project, base_images_project)
-
- build_project.run_build(steps, project_name, COVERAGE_BUILD_TAG)
-
-
-if __name__ == "__main__":
- main()
diff --git a/infra/gcb/build_base_images.py b/infra/gcb/build_base_images.py
deleted file mode 100755
index daadb9ef..00000000
--- a/infra/gcb/build_base_images.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# Copyright 2020 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-#!/usr/bin/python2
-"""Build base images on Google Cloud Builder.
-
-Usage: build_base_images.py
-"""
-from __future__ import print_function
-
-import os
-import sys
-import yaml
-
-from googleapiclient.discovery import build
-from oauth2client.client import GoogleCredentials
-
-BASE_IMAGES = [
- 'base-image',
- 'base-clang',
- 'base-builder',
- 'base-runner',
- 'base-runner-debug',
- 'base-sanitizer-libs-builder',
-]
-
-TAG_PREFIX = 'gcr.io/oss-fuzz-base/'
-
-
-def get_steps(images, tag_prefix=TAG_PREFIX):
- """Returns build steps for given images."""
- steps = [{
- 'args': [
- 'clone',
- 'https://github.com/google/oss-fuzz.git',
- ],
- 'name': 'gcr.io/cloud-builders/git',
- }]
-
- for base_image in images:
- steps.append({
- 'args': [
- 'build',
- '-t',
- tag_prefix + base_image,
- '.',
- ],
- 'dir': 'oss-fuzz/infra/base-images/' + base_image,
- 'name': 'gcr.io/cloud-builders/docker',
- })
-
- return steps
-
-
-def get_logs_url(build_id, project_id='oss-fuzz-base'):
- """Returns url that displays the build logs."""
- url_format = ('https://console.developers.google.com/logs/viewer?'
- 'resource=build%2Fbuild_id%2F{0}&project={1}')
- return url_format.format(build_id, project_id)
-
-
-# pylint: disable=no-member, missing-function-docstring
-def main():
- options = {}
- if 'GCB_OPTIONS' in os.environ:
- options = yaml.safe_load(os.environ['GCB_OPTIONS'])
-
- build_body = {
- 'steps': get_steps(BASE_IMAGES),
- 'timeout': str(4 * 3600) + 's',
- 'options': options,
- 'images': [TAG_PREFIX + base_image for base_image in BASE_IMAGES],
- }
-
- credentials = GoogleCredentials.get_application_default()
- cloudbuild = build('cloudbuild', 'v1', credentials=credentials)
- build_info = cloudbuild.projects().builds().create(projectId='oss-fuzz-base',
- body=build_body).execute()
- build_id = build_info['metadata']['build']['id']
-
- print('Logs:', get_logs_url(build_id), file=sys.stderr)
- print(build_id)
-
-
-if __name__ == '__main__':
- main()
diff --git a/infra/gcb/build_lib.py b/infra/gcb/build_lib.py
deleted file mode 100644
index 007579ef..00000000
--- a/infra/gcb/build_lib.py
+++ /dev/null
@@ -1,239 +0,0 @@
-# Copyright 2020 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-"""Utility module for Google Cloud Build scripts."""
-import base64
-import collections
-import os
-import six.moves.urllib.parse as urlparse
-import sys
-import time
-
-import requests
-
-import google.auth
-import googleapiclient.discovery
-from oauth2client.service_account import ServiceAccountCredentials
-
-BUILD_TIMEOUT = 12 * 60 * 60
-
-# Needed for reading public target.list.* files.
-GCS_URL_BASENAME = 'https://storage.googleapis.com/'
-
-GCS_UPLOAD_URL_FORMAT = '/{0}/{1}/{2}'
-
-# Where corpus backups can be downloaded from.
-CORPUS_BACKUP_URL = ('/{project}-backup.clusterfuzz-external.appspot.com/'
- 'corpus/libFuzzer/{fuzzer}/latest.zip')
-
-# Cloud Builder has a limit of 100 build steps and 100 arguments for each step.
-CORPUS_DOWNLOAD_BATCH_SIZE = 100
-
-TARGETS_LIST_BASENAME = 'targets.list'
-
-EngineInfo = collections.namedtuple(
- 'EngineInfo',
- ['upload_bucket', 'supported_sanitizers', 'supported_architectures'])
-
-ENGINE_INFO = {
- 'libfuzzer':
- EngineInfo(upload_bucket='clusterfuzz-builds',
- supported_sanitizers=['address', 'memory', 'undefined'],
- supported_architectures=['x86_64', 'i386']),
- 'afl':
- EngineInfo(upload_bucket='clusterfuzz-builds-afl',
- supported_sanitizers=['address'],
- supported_architectures=['x86_64']),
- 'honggfuzz':
- EngineInfo(upload_bucket='clusterfuzz-builds-honggfuzz',
- supported_sanitizers=['address'],
- supported_architectures=['x86_64']),
- 'dataflow':
- EngineInfo(upload_bucket='clusterfuzz-builds-dataflow',
- supported_sanitizers=['dataflow'],
- supported_architectures=['x86_64']),
- 'none':
- EngineInfo(upload_bucket='clusterfuzz-builds-no-engine',
- supported_sanitizers=['address'],
- supported_architectures=['x86_64']),
-}
-
-
-def get_targets_list_filename(sanitizer):
- """Returns target list filename."""
- return TARGETS_LIST_BASENAME + '.' + sanitizer
-
-
-def get_targets_list_url(bucket, project, sanitizer):
- """Returns target list url."""
- filename = get_targets_list_filename(sanitizer)
- url = GCS_UPLOAD_URL_FORMAT.format(bucket, project, filename)
- return url
-
-
-def _get_targets_list(project_name):
- """Returns target list."""
- # libFuzzer ASan is the default configuration, get list of targets from it.
- url = get_targets_list_url(ENGINE_INFO['libfuzzer'].upload_bucket,
- project_name, 'address')
-
- url = urlparse.urljoin(GCS_URL_BASENAME, url)
- response = requests.get(url)
- if not response.status_code == 200:
- sys.stderr.write('Failed to get list of targets from "%s".\n' % url)
- sys.stderr.write('Status code: %d \t\tText:\n%s\n' %
- (response.status_code, response.text))
- return None
-
- return response.text.split()
-
-
-# pylint: disable=no-member
-def get_signed_url(path, method='PUT', content_type=''):
- """Returns signed url."""
- timestamp = int(time.time() + BUILD_TIMEOUT)
- blob = '{0}\n\n{1}\n{2}\n{3}'.format(method, content_type, timestamp, path)
-
- service_account_path = os.environ.get('GOOGLE_APPLICATION_CREDENTIALS')
- if service_account_path:
- creds = ServiceAccountCredentials.from_json_keyfile_name(
- os.environ['GOOGLE_APPLICATION_CREDENTIALS'])
- client_id = creds.service_account_email
- signature = base64.b64encode(creds.sign_blob(blob)[1])
- else:
- credentials, project = google.auth.default()
- iam = googleapiclient.discovery.build('iamcredentials',
- 'v1',
- credentials=credentials,
- cache_discovery=False)
- client_id = project + '@appspot.gserviceaccount.com'
- service_account = 'projects/-/serviceAccounts/{0}'.format(client_id)
- response = iam.projects().serviceAccounts().signBlob(
- name=service_account,
- body={
- 'delegates': [],
- 'payload': base64.b64encode(blob.encode('utf-8')).decode('utf-8'),
- }).execute()
- signature = response['signedBlob']
-
- values = {
- 'GoogleAccessId': client_id,
- 'Expires': timestamp,
- 'Signature': signature,
- }
- return ('https://storage.googleapis.com{0}?'.format(path) +
- urlparse.urlencode(values))
-
-
-def download_corpora_steps(project_name):
- """Returns GCB steps for downloading corpora backups for the given project.
- """
- fuzz_targets = _get_targets_list(project_name)
- if not fuzz_targets:
- sys.stderr.write('No fuzz targets found for project "%s".\n' % project_name)
- return None
-
- steps = []
- # Split fuzz targets into batches of CORPUS_DOWNLOAD_BATCH_SIZE.
- for i in range(0, len(fuzz_targets), CORPUS_DOWNLOAD_BATCH_SIZE):
- download_corpus_args = []
- for binary_name in fuzz_targets[i:i + CORPUS_DOWNLOAD_BATCH_SIZE]:
- qualified_name = binary_name
- qualified_name_prefix = '%s_' % project_name
- if not binary_name.startswith(qualified_name_prefix):
- qualified_name = qualified_name_prefix + binary_name
-
- url = get_signed_url(CORPUS_BACKUP_URL.format(project=project_name,
- fuzzer=qualified_name),
- method='GET')
-
- corpus_archive_path = os.path.join('/corpus', binary_name + '.zip')
- download_corpus_args.append('%s %s' % (corpus_archive_path, url))
-
- steps.append({
- 'name': 'gcr.io/oss-fuzz-base/base-runner',
- 'entrypoint': 'download_corpus',
- 'args': download_corpus_args,
- 'volumes': [{
- 'name': 'corpus',
- 'path': '/corpus'
- }],
- })
-
- return steps
-
-
-def http_upload_step(data, signed_url, content_type):
- """Returns a GCB step to upload data to the given URL via GCS HTTP API."""
- step = {
- 'name':
- 'gcr.io/cloud-builders/curl',
- 'args': [
- '-H',
- 'Content-Type: ' + content_type,
- '-X',
- 'PUT',
- '-d',
- data,
- signed_url,
- ],
- }
- return step
-
-
-def gsutil_rm_rf_step(url):
- """Returns a GCB step to recursively delete the object with given GCS url."""
- step = {
- 'name': 'gcr.io/cloud-builders/gsutil',
- 'entrypoint': 'sh',
- 'args': [
- '-c',
- 'gsutil -m rm -rf %s || exit 0' % url,
- ],
- }
- return step
-
-
-def project_image_steps(name, image, language):
- """Returns GCB steps to build OSS-Fuzz project image."""
- steps = [{
- 'args': [
- 'clone',
- 'https://github.com/google/oss-fuzz.git',
- ],
- 'name': 'gcr.io/cloud-builders/git',
- }, {
- 'name': 'gcr.io/cloud-builders/docker',
- 'args': [
- 'build',
- '-t',
- image,
- '.',
- ],
- 'dir': 'oss-fuzz/projects/' + name,
- }, {
- 'name':
- image,
- 'args': [
- 'bash', '-c',
- 'srcmap > /workspace/srcmap.json && cat /workspace/srcmap.json'
- ],
- 'env': [
- 'OSSFUZZ_REVISION=$REVISION_ID',
- 'FUZZING_LANGUAGE=%s' % language,
- ],
- }]
-
- return steps
diff --git a/infra/gcb/build_msan_libs.py b/infra/gcb/build_msan_libs.py
deleted file mode 100755
index 33526247..00000000
--- a/infra/gcb/build_msan_libs.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# Copyright 2020 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-#!/usr/bin/python2
-"""Build base images on Google Cloud Builder.
-
-Usage: build_base_images.py
-"""
-from __future__ import print_function
-
-import datetime
-import os
-import sys
-import yaml
-
-from oauth2client.client import GoogleCredentials
-from googleapiclient.discovery import build
-
-import build_base_images
-
-
-def get_steps(image):
- """Get build steps for msan-libs-builder."""
-
- timestamp = datetime.datetime.utcnow().strftime('%Y%m%d%H%M')
- upload_name = 'msan-libs-' + timestamp + '.zip'
-
- steps = build_base_images.get_steps([
- 'base-sanitizer-libs-builder',
- 'msan-libs-builder',
- ])
-
- steps.extend([{
- 'name': image,
- 'args': [
- 'bash',
- '-c',
- 'cd /msan && zip -r /workspace/libs.zip .',
- ],
- }, {
- 'name':
- 'gcr.io/cloud-builders/gsutil',
- 'args': [
- 'cp',
- '/workspace/libs.zip',
- 'gs://oss-fuzz-msan-libs/' + upload_name,
- ],
- }])
-
- return steps
-
-
-# pylint: disable=no-member
-def main():
- """Build msan libs."""
- options = {}
- if 'GCB_OPTIONS' in os.environ:
- options = yaml.safe_load(os.environ['GCB_OPTIONS'])
-
- image = 'gcr.io/oss-fuzz-base/msan-libs-builder'
- steps = get_steps(image)
- build_body = {
- 'steps': steps,
- 'timeout': str(6 * 3600) + 's',
- 'options': options,
- 'images': [
- 'gcr.io/oss-fuzz-base/base-sanitizer-libs-builder',
- image,
- ],
- }
- credentials = GoogleCredentials.get_application_default()
- cloudbuild = build('cloudbuild', 'v1', credentials=credentials)
- build_info = cloudbuild.projects().builds().create(projectId='oss-fuzz-base',
- body=build_body).execute()
- build_id = build_info['metadata']['build']['id']
-
- print('Logs:', build_base_images.get_logs_url(build_id), file=sys.stderr)
- print(build_id)
-
-
-if __name__ == '__main__':
- main()
diff --git a/infra/gcb/build_project.py b/infra/gcb/build_project.py
deleted file mode 100644
index 84cd7d56..00000000
--- a/infra/gcb/build_project.py
+++ /dev/null
@@ -1,449 +0,0 @@
-# Copyright 2020 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-#!/usr/bin/python2
-"""Starts project build on Google Cloud Builder.
-
-Usage: build_project.py <project_dir>
-"""
-
-from __future__ import print_function
-
-import datetime
-import json
-import logging
-import os
-import re
-import sys
-
-import six
-import yaml
-
-from oauth2client.client import GoogleCredentials
-from googleapiclient.discovery import build
-
-import build_lib
-
-FUZZING_BUILD_TAG = 'fuzzing'
-
-GCB_LOGS_BUCKET = 'oss-fuzz-gcb-logs'
-
-CONFIGURATIONS = {
- 'sanitizer-address': ['SANITIZER=address'],
- 'sanitizer-dataflow': ['SANITIZER=dataflow'],
- 'sanitizer-memory': ['SANITIZER=memory'],
- 'sanitizer-undefined': ['SANITIZER=undefined'],
- 'engine-libfuzzer': ['FUZZING_ENGINE=libfuzzer'],
- 'engine-afl': ['FUZZING_ENGINE=afl'],
- 'engine-honggfuzz': ['FUZZING_ENGINE=honggfuzz'],
- 'engine-dataflow': ['FUZZING_ENGINE=dataflow'],
- 'engine-none': ['FUZZING_ENGINE=none'],
-}
-
-DEFAULT_ARCHITECTURES = ['x86_64']
-DEFAULT_ENGINES = ['libfuzzer', 'afl', 'honggfuzz']
-DEFAULT_SANITIZERS = ['address', 'undefined']
-
-LATEST_VERSION_FILENAME = 'latest.version'
-LATEST_VERSION_CONTENT_TYPE = 'text/plain'
-
-QUEUE_TTL_SECONDS = 60 * 60 * 24 # 24 hours.
-
-
-def usage():
- """Exit with code 1 and display syntax to use this file."""
- sys.stderr.write('Usage: ' + sys.argv[0] + ' <project_dir>\n')
- sys.exit(1)
-
-
-def set_yaml_defaults(project_name, project_yaml, image_project):
- """Set project.yaml's default parameters."""
- project_yaml.setdefault('disabled', False)
- project_yaml.setdefault('name', project_name)
- project_yaml.setdefault('image',
- 'gcr.io/{0}/{1}'.format(image_project, project_name))
- project_yaml.setdefault('architectures', DEFAULT_ARCHITECTURES)
- project_yaml.setdefault('sanitizers', DEFAULT_SANITIZERS)
- project_yaml.setdefault('fuzzing_engines', DEFAULT_ENGINES)
- project_yaml.setdefault('run_tests', True)
- project_yaml.setdefault('coverage_extra_args', '')
- project_yaml.setdefault('labels', {})
-
-
-def is_supported_configuration(fuzzing_engine, sanitizer, architecture):
- """Check if the given configuration is supported."""
- fuzzing_engine_info = build_lib.ENGINE_INFO[fuzzing_engine]
- if architecture == 'i386' and sanitizer != 'address':
- return False
- return (sanitizer in fuzzing_engine_info.supported_sanitizers and
- architecture in fuzzing_engine_info.supported_architectures)
-
-
-def get_sanitizers(project_yaml):
- """Retrieve sanitizers from project.yaml."""
- sanitizers = project_yaml['sanitizers']
- assert isinstance(sanitizers, list)
-
- processed_sanitizers = []
- for sanitizer in sanitizers:
- if isinstance(sanitizer, six.string_types):
- processed_sanitizers.append(sanitizer)
- elif isinstance(sanitizer, dict):
- for key in sanitizer.keys():
- processed_sanitizers.append(key)
-
- return processed_sanitizers
-
-
-def workdir_from_dockerfile(dockerfile_lines):
- """Parse WORKDIR from the Dockerfile."""
- workdir_regex = re.compile(r'\s*WORKDIR\s*([^\s]+)')
- for line in dockerfile_lines:
- match = re.match(workdir_regex, line)
- if match:
- # We need to escape '$' since they're used for subsitutions in Container
- # Builer builds.
- return match.group(1).replace('$', '$$')
-
- return None
-
-
-def load_project_yaml(project_name, project_yaml_file, image_project):
- """Loads project yaml and sets default values."""
- project_yaml = yaml.safe_load(project_yaml_file)
- set_yaml_defaults(project_name, project_yaml, image_project)
- return project_yaml
-
-
-# pylint: disable=too-many-locals, too-many-statements, too-many-branches
-def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
- image_project, base_images_project):
- """Returns build steps for project."""
- project_yaml = load_project_yaml(project_name, project_yaml_file,
- image_project)
-
- if project_yaml['disabled']:
- logging.info('Project "%s" is disabled.', project_name)
- return []
-
- name = project_yaml['name']
- image = project_yaml['image']
- language = project_yaml['language']
- run_tests = project_yaml['run_tests']
- time_stamp = datetime.datetime.now().strftime('%Y%m%d%H%M')
-
- build_steps = build_lib.project_image_steps(name, image, language)
- # Copy over MSan instrumented libraries.
- build_steps.append({
- 'name': 'gcr.io/{0}/msan-libs-builder'.format(base_images_project),
- 'args': [
- 'bash',
- '-c',
- 'cp -r /msan /workspace',
- ],
- })
-
- for fuzzing_engine in project_yaml['fuzzing_engines']:
- for sanitizer in get_sanitizers(project_yaml):
- for architecture in project_yaml['architectures']:
- if not is_supported_configuration(fuzzing_engine, sanitizer,
- architecture):
- continue
-
- env = CONFIGURATIONS['engine-' + fuzzing_engine][:]
- env.extend(CONFIGURATIONS['sanitizer-' + sanitizer])
- out = '/workspace/out/' + sanitizer
- stamped_name = '-'.join([name, sanitizer, time_stamp])
- latest_version_file = '-'.join(
- [name, sanitizer, LATEST_VERSION_FILENAME])
- zip_file = stamped_name + '.zip'
- stamped_srcmap_file = stamped_name + '.srcmap.json'
- bucket = build_lib.ENGINE_INFO[fuzzing_engine].upload_bucket
- if architecture != 'x86_64':
- bucket += '-' + architecture
-
- upload_url = build_lib.get_signed_url(
- build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name, zip_file))
- srcmap_url = build_lib.get_signed_url(
- build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name,
- stamped_srcmap_file))
- latest_version_url = build_lib.GCS_UPLOAD_URL_FORMAT.format(
- bucket, name, latest_version_file)
- latest_version_url = build_lib.get_signed_url(
- latest_version_url, content_type=LATEST_VERSION_CONTENT_TYPE)
-
- targets_list_filename = build_lib.get_targets_list_filename(sanitizer)
- targets_list_url = build_lib.get_signed_url(
- build_lib.get_targets_list_url(bucket, name, sanitizer))
-
- env.append('OUT=' + out)
- env.append('MSAN_LIBS_PATH=/workspace/msan')
- env.append('ARCHITECTURE=' + architecture)
- env.append('FUZZING_LANGUAGE=' + language)
-
- workdir = workdir_from_dockerfile(dockerfile_lines)
- if not workdir:
- workdir = '/src'
-
- failure_msg = ('*' * 80 + '\nFailed to build.\nTo reproduce, run:\n'
- 'python infra/helper.py build_image {name}\n'
- 'python infra/helper.py build_fuzzers --sanitizer '
- '{sanitizer} --engine {engine} --architecture '
- '{architecture} {name}\n' + '*' * 80).format(
- name=name,
- sanitizer=sanitizer,
- engine=fuzzing_engine,
- architecture=architecture)
-
- build_steps.append(
- # compile
- {
- 'name':
- image,
- 'env':
- env,
- 'args': [
- 'bash',
- '-c',
- # Remove /out to break loudly when a build script
- # incorrectly uses /out instead of $OUT.
- # `cd /src && cd {workdir}` (where {workdir} is parsed from
- # the Dockerfile). Container Builder overrides our workdir
- # so we need to add this step to set it back.
- ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} '
- '&& compile || (echo "{failure_msg}" && false)'
- ).format(workdir=workdir, out=out, failure_msg=failure_msg),
- ],
- })
-
- if sanitizer == 'memory':
- # Patch dynamic libraries to use instrumented ones.
- build_steps.append({
- 'name':
- 'gcr.io/{0}/msan-libs-builder'.format(base_images_project),
- 'args': [
- 'bash',
- '-c',
- # TODO(ochang): Replace with just patch_build.py once
- # permission in image is fixed.
- 'python /usr/local/bin/patch_build.py {0}'.format(out),
- ],
- })
-
- if run_tests:
- failure_msg = ('*' * 80 + '\nBuild checks failed.\n'
- 'To reproduce, run:\n'
- 'python infra/helper.py build_image {name}\n'
- 'python infra/helper.py build_fuzzers --sanitizer '
- '{sanitizer} --engine {engine} --architecture '
- '{architecture} {name}\n'
- 'python infra/helper.py check_build --sanitizer '
- '{sanitizer} --engine {engine} --architecture '
- '{architecture} {name}\n' + '*' * 80).format(
- name=name,
- sanitizer=sanitizer,
- engine=fuzzing_engine,
- architecture=architecture)
-
- build_steps.append(
- # test binaries
- {
- 'name':
- 'gcr.io/{0}/base-runner'.format(base_images_project),
- 'env':
- env,
- 'args': [
- 'bash', '-c',
- 'test_all || (echo "{0}" && false)'.format(failure_msg)
- ],
- })
-
- if project_yaml['labels']:
- # write target labels
- build_steps.append({
- 'name':
- image,
- 'env':
- env,
- 'args': [
- '/usr/local/bin/write_labels.py',
- json.dumps(project_yaml['labels']),
- out,
- ],
- })
-
- if sanitizer == 'dataflow' and fuzzing_engine == 'dataflow':
- dataflow_steps = dataflow_post_build_steps(name, env,
- base_images_project)
- if dataflow_steps:
- build_steps.extend(dataflow_steps)
- else:
- sys.stderr.write('Skipping dataflow post build steps.\n')
-
- build_steps.extend([
- # generate targets list
- {
- 'name':
- 'gcr.io/{0}/base-runner'.format(base_images_project),
- 'env':
- env,
- 'args': [
- 'bash',
- '-c',
- 'targets_list > /workspace/{0}'.format(
- targets_list_filename),
- ],
- },
- # zip binaries
- {
- 'name':
- image,
- 'args': [
- 'bash', '-c',
- 'cd {out} && zip -r {zip_file} *'.format(out=out,
- zip_file=zip_file)
- ],
- },
- # upload srcmap
- {
- 'name': 'gcr.io/{0}/uploader'.format(base_images_project),
- 'args': [
- '/workspace/srcmap.json',
- srcmap_url,
- ],
- },
- # upload binaries
- {
- 'name': 'gcr.io/{0}/uploader'.format(base_images_project),
- 'args': [
- os.path.join(out, zip_file),
- upload_url,
- ],
- },
- # upload targets list
- {
- 'name':
- 'gcr.io/{0}/uploader'.format(base_images_project),
- 'args': [
- '/workspace/{0}'.format(targets_list_filename),
- targets_list_url,
- ],
- },
- # upload the latest.version file
- build_lib.http_upload_step(zip_file, latest_version_url,
- LATEST_VERSION_CONTENT_TYPE),
- # cleanup
- {
- 'name': image,
- 'args': [
- 'bash',
- '-c',
- 'rm -r ' + out,
- ],
- },
- ])
-
- return build_steps
-
-
-def dataflow_post_build_steps(project_name, env, base_images_project):
- """Appends dataflow post build steps."""
- steps = build_lib.download_corpora_steps(project_name)
- if not steps:
- return None
-
- steps.append({
- 'name':
- 'gcr.io/{0}/base-runner'.format(base_images_project),
- 'env':
- env + [
- 'COLLECT_DFT_TIMEOUT=2h',
- 'DFT_FILE_SIZE_LIMIT=65535',
- 'DFT_MIN_TIMEOUT=2.0',
- 'DFT_TIMEOUT_RANGE=6.0',
- ],
- 'args': [
- 'bash', '-c',
- ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*}; done && '
- 'collect_dft || (echo "DFT collection failed." && false)')
- ],
- 'volumes': [{
- 'name': 'corpus',
- 'path': '/corpus'
- }],
- })
- return steps
-
-
-def get_logs_url(build_id, image_project='oss-fuzz'):
- """Returns url where logs are displayed for the build."""
- url_format = ('https://console.developers.google.com/logs/viewer?'
- 'resource=build%2Fbuild_id%2F{0}&project={1}')
- return url_format.format(build_id, image_project)
-
-
-# pylint: disable=no-member
-def run_build(build_steps, project_name, tag):
- """Run the build for given steps on cloud build."""
- options = {}
- if 'GCB_OPTIONS' in os.environ:
- options = yaml.safe_load(os.environ['GCB_OPTIONS'])
-
- build_body = {
- 'steps': build_steps,
- 'timeout': str(build_lib.BUILD_TIMEOUT) + 's',
- 'options': options,
- 'logsBucket': GCB_LOGS_BUCKET,
- 'tags': [project_name + '-' + tag,],
- 'queueTtl': str(QUEUE_TTL_SECONDS) + 's',
- }
-
- credentials = GoogleCredentials.get_application_default()
- cloudbuild = build('cloudbuild',
- 'v1',
- credentials=credentials,
- cache_discovery=False)
- build_info = cloudbuild.projects().builds().create(projectId='oss-fuzz',
- body=build_body).execute()
- build_id = build_info['metadata']['build']['id']
-
- print('Logs:', get_logs_url(build_id), file=sys.stderr)
- print(build_id)
-
-
-def main():
- """Build and run projects."""
- if len(sys.argv) != 2:
- usage()
-
- image_project = 'oss-fuzz'
- base_images_project = 'oss-fuzz-base'
- project_dir = sys.argv[1].rstrip(os.path.sep)
- dockerfile_path = os.path.join(project_dir, 'Dockerfile')
- project_yaml_path = os.path.join(project_dir, 'project.yaml')
- project_name = os.path.basename(project_dir)
-
- with open(dockerfile_path) as dockerfile:
- dockerfile_lines = dockerfile.readlines()
-
- with open(project_yaml_path) as project_yaml_file:
- steps = get_build_steps(project_name, project_yaml_file, dockerfile_lines,
- image_project, base_images_project)
-
- run_build(steps, project_name, FUZZING_BUILD_TAG)
-
-
-if __name__ == '__main__':
- main()
diff --git a/infra/gcb/builds_status.py b/infra/gcb/builds_status.py
deleted file mode 100755
index 12bcf672..00000000
--- a/infra/gcb/builds_status.py
+++ /dev/null
@@ -1,282 +0,0 @@
-# Copyright 2020 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-#!/usr/bin/env python2
-"""Upload log files and update build statuses, badges for projects."""
-
-from __future__ import print_function
-from builtins import range
-
-import datetime
-import os
-import sys
-import json
-import time
-
-import dateutil.parser
-from oauth2client.client import GoogleCredentials
-from googleapiclient.discovery import build as gcb_build
-from google.cloud import storage
-
-import build_and_run_coverage
-import build_project
-
-STATUS_BUCKET = 'oss-fuzz-build-logs'
-SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
-BADGE_DIR = 'badges'
-RETRY_COUNT = 3
-RETRY_WAIT = 5
-MAX_BUILD_RESULTS = 2000
-BUILDS_PAGE_SIZE = 256
-BADGE_IMAGE_TYPES = {'svg': 'image/svg+xml', 'png': 'image/png'}
-
-# pylint: disable=invalid-name
-_client = None
-
-
-# pylint: disable=global-statement
-def get_storage_client():
- """Return storage client."""
- global _client
- if not _client:
- _client = storage.Client()
-
- return _client
-
-
-def usage():
- """Output usage syntax."""
- sys.stderr.write('Usage: ' + sys.argv[0] + ' <projects_dir>\n')
- sys.exit(1)
-
-
-# pylint: disable=unused-variable
-def scan_project_names(projects_dir):
- """Scan and return project list from directory."""
- projects = []
- for root, dirs, files in os.walk(projects_dir):
- for file in files:
- if file == 'Dockerfile':
- projects.append(os.path.basename(root))
- return sorted(projects)
-
-
-def upload_status(successes, failures, status_filename):
- """Upload main status page."""
- data = {
- 'projects': failures + successes,
- 'failures': failures,
- 'successes': successes,
- 'last_updated': datetime.datetime.utcnow().ctime()
- }
-
- bucket = get_storage_client().get_bucket(STATUS_BUCKET)
- blob = bucket.blob(status_filename)
- blob.cache_control = 'no-cache'
- blob.upload_from_string(json.dumps(data), content_type='application/json')
-
-
-def is_build_successful(build):
- """Check build success."""
- return build['status'] == 'SUCCESS'
-
-
-def upload_log(build_id):
- """Uploads log file oss-fuzz-build-logs."""
- status_bucket = get_storage_client().get_bucket(STATUS_BUCKET)
- gcb_bucket = get_storage_client().get_bucket(build_project.GCB_LOGS_BUCKET)
- log_name = 'log-{0}.txt'.format(build_id)
- log = gcb_bucket.blob(log_name)
- dest_log = status_bucket.blob(log_name)
-
- if not log.exists():
- print('Failed to find build log {0}'.format(log_name), file=sys.stderr)
- return False
-
- if dest_log.exists():
- return True
-
- gcb_bucket.copy_blob(log, status_bucket)
- return True
-
-
-def find_last_build(builds, project, build_tag_suffix):
- """Find last finished build of project."""
- delay_minutes = 40
- tag = project + '-' + build_tag_suffix
-
- builds = builds.get(tag)
- if not builds:
- print('Failed to find builds with tag {0}'.format(tag), file=sys.stderr)
- return None
-
- for build in builds:
- if build['status'] == 'WORKING':
- continue
-
- if tag not in build['tags']:
- continue
-
- if not 'finishTime' in build:
- continue
-
- finish_time = dateutil.parser.parse(build['finishTime'], ignoretz=True)
- if (datetime.datetime.utcnow() - finish_time >=
- datetime.timedelta(minutes=delay_minutes)):
-
- if not upload_log(build['id']):
- continue
-
- return build
-
- return None
-
-
-def execute_with_retries(request):
- """Execute request with retries."""
- for i in range(RETRY_COUNT + 1):
- try:
- return request.execute()
- except Exception as error:
- print('request failed with {0}, retrying...'.format(str(error)))
- if i < RETRY_COUNT:
- time.sleep(RETRY_WAIT)
- continue
-
- raise
-
-
-def get_builds(cloudbuild):
- """Get a batch of the latest builds (up to MAX_BUILD_RESULTS), grouped by
- tag."""
- ungrouped_builds = []
- next_page_token = None
-
- while True:
- page_size = min(BUILDS_PAGE_SIZE, MAX_BUILD_RESULTS - len(ungrouped_builds))
- response = execute_with_retries(cloudbuild.projects().builds().list(
- projectId='oss-fuzz', pageSize=page_size, pageToken=next_page_token))
-
- if not 'builds' in response:
- print('Invalid response from builds list: {0}'.format(response),
- file=sys.stderr)
- return None
-
- ungrouped_builds.extend(response['builds'])
- if len(ungrouped_builds) >= MAX_BUILD_RESULTS:
- break
-
- next_page_token = response.get('nextPageToken')
-
- builds = {}
- for build in ungrouped_builds:
- for tag in build['tags']:
- builds.setdefault(tag, []).append(build)
-
- return builds
-
-
-def update_build_status(builds, projects, build_tag_suffix, status_filename):
- """Update build statuses of projects as a json to cloud storage."""
- successes = []
- failures = []
-
- for project in projects:
- print(project)
-
- last_build = find_last_build(builds, project, build_tag_suffix)
- if not last_build:
- print('Failed to get build for {0}'.format(project), file=sys.stderr)
- continue
-
- print(last_build['startTime'], last_build['status'], last_build['id'])
- if is_build_successful(last_build):
- successes.append({
- 'name': project,
- 'build_id': last_build['id'],
- 'finish_time': last_build['finishTime'],
- 'success': True,
- })
- else:
- failures.append({
- 'name': project,
- 'build_id': last_build['id'],
- 'finish_time': last_build['finishTime'],
- 'success': False,
- })
-
- upload_status(successes, failures, status_filename)
-
-
-def update_build_badges(builds, projects, build_tag, coverage_tag):
- """Update build badges of projects."""
- for project in projects:
- last_build = find_last_build(builds, project, build_tag)
- last_coverage_build = find_last_build(builds, project, coverage_tag)
- if not last_build or not last_coverage_build:
- continue
-
- badge = 'building'
- if not is_build_successful(last_coverage_build):
- badge = 'coverage_failing'
- if not is_build_successful(last_build):
- badge = 'failing'
-
- print("[badge] {}: {}".format(project, badge))
-
- for extension, mime_type in BADGE_IMAGE_TYPES.items():
- badge_name = '{badge}.{extension}'.format(badge=badge,
- extension=extension)
- # Retrieve the image relative to this script's location
- badge_file = os.path.join(SCRIPT_DIR, 'badge_images', badge_name)
-
- # The uploaded blob name should look like `badges/project.png`
- blob_name = '{badge_dir}/{project_name}.{extension}'.format(
- badge_dir=BADGE_DIR, project_name=project, extension=extension)
-
- status_bucket = get_storage_client().get_bucket(STATUS_BUCKET)
- badge_blob = status_bucket.blob(blob_name)
- badge_blob.upload_from_filename(badge_file, content_type=mime_type)
-
-
-def main():
- """Update build statuses and badges."""
- if len(sys.argv) != 2:
- usage()
-
- projects_dir = sys.argv[1]
- projects = scan_project_names(projects_dir)
-
- credentials = GoogleCredentials.get_application_default()
- cloudbuild = gcb_build('cloudbuild', 'v1', credentials=credentials)
-
- builds = get_builds(cloudbuild)
- update_build_status(builds,
- projects,
- build_project.FUZZING_BUILD_TAG,
- status_filename='status.json')
- update_build_status(builds,
- projects,
- build_and_run_coverage.COVERAGE_BUILD_TAG,
- status_filename='status-coverage.json')
-
- update_build_badges(builds,
- projects,
- build_tag=build_project.FUZZING_BUILD_TAG,
- coverage_tag=build_and_run_coverage.COVERAGE_BUILD_TAG)
-
-
-if __name__ == '__main__':
- main()
diff --git a/infra/gcb/cancel.py b/infra/gcb/cancel.py
deleted file mode 100755
index 8393a514..00000000
--- a/infra/gcb/cancel.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/python2
-"""Cancels project build on Google Cloud Builder.
-
-Usage: cancel.py <build_id>
-"""
-
-import base64
-import collections
-import datetime
-import os
-import subprocess
-import sys
-import time
-import urllib
-import yaml
-
-from oauth2client.client import GoogleCredentials
-from googleapiclient.discovery import build
-
-
-def usage():
- sys.stderr.write('Usage: ' + sys.argv[0] + ' <build_id>\n')
- exit(1)
-
-
-def main():
- if len(sys.argv) != 2:
- usage()
-
- build_id = sys.argv[1]
-
- credentials = GoogleCredentials.get_application_default()
- cloudbuild = build('cloudbuild', 'v1', credentials=credentials)
- print cloudbuild.projects().builds().cancel(projectId='oss-fuzz',
- id=build_id,
- body={}).execute()
-
-
-if __name__ == '__main__':
- main()
diff --git a/infra/gcb/jenkins_config/base_job.xml b/infra/gcb/jenkins_config/base_job.xml
deleted file mode 100644
index fa90aa47..00000000
--- a/infra/gcb/jenkins_config/base_job.xml
+++ /dev/null
@@ -1,62 +0,0 @@
-<?xml version='1.0' encoding='UTF-8'?>
-<project>
- <actions/>
- <description></description>
- <keepDependencies>false</keepDependencies>
- <properties/>
- <scm class="hudson.plugins.git.GitSCM" plugin="git@3.1.0">
- <configVersion>2</configVersion>
- <userRemoteConfigs>
- <hudson.plugins.git.UserRemoteConfig>
- <url>https://github.com/google/oss-fuzz.git</url>
- </hudson.plugins.git.UserRemoteConfig>
- </userRemoteConfigs>
- <branches>
- <hudson.plugins.git.BranchSpec>
- <name>*/master</name>
- </hudson.plugins.git.BranchSpec>
- </branches>
- <doGenerateSubmoduleConfigurations>false</doGenerateSubmoduleConfigurations>
- <submoduleCfg class="list"/>
- <extensions>
- <hudson.plugins.git.extensions.impl.RelativeTargetDirectory>
- <relativeTargetDir>oss-fuzz</relativeTargetDir>
- </hudson.plugins.git.extensions.impl.RelativeTargetDirectory>
- </extensions>
- </scm>
- <canRoam>true</canRoam>
- <disabled>false</disabled>
- <blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
- <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
- <triggers>
- <jenkins.triggers.ReverseBuildTrigger>
- <spec/>
- <upstreamProjects>infra/base-images</upstreamProjects>
- <threshold>
- <name>SUCCESS</name>
- <ordinal>0</ordinal>
- <color>BLUE</color>
- <completeBuild>true</completeBuild>
- </threshold>
- </jenkins.triggers.ReverseBuildTrigger>
- </triggers>
- <concurrentBuild>false</concurrentBuild>
- <builders>
- <hudson.tasks.Shell>
- <command>#!/bin/bash -eux
-
-virtualenv ENV
-set +o nounset
-. ENV/bin/activate
-set -o nounset
-
-cd $WORKSPACE/oss-fuzz/infra/gcb
-pip install -r requirements.txt
-build_id=$(python build_project.py $WORKSPACE/oss-fuzz/$JOB_NAME)
-python wait_for_build.py $build_id
-</command>
- </hudson.tasks.Shell>
- </builders>
- <publishers/>
- <buildWrappers/>
-</project>
diff --git a/infra/gcb/jenkins_config/coverage_job.xml b/infra/gcb/jenkins_config/coverage_job.xml
deleted file mode 100644
index be5cb829..00000000
--- a/infra/gcb/jenkins_config/coverage_job.xml
+++ /dev/null
@@ -1,60 +0,0 @@
-<?xml version='1.0' encoding='UTF-8'?>
-<project>
- <actions/>
- <description></description>
- <keepDependencies>false</keepDependencies>
- <properties/>
- <scm class="hudson.plugins.git.GitSCM" plugin="git@3.1.0">
- <configVersion>2</configVersion>
- <userRemoteConfigs>
- <hudson.plugins.git.UserRemoteConfig>
- <url>https://github.com/google/oss-fuzz.git</url>
- </hudson.plugins.git.UserRemoteConfig>
- </userRemoteConfigs>
- <branches>
- <hudson.plugins.git.BranchSpec>
- <name>*/master</name>
- </hudson.plugins.git.BranchSpec>
- </branches>
- <doGenerateSubmoduleConfigurations>false</doGenerateSubmoduleConfigurations>
- <submoduleCfg class="list"/>
- <extensions>
- <hudson.plugins.git.extensions.impl.RelativeTargetDirectory>
- <relativeTargetDir>oss-fuzz</relativeTargetDir>
- </hudson.plugins.git.extensions.impl.RelativeTargetDirectory>
- </extensions>
- </scm>
- <canRoam>true</canRoam>
- <disabled>false</disabled>
- <blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
- <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
- <triggers>
- <hudson.triggers.TimerTrigger>
- <spec>H 6 * * *</spec>
- </hudson.triggers.TimerTrigger>
- </triggers>
- <concurrentBuild>false</concurrentBuild>
- <builders>
- <hudson.tasks.Shell>
- <command>#!/bin/bash -eux
-
-virtualenv ENV
-set +o nounset
-. ENV/bin/activate
-set -o nounset
-
-cd $WORKSPACE/oss-fuzz/infra/gcb
-pip install -r requirements.txt
-project_dir=$WORKSPACE/oss-fuzz/projects/$(basename $JOB_NAME)
-build_id=$(python build_and_run_coverage.py $project_dir)
-if [[ "$build_id" == "0" ]]; then
- echo "Intentionally skipping code coverage job."
-else
- python wait_for_build.py $build_id
-fi
-</command>
- </hudson.tasks.Shell>
- </builders>
- <publishers/>
- <buildWrappers/>
-</project>
diff --git a/infra/gcb/requirements.txt b/infra/gcb/requirements.txt
deleted file mode 100644
index faaf9eb1..00000000
--- a/infra/gcb/requirements.txt
+++ /dev/null
@@ -1,36 +0,0 @@
-cachetools==2.1.0
-certifi==2018.4.16
-chardet==3.0.4
-enum34==1.1.6
-future==0.18.2
-futures==3.2.0
-google-api-core==1.2.0
-google-api-python-client==1.7.0
-google-auth==1.5.0
-google-auth-httplib2==0.0.3
-google-cloud-core==0.28.1
-google-cloud-logging==1.6.0
-google-cloud-pubsub==0.35.2
-google-cloud-storage==1.10.0
-google-resumable-media==0.3.1
-googleapis-common-protos==1.5.3
-grpc-google-iam-v1==0.11.4
-grpcio==1.12.0
-httplib2==0.18.0
-idna==2.6
-Jinja2==2.10.1
-MarkupSafe==1.0
-multi-key-dict==2.0.3
-oauth2client==4.1.2
-pbr==4.0.3
-protobuf==3.5.2.post1
-pyasn1==0.4.3
-pyasn1-modules==0.2.1
-python-dateutil==2.7.3
-python-jenkins==1.0.0
-pytz==2018.4
-PyYAML==5.1
-requests==2.21.0
-rsa==3.4.2
-six==1.11.0
-uritemplate==3.0.0
diff --git a/infra/gcb/sync.py b/infra/gcb/sync.py
deleted file mode 100755
index bbf321e1..00000000
--- a/infra/gcb/sync.py
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/usr/bin/env python
-"""Script to sync CF and Jenkins jobs."""
-
-import json
-import os
-import re
-import sys
-import yaml
-
-import jenkins
-
-JENKINS_SERVER = ('localhost', 8080)
-
-JOB_TEMPLATES = [
- {'prefix': 'projects/', 'config': 'base_job.xml'},
-]
-
-SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
-OSSFUZZ_DIR = os.path.dirname(os.path.dirname(SCRIPT_DIR))
-
-VALID_PROJECT_NAME = re.compile(r'^[a-zA-Z0-9_-]+$')
-
-
-def main():
- # Connect to jenkins server.
- jenkins_login = get_jenkins_login()
- server = jenkins.Jenkins(
- 'http://%s:%d' % JENKINS_SERVER,
- username=jenkins_login[0],
- password=jenkins_login[1])
-
- for project in get_projects():
- print 'syncing configs for', project
- try:
- # Create/update jenkins build job.
- sync_jenkins_job(server, project)
-
- except Exception as e:
- print >> sys.stderr, 'Failed to setup job with exception', e
-
-
-def _has_dockerfile(project_dir):
- """Whether or not the project has a Dockerfile."""
- if os.path.exists(os.path.join(project_dir, 'Dockerfile')):
- return True
-
- project_yaml_path = os.path.join(project_dir, 'project.yaml')
- if not os.path.exists(project_yaml_path):
- return False
-
- with open(project_yaml_path) as f:
- project_info = yaml.safe_load(f)
-
- return 'dockerfile' in project_info
-
-
-def get_projects():
- """Return list of projects for oss-fuzz."""
- projects = []
- projects_dir = os.path.join(OSSFUZZ_DIR, 'projects')
- for name in os.listdir(projects_dir):
- full_path = os.path.join(projects_dir, name)
- if not os.path.isdir(full_path) or not _has_dockerfile(full_path):
- continue
-
- if not VALID_PROJECT_NAME.match(name):
- print >> sys.stderr, 'Invalid project name:', name
- continue
-
- projects.append(name)
-
- if not projects:
- print >> sys.stderr, 'No projects found.'
-
- return projects
-
-
-def get_jenkins_login():
- """Returns (username, password) for jenkins."""
- username = os.getenv('JENKINS_USER')
- password = os.getenv('JENKINS_PASS')
-
- return username, password
-
-
-def sync_jenkins_job(server, project):
- """Sync the config with jenkins."""
- project_yaml = os.path.join(OSSFUZZ_DIR, 'projects', project, 'project.yaml')
- with open(project_yaml, 'r') as f:
- project_json_string = json.dumps(json.dumps(yaml.safe_load(f)))
-
- for job in JOB_TEMPLATES:
- job_name = job['prefix'] + project
- with open(os.path.join(SCRIPT_DIR, 'jenkins_config', job['config'])) as f:
- job_config_xml = f.read()
-
- if server.job_exists(job_name):
- server.reconfig_job(job_name, job_config_xml)
- else:
- server.create_job(job_name, job_config_xml)
- server.build_job(job_name)
-
-
-if __name__ == '__main__':
- main()
diff --git a/infra/gcb/templates/deploy.sh b/infra/gcb/templates/deploy.sh
deleted file mode 100755
index 36aa8716..00000000
--- a/infra/gcb/templates/deploy.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-gsutil -h "Cache-Control:no-cache,max-age=0" -m cp -r bower_components index.html src manifest.json gs://oss-fuzz-build-logs
diff --git a/infra/gcb/wait_for_build.py b/infra/gcb/wait_for_build.py
deleted file mode 100755
index e6103154..00000000
--- a/infra/gcb/wait_for_build.py
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/python2
-"""Waits for project build on Google Cloud Builder.
-
-Usage: wait_for_build.py <build_id>
-"""
-
-import argparse
-import sys
-import time
-import datetime
-
-from googleapiclient.discovery import build
-from oauth2client.client import GoogleCredentials
-
-POLL_INTERVAL = 15
-cloudbuild = None
-
-
-def _print(msg):
- # Print helper writing to stdout and instantly flushing it to ensure the
- # output is visible in Jenkins console viewer as soon as possible.
- sys.stdout.write(msg)
- sys.stdout.write('\n')
- sys.stdout.flush()
-
-
-def get_build(build_id, cloudbuild, project):
- return cloudbuild.projects().builds().get(projectId=project,
- id=build_id).execute()
-
-
-def wait_for_build(build_id, project):
- DONE_STATUSES = [
- 'SUCCESS',
- 'FAILURE',
- 'INTERNAL_ERROR',
- 'CANCELLED',
- 'TIMEOUT',
- ]
-
- status = None
- while True:
- build_info = get_build(build_id, cloudbuild, project)
-
- current_status = build_info['status']
- if current_status != status:
- _print('%s %s' % (str(datetime.datetime.now()), current_status))
- status = current_status
- if status in DONE_STATUSES:
- return status == 'SUCCESS'
-
- time.sleep(POLL_INTERVAL)
-
-
-def main():
- global cloudbuild
-
- parser = argparse.ArgumentParser(description='Wait for build to complete')
- parser.add_argument('-p',
- '--project',
- help='Cloud Project',
- default='oss-fuzz')
- parser.add_argument('build_id', help='The Container Builder build ID.')
-
- args = parser.parse_args()
-
- credentials = GoogleCredentials.get_application_default()
- cloudbuild = build('cloudbuild', 'v1', credentials=credentials)
-
- if wait_for_build(args.build_id, args.project):
- return
-
- _print('The build failed. Retrying the same build one more time.')
- retry_info = cloudbuild.projects().builds().retry(projectId=args.project,
- id=args.build_id).execute()
- new_build_id = retry_info['metadata']['build']['id']
- if not wait_for_build(new_build_id, args.project):
- sys.exit(1)
-
-
-if __name__ == '__main__':
- main()
diff --git a/infra/jenkins-cluster/deployment-jenkins.yaml b/infra/jenkins-cluster/deployment-jenkins.yaml
deleted file mode 100644
index 6ff526d8..00000000
--- a/infra/jenkins-cluster/deployment-jenkins.yaml
+++ /dev/null
@@ -1,60 +0,0 @@
-apiVersion: extensions/v1beta1
-kind: Deployment
-metadata:
- name: jenkins-master
-spec:
- replicas: 1
- template:
- metadata:
- name: jenkins-master
- labels:
- app: jenkins-master
- spec:
- containers:
- - name: jenkins
- image: gcr.io/oss-fuzz-base/jenkins:20170726v4
- env:
- - name: JAVA_OPTS
- value: "-Duser.timezone=America/Los_Angeles -Dhudson.security.csrf.requestfield=Jenkins-Crumb -Xmx16g -Djenkins.security.ApiTokenProperty.showTokenToAdmins=true"
- ports:
- - containerPort: 8080
- - containerPort: 50000
- volumeMounts:
- - name: jenkins-home
- mountPath: "/var/jenkins_home"
- - name: secrets
- mountPath: "/var/secrets"
- readOnly: true
- livenessProbe:
- httpGet:
- path: "/login"
- port: 8080
- initialDelaySeconds: 60
- timeoutSeconds: 15
- resources:
- requests:
- memory: "4Gi"
- cpu: "4"
- - name: kubectl-proxy
- image: lachlanevenson/k8s-kubectl
- args:
- - "proxy"
- - "-p"
- - "8081"
- volumes:
- - name: jenkins-home
- gcePersistentDisk:
- pdName: jenkins-home
- fsType: ext4
- - name: secrets
- secret:
- secretName: secrets
- items:
- - key: build-service-account.json
- path: build-service-account.json
- - key: base-build-service-account.json
- path: base-build-service-account.json
- - key: tls-cert
- path: cert.pem
- - key: tls-key
- path: cert.key
diff --git a/infra/jenkins-cluster/ingress-jenkins-https.yaml b/infra/jenkins-cluster/ingress-jenkins-https.yaml
deleted file mode 100644
index 396a9b6f..00000000
--- a/infra/jenkins-cluster/ingress-jenkins-https.yaml
+++ /dev/null
@@ -1,10 +0,0 @@
-apiVersion: extensions/v1beta1
-kind: Ingress
-metadata:
- name: jenkins-https-ingress
-spec:
- backend:
- serviceName: jenkins-master
- servicePort: 8080
- tls:
- - secretName: tls
diff --git a/infra/jenkins-cluster/server/Dockerfile b/infra/jenkins-cluster/server/Dockerfile
deleted file mode 100644
index c164a0c9..00000000
--- a/infra/jenkins-cluster/server/Dockerfile
+++ /dev/null
@@ -1,19 +0,0 @@
-FROM jenkins/jenkins:lts
-USER root
-
-RUN mkdir /var/secrets
-RUN apt-get -y update && apt-get -y upgrade && apt-get -y install python-dev virtualenv python-pip build-essential
-
-WORKDIR /
-RUN wget https://dl.google.com/dl/cloudsdk/release/google-cloud-sdk.zip
-RUN unzip google-cloud-sdk.zip
-
-RUN /google-cloud-sdk/install.sh --usage-reporting=false --bash-completion=false --disable-installation-options
-RUN /google-cloud-sdk/bin/gcloud -q components install alpha beta
-RUN /google-cloud-sdk/bin/gcloud -q components update
-
-RUN chown -R jenkins:jenkins /google-cloud-sdk
-
-USER jenkins
-ENV JENKINS_OPTS --httpPort=8080 --httpsPort=8082 --httpsCertificate=/var/secrets/cert.pem --httpsPrivateKey=/var/secrets/cert.key
-ENV PATH=$PATH:/google-cloud-sdk/bin
diff --git a/infra/jenkins-cluster/service-jenkins-master.yaml b/infra/jenkins-cluster/service-jenkins-master.yaml
deleted file mode 100644
index f68dc2ce..00000000
--- a/infra/jenkins-cluster/service-jenkins-master.yaml
+++ /dev/null
@@ -1,16 +0,0 @@
-apiVersion: v1
-kind: Service
-metadata:
- name: jenkins-master
- labels:
- app: jenkins-master
-spec:
- type: NodePort
- selector:
- app: jenkins-master
- ports:
- - name: agent
- port: 50000
- - name: http
- port: 8080
- targetPort: 8080