aboutsummaryrefslogtreecommitdiffhomepage
path: root/infra
diff options
context:
space:
mode:
authorGravatar kabeer27 <32016558+kabeer27@users.noreply.github.com>2020-07-16 01:11:18 +0000
committerGravatar GitHub <noreply@github.com>2020-07-16 11:11:18 +1000
commitf0d54c33cbf22d32cebe447d636ce432cb3e4325 (patch)
tree1d7885d3722b9024bb1fdf0bfc69a136ac09ff57 /infra
parent21ea9d6e3dcd85d40efd8943c3fa0e75eae09fd2 (diff)
Refactoring and request build cloud function (#4120)
Co-authored-by: Kabeer Seth <kabeerseth@google.com>
Diffstat (limited to 'infra')
l---------infra/build/functions/base_image/build_base_images.py1
-rwxr-xr-xinfra/build/functions/base_image/deploy.sh55
l---------infra/build/functions/base_image/requirements.txt1
-rw-r--r--infra/build/functions/base_images.py (renamed from infra/build/functions/base_image/main.py)0
l---------infra/build/functions/build_base_images.py1
l---------infra/build/functions/build_lib.py1
l---------infra/build/functions/build_project.py1
-rw-r--r--infra/build/functions/datastore_entities.py32
-rwxr-xr-xinfra/build/functions/deploy.sh112
-rw-r--r--infra/build/functions/expected_build_steps.json330
-rw-r--r--infra/build/functions/main.py35
-rw-r--r--infra/build/functions/request_build.py87
-rw-r--r--infra/build/functions/request_build_test.py93
-rw-r--r--infra/build/functions/sync.py (renamed from infra/build/functions/sync/main.py)21
-rwxr-xr-xinfra/build/functions/sync/deploy.sh64
l---------infra/build/functions/sync/requirements.txt1
-rw-r--r--infra/build/functions/sync_test.py (renamed from infra/build/functions/sync/main_test.py)97
-rw-r--r--infra/build/functions/test_utils.py84
-rw-r--r--infra/gcb/build_and_run_coverage.py67
-rw-r--r--infra/gcb/build_lib.py57
-rw-r--r--infra/gcb/build_project.py141
21 files changed, 983 insertions, 298 deletions
diff --git a/infra/build/functions/base_image/build_base_images.py b/infra/build/functions/base_image/build_base_images.py
deleted file mode 120000
index 7541a4f9..00000000
--- a/infra/build/functions/base_image/build_base_images.py
+++ /dev/null
@@ -1 +0,0 @@
-../../../gcb/build_base_images.py \ No newline at end of file
diff --git a/infra/build/functions/base_image/deploy.sh b/infra/build/functions/base_image/deploy.sh
deleted file mode 100755
index ab982e68..00000000
--- a/infra/build/functions/base_image/deploy.sh
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2020 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-
-JOB_TOPIC=schedule-base-image-build
-SCHEDULER_JOB=base-image-scheduler
-JOB_SCHEDULE="0 3 * * *"
-MESSAGE="Start base image build"
-ENTRY_POINT=base_builder
-
-if [ "$1" ]; then
- PROJECT_ID=$1
-else
- echo -e "\n Usage ./deploy.sh my-project-name"; exit;
-fi
-
-# Checking if the given pubsub topic exists
-if ! gcloud pubsub topics describe $JOB_TOPIC --project $PROJECT_ID ;
- then
- gcloud pubsub topics create $JOB_TOPIC \
- --project $PROJECT_ID
-fi
-# Checking if the given scheduler job exists
-if gcloud scheduler jobs describe $SCHEDULER_JOB --project $PROJECT_ID ;
- then
- gcloud scheduler jobs update pubsub $SCHEDULER_JOB \
- --schedule "$JOB_SCHEDULE" \
- --topic $JOB_TOPIC \
- --message-body "$MESSAGE" \
- --project $PROJECT_ID
- else
- gcloud scheduler jobs create pubsub $SCHEDULER_JOB \
- --schedule "$JOB_SCHEDULE" \
- --topic $JOB_TOPIC \
- --message-body "$MESSAGE" \
- --project $PROJECT_ID
-fi
-
-gcloud functions deploy base-image-build \
- --entry-point $ENTRY_POINT \
- --trigger-topic $JOB_TOPIC \
- --runtime python37 \
- --project $PROJECT_ID
diff --git a/infra/build/functions/base_image/requirements.txt b/infra/build/functions/base_image/requirements.txt
deleted file mode 120000
index dc833dd4..00000000
--- a/infra/build/functions/base_image/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
-../requirements.txt \ No newline at end of file
diff --git a/infra/build/functions/base_image/main.py b/infra/build/functions/base_images.py
index f3f13224..f3f13224 100644
--- a/infra/build/functions/base_image/main.py
+++ b/infra/build/functions/base_images.py
diff --git a/infra/build/functions/build_base_images.py b/infra/build/functions/build_base_images.py
new file mode 120000
index 00000000..b048e4dc
--- /dev/null
+++ b/infra/build/functions/build_base_images.py
@@ -0,0 +1 @@
+../../gcb/build_base_images.py \ No newline at end of file
diff --git a/infra/build/functions/build_lib.py b/infra/build/functions/build_lib.py
new file mode 120000
index 00000000..3a075303
--- /dev/null
+++ b/infra/build/functions/build_lib.py
@@ -0,0 +1 @@
+../../gcb/build_lib.py \ No newline at end of file
diff --git a/infra/build/functions/build_project.py b/infra/build/functions/build_project.py
new file mode 120000
index 00000000..451e86b3
--- /dev/null
+++ b/infra/build/functions/build_project.py
@@ -0,0 +1 @@
+../../gcb/build_project.py \ No newline at end of file
diff --git a/infra/build/functions/datastore_entities.py b/infra/build/functions/datastore_entities.py
new file mode 100644
index 00000000..f8389112
--- /dev/null
+++ b/infra/build/functions/datastore_entities.py
@@ -0,0 +1,32 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+"""Cloud datastore entity classes."""
+from google.cloud import ndb
+
+
+# pylint: disable=too-few-public-methods
+class Project(ndb.Model):
+ """Represents an integrated OSS-Fuzz project."""
+ name = ndb.StringProperty()
+ schedule = ndb.StringProperty()
+ project_yaml_contents = ndb.TextProperty()
+ dockerfile_contents = ndb.TextProperty()
+
+
+# pylint: disable=too-few-public-methods
+class GitAuth(ndb.Model):
+ """Represents Github access token entity."""
+ access_token = ndb.StringProperty()
diff --git a/infra/build/functions/deploy.sh b/infra/build/functions/deploy.sh
new file mode 100755
index 00000000..f7c68d96
--- /dev/null
+++ b/infra/build/functions/deploy.sh
@@ -0,0 +1,112 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+BASE_IMAGE_JOB_TOPIC=schedule-base-image-build
+BASE_IMAGE_SCHEDULER_JOB=base-image-scheduler
+BASE_IMAGE_SCHEDULE="0 3 * * *"
+BASE_IMAGE_MESSAGE="Start base image build"
+
+BUILD_JOB_TOPIC=request-build
+
+SYNC_JOB_TOPIC=schedule-project-sync
+SYNC_SCHEDULER_JOB=sync-scheduler
+SYNC_JOB_SCHEDULE="*/30 * * * *"
+SYNC_MESSAGE="Start Sync"
+
+function deploy_pubsub_topic {
+ topic=$1
+ project=$2
+
+ if ! gcloud pubsub topics describe $topic --project $project ;
+ then
+ gcloud pubsub topics create $topic \
+ --project $project
+ fi
+}
+
+function deploy_scheduler {
+ scheduler_name=$1
+ schedule="$2"
+ topic=$3
+ message="$4"
+ project=$5
+
+ if gcloud scheduler jobs describe $scheduler_name --project $project ;
+ then
+ gcloud scheduler jobs update pubsub $scheduler_name \
+ --schedule "$schedule" \
+ --topic $topic \
+ --message-body "$message" \
+ --project $project
+ else
+ gcloud scheduler jobs create pubsub $scheduler_name \
+ --schedule "$schedule" \
+ --topic $topic \
+ --message-body "$message" \
+ --project $project
+ fi
+}
+
+function deploy_cloud_function {
+ name=$1
+ entry_point=$2
+ topic=$3
+ project=$4
+
+ gcloud functions deploy $name \
+ --entry-point $entry_point \
+ --trigger-topic $topic \
+ --runtime python37 \
+ --project $project \
+ --timeout 540
+}
+
+if [ "$1" ]; then
+ PROJECT_ID=$1
+else
+ echo -e "\n Usage ./deploy.sh my-project-name"; exit;
+fi
+
+deploy_pubsub_topic $BUILD_JOB_TOPIC $PROJECT_ID
+deploy_pubsub_topic $SYNC_JOB_TOPIC $PROJECT_ID
+deploy_pubsub_topic $BASE_IMAGE_JOB_TOPIC $PROJECT_ID
+
+deploy_scheduler $SYNC_SCHEDULER_JOB \
+ "$SYNC_JOB_SCHEDULE" \
+ $SYNC_JOB_TOPIC \
+ "$SYNC_MESSAGE" \
+ $PROJECT_ID
+
+deploy_scheduler $BASE_IMAGE_SCHEDULER_JOB \
+ "$BASE_IMAGE_SCHEDULE" \
+ $BASE_IMAGE_JOB_TOPIC \
+ "$BASE_IMAGE_MESSAGE" \
+ $PROJECT_ID
+
+deploy_cloud_function sync \
+ project_sync \
+ $SYNC_JOB_TOPIC \
+ $PROJECT_ID
+
+deploy_cloud_function base-image-build \
+ build_base_images \
+ $BASE_IMAGE_JOB_TOPIC \
+ $PROJECT_ID
+
+deploy_cloud_function request-build \
+ build_project \
+ $BUILD_JOB_TOPIC \
+ $PROJECT_ID
diff --git a/infra/build/functions/expected_build_steps.json b/infra/build/functions/expected_build_steps.json
new file mode 100644
index 00000000..a2d8bb95
--- /dev/null
+++ b/infra/build/functions/expected_build_steps.json
@@ -0,0 +1,330 @@
+[
+ {
+ "args": [
+ "clone",
+ "https://github.com/google/oss-fuzz.git"
+ ],
+ "name": "gcr.io/cloud-builders/git"
+ },
+ {
+ "name": "gcr.io/cloud-builders/docker",
+ "args": [
+ "build",
+ "-t",
+ "gcr.io/oss-fuzz/test-project",
+ "."
+ ],
+ "dir": "oss-fuzz/projects/test-project"
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "srcmap > /workspace/srcmap.json && cat /workspace/srcmap.json"
+ ],
+ "env": [
+ "OSSFUZZ_REVISION=$REVISION_ID",
+ "FUZZING_LANGUAGE=c++"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/msan-builder",
+ "args": [
+ "bash",
+ "-c",
+ "cp -r /msan /workspace"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "env": [
+ "FUZZING_ENGINE=libfuzzer",
+ "SANITIZER=address",
+ "OUT=/workspace/out/address",
+ "MSAN_LIBS_PATH=/workspace/msan",
+ "ARCHITECTURE=x86_64",
+ "FUZZING_LANGUAGE=c++"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/address && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "FUZZING_ENGINE=libfuzzer",
+ "SANITIZER=address",
+ "OUT=/workspace/out/address",
+ "MSAN_LIBS_PATH=/workspace/msan",
+ "ARCHITECTURE=x86_64",
+ "FUZZING_LANGUAGE=c++"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "test_all || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "FUZZING_ENGINE=libfuzzer",
+ "SANITIZER=address",
+ "OUT=/workspace/out/address",
+ "MSAN_LIBS_PATH=/workspace/msan",
+ "ARCHITECTURE=x86_64",
+ "FUZZING_LANGUAGE=c++"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "targets_list > /workspace/targets.list.address"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "cd /workspace/out/address && zip -r test-project-address-202001010000.zip *"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/srcmap.json",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/out/address/test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/targets.list.address",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/cloud-builders/curl",
+ "args": [
+ "-H",
+ "Content-Type: text/plain",
+ "-X",
+ "PUT",
+ "-d",
+ "test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /workspace/out/address"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "env": [
+ "FUZZING_ENGINE=afl",
+ "SANITIZER=address",
+ "OUT=/workspace/out/address",
+ "MSAN_LIBS_PATH=/workspace/msan",
+ "ARCHITECTURE=x86_64",
+ "FUZZING_LANGUAGE=c++"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/address && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine afl --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "FUZZING_ENGINE=afl",
+ "SANITIZER=address",
+ "OUT=/workspace/out/address",
+ "MSAN_LIBS_PATH=/workspace/msan",
+ "ARCHITECTURE=x86_64",
+ "FUZZING_LANGUAGE=c++"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "test_all || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine afl --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine afl --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "FUZZING_ENGINE=afl",
+ "SANITIZER=address",
+ "OUT=/workspace/out/address",
+ "MSAN_LIBS_PATH=/workspace/msan",
+ "ARCHITECTURE=x86_64",
+ "FUZZING_LANGUAGE=c++"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "targets_list > /workspace/targets.list.address"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "cd /workspace/out/address && zip -r test-project-address-202001010000.zip *"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/srcmap.json",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/out/address/test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/targets.list.address",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/cloud-builders/curl",
+ "args": [
+ "-H",
+ "Content-Type: text/plain",
+ "-X",
+ "PUT",
+ "-d",
+ "test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /workspace/out/address"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "env": [
+ "FUZZING_ENGINE=honggfuzz",
+ "SANITIZER=address",
+ "OUT=/workspace/out/address",
+ "MSAN_LIBS_PATH=/workspace/msan",
+ "ARCHITECTURE=x86_64",
+ "FUZZING_LANGUAGE=c++"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/address && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine honggfuzz --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "FUZZING_ENGINE=honggfuzz",
+ "SANITIZER=address",
+ "OUT=/workspace/out/address",
+ "MSAN_LIBS_PATH=/workspace/msan",
+ "ARCHITECTURE=x86_64",
+ "FUZZING_LANGUAGE=c++"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "test_all || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine honggfuzz --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine honggfuzz --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "FUZZING_ENGINE=honggfuzz",
+ "SANITIZER=address",
+ "OUT=/workspace/out/address",
+ "MSAN_LIBS_PATH=/workspace/msan",
+ "ARCHITECTURE=x86_64",
+ "FUZZING_LANGUAGE=c++"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "targets_list > /workspace/targets.list.address"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "cd /workspace/out/address && zip -r test-project-address-202001010000.zip *"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/srcmap.json",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/out/address/test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/targets.list.address",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/cloud-builders/curl",
+ "args": [
+ "-H",
+ "Content-Type: text/plain",
+ "-X",
+ "PUT",
+ "-d",
+ "test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /workspace/out/address"
+ ]
+ }
+] \ No newline at end of file
diff --git a/infra/build/functions/main.py b/infra/build/functions/main.py
new file mode 100644
index 00000000..04fb024a
--- /dev/null
+++ b/infra/build/functions/main.py
@@ -0,0 +1,35 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+"""Cloud functions for build infrastructure."""
+
+import base_images
+import sync
+import request_build
+
+
+def build_project(event, context):
+ """Entry point for cloud function to requesting project builds."""
+ request_build.request_build(event, context)
+
+
+def project_sync(event, context):
+ """Entry point for cloud function that syncs projects from github."""
+ sync.sync(event, context)
+
+
+def build_base_images(event, context):
+ """Entry point for cloud function that builds base images."""
+ base_images.base_builder(event, context)
diff --git a/infra/build/functions/request_build.py b/infra/build/functions/request_build.py
new file mode 100644
index 00000000..2a76d69e
--- /dev/null
+++ b/infra/build/functions/request_build.py
@@ -0,0 +1,87 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+"""Cloud function to request builds."""
+import base64
+import logging
+import sys
+
+import google.auth
+from googleapiclient.discovery import build
+from google.cloud import ndb
+
+import build_lib
+import build_project
+from datastore_entities import Project
+
+BASE_PROJECT = 'oss-fuzz-base'
+
+
+def get_project_data(project_name):
+ """Retrieve project metadata from datastore."""
+ with ndb.Client().context():
+ query = Project.query(Project.name == project_name)
+ project = query.get()
+ if project is None:
+ raise RuntimeError(
+ 'Project {0} not available in cloud datastore'.format(project_name))
+ project_yaml_contents = project.project_yaml_contents
+ dockerfile_lines = project.dockerfile_contents.split('\n')
+
+ return (project_yaml_contents, dockerfile_lines)
+
+
+def get_build_steps(project_name, image_project, base_images_project):
+ """Retrieve build steps."""
+ project_yaml_contents, dockerfile_lines = get_project_data(project_name)
+ build_steps = build_project.get_build_steps(project_name,
+ project_yaml_contents,
+ dockerfile_lines, image_project,
+ base_images_project)
+ return build_steps
+
+
+# pylint: disable=no-member
+def request_build(event, context):
+ """Entry point for cloud function to request builds."""
+ del context #unused
+ if 'data' in event:
+ project_name = base64.b64decode(event['data']).decode('utf-8')
+ else:
+ logging.error('Project name missing from payload')
+ sys.exit(1)
+
+ credentials, image_project = google.auth.default()
+ build_steps = get_build_steps(project_name, image_project, BASE_PROJECT)
+
+ build_body = {
+ 'steps': build_steps,
+ 'timeout': str(build_lib.BUILD_TIMEOUT) + 's',
+ 'options': {
+ 'machineType': 'N1_HIGHCPU_32'
+ },
+ 'tags': [project_name + '-fuzzing',],
+ }
+
+ cloudbuild = build('cloudbuild',
+ 'v1',
+ credentials=credentials,
+ cache_discovery=False)
+ build_info = cloudbuild.projects().builds().create(projectId=image_project,
+ body=build_body).execute()
+ build_id = build_info['metadata']['build']['id']
+
+ logging.info('Build ID: %s', build_id)
+ logging.info('Logs: %s', build_project.get_logs_url(build_id, image_project))
diff --git a/infra/build/functions/request_build_test.py b/infra/build/functions/request_build_test.py
new file mode 100644
index 00000000..a436a30a
--- /dev/null
+++ b/infra/build/functions/request_build_test.py
@@ -0,0 +1,93 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+"""Unit tests for Cloud Function request builds which builds projects."""
+import json
+import datetime
+import os
+import unittest
+from unittest import mock
+
+from google.cloud import ndb
+
+from datastore_entities import Project
+from request_build import get_build_steps
+import test_utils
+
+
+# pylint: disable=arguments-differ
+class SpoofedDatetime(datetime.datetime):
+ """Mocking Datetime class for now() function."""
+
+ @classmethod
+ def now(cls):
+ return datetime.datetime(2020, 1, 1, 0, 0, 0)
+
+
+class TestRequestBuilds(unittest.TestCase):
+ """Unit tests for sync."""
+
+ @classmethod
+ def setUpClass(cls):
+ cls.ds_emulator = test_utils.start_datastore_emulator()
+ test_utils.wait_for_emulator_ready(cls.ds_emulator, 'datastore',
+ test_utils.DATASTORE_READY_INDICATOR)
+ os.environ['DATASTORE_EMULATOR_HOST'] = 'localhost:' + str(
+ test_utils.DATASTORE_EMULATOR_PORT)
+ os.environ['GOOGLE_CLOUD_PROJECT'] = test_utils.TEST_PROJECT_ID
+ os.environ['DATASTORE_DATASET'] = test_utils.TEST_PROJECT_ID
+ os.environ['GCP_PROJECT'] = 'test-project'
+ os.environ['FUNCTION_REGION'] = 'us-central1'
+
+ def setUp(self):
+ test_utils.reset_ds_emulator()
+
+ @mock.patch('build_lib.get_signed_url', return_value='test_url')
+ @mock.patch('datetime.datetime')
+ def test_get_build_steps(self, mocked_time, mocked_url):
+ """Test for get_build_steps."""
+ del mocked_url, mocked_time
+ datetime.datetime = SpoofedDatetime
+ project_yaml_contents = 'language: c++\nsanitizers:\n - address\narchitectures:\n - x86_64\n'
+ image_project = 'oss-fuzz'
+ base_images_project = 'oss-fuzz-base'
+ testcase_path = os.path.join(os.path.dirname(__file__),
+ 'expected_build_steps.json')
+ with open(testcase_path) as testcase_file:
+ expected_build_steps = json.load(testcase_file)
+
+ with ndb.Client().context():
+ Project(name='test-project',
+ project_yaml_contents=project_yaml_contents,
+ dockerfile_contents='test line').put()
+
+ build_steps = get_build_steps('test-project', image_project,
+ base_images_project)
+ self.assertEqual(build_steps, expected_build_steps)
+
+ def test_get_build_steps_no_project(self):
+ """Test for when project isn't available in datastore."""
+ with ndb.Client().context():
+ self.assertRaises(RuntimeError, get_build_steps, 'test-project',
+ 'oss-fuzz', 'oss-fuzz-base')
+
+ @classmethod
+ def tearDownClass(cls):
+ # TODO: replace this with a cleaner way of killing the process
+ test_utils.cleanup_emulator(cls.ds_emulator)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/infra/build/functions/sync/main.py b/infra/build/functions/sync.py
index 5524bea4..8ceabe68 100644
--- a/infra/build/functions/sync/main.py
+++ b/infra/build/functions/sync.py
@@ -26,6 +26,9 @@ from google.api_core import exceptions
from google.cloud import ndb
from google.cloud import scheduler_v1
+from datastore_entities import GitAuth
+from datastore_entities import Project
+
VALID_PROJECT_NAME = re.compile(r'^[a-zA-Z0-9_-]+$')
DEFAULT_BUILDS_PER_DAY = 1
MAX_BUILDS_PER_DAY = 4
@@ -38,21 +41,6 @@ class ProjectYamlError(Exception):
"""Error in project.yaml format."""
-# pylint: disable=too-few-public-methods
-class Project(ndb.Model):
- """Represents an integrated OSS-Fuzz project."""
- name = ndb.StringProperty()
- schedule = ndb.StringProperty()
- project_yaml_contents = ndb.TextProperty()
- dockerfile_contents = ndb.TextProperty()
-
-
-# pylint: disable=too-few-public-methods
-class GitAuth(ndb.Model):
- """Represents Github access token entity."""
- access_token = ndb.StringProperty()
-
-
def create_scheduler(cloud_scheduler_client, project_name, schedule):
"""Creates schedulers for new projects."""
project_id = os.environ.get('GCP_PROJECT')
@@ -220,9 +208,8 @@ def get_access_token():
def sync(event, context):
"""Sync projects with cloud datastore."""
del event, context #unused
- client = ndb.Client()
- with client.context():
+ with ndb.Client().context():
github_client = Github(get_access_token())
repo = github_client.get_repo('google/oss-fuzz')
projects = get_projects(repo)
diff --git a/infra/build/functions/sync/deploy.sh b/infra/build/functions/sync/deploy.sh
deleted file mode 100755
index d3279464..00000000
--- a/infra/build/functions/sync/deploy.sh
+++ /dev/null
@@ -1,64 +0,0 @@
-# Copyright 2020 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-
-SCHEDULE_JOB_TOPIC=schedule-project-sync
-BUILD_JOB_TOPIC=request-build
-SCHEDULER_JOB=sync-scheduler
-JOB_SCHEDULE="*/30 * * * *"
-MESSAGE="Start Sync"
-ENTRY_POINT=sync
-
-if [ "$1" ]; then
- PROJECT_ID=$1
-else
- echo -e "\n Usage ./deploy.sh my-project-name"; exit;
-fi
-
-# All the individual project build schedulers will rely on this
-if ! gcloud pubsub topics describe $BUILD_JOB_TOPIC --project $PROJECT_ID ;
- then
- gcloud pubsub topics create $BUILD_JOB_TOPIC \
- --project $PROJECT_ID
-fi
-
-# Checking if the given pubsub topic exists
-if ! gcloud pubsub topics describe $SCHEDULE_JOB_TOPIC --project $PROJECT_ID ;
- then
- gcloud pubsub topics create $SCHEDULE_JOB_TOPIC \
- --project $PROJECT_ID
-fi
-# Checking if the given scheduler job exists
-if gcloud scheduler jobs describe $SCHEDULER_JOB --project $PROJECT_ID ;
- then
- gcloud scheduler jobs update pubsub $SCHEDULER_JOB \
- --schedule "$JOB_SCHEDULE" \
- --topic $SCHEDULE_JOB_TOPIC \
- --message-body "$MESSAGE" \
- --project $PROJECT_ID
- else
- gcloud scheduler jobs create pubsub $SCHEDULER_JOB \
- --schedule "$JOB_SCHEDULE" \
- --topic $SCHEDULE_JOB_TOPIC \
- --message-body "$MESSAGE" \
- --project $PROJECT_ID
-fi
-
-gcloud functions deploy sync \
- --entry-point $ENTRY_POINT \
- --trigger-topic $SCHEDULE_JOB_TOPIC \
- --runtime python37 \
- --project $PROJECT_ID \
- --timeout 540
diff --git a/infra/build/functions/sync/requirements.txt b/infra/build/functions/sync/requirements.txt
deleted file mode 120000
index dc833dd4..00000000
--- a/infra/build/functions/sync/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
-../requirements.txt \ No newline at end of file
diff --git a/infra/build/functions/sync/main_test.py b/infra/build/functions/sync_test.py
index 315f0e90..203eab6c 100644
--- a/infra/build/functions/sync/main_test.py
+++ b/infra/build/functions/sync_test.py
@@ -17,69 +17,16 @@
and uploads them to the Cloud Datastore."""
import os
-import subprocess
-import threading
import unittest
-import requests
-
from google.cloud import ndb
-from main import get_access_token
-from main import get_projects
-from main import sync_projects
-from main import Project
-from main import ProjectMetadata
-
-_EMULATOR_TIMEOUT = 20
-_DATASTORE_READY_INDICATOR = b'is now running'
-_DATASTORE_EMULATOR_PORT = 8432
-_TEST_PROJECT_ID = 'test-project'
-
-
-def start_datastore_emulator():
- """Start Datastore emulator."""
- return subprocess.Popen([
- 'gcloud',
- 'beta',
- 'emulators',
- 'datastore',
- 'start',
- '--consistency=1.0',
- '--host-port=localhost:' + str(_DATASTORE_EMULATOR_PORT),
- '--project=' + _TEST_PROJECT_ID,
- '--no-store-on-disk',
- ],
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
-
-
-def _wait_for_emulator_ready(proc,
- emulator,
- indicator,
- timeout=_EMULATOR_TIMEOUT):
- """Wait for emulator to be ready."""
-
- def _read_thread(proc, ready_event):
- """Thread to continuously read from the process stdout."""
- ready = False
- while True:
- line = proc.stdout.readline()
- if not line:
- break
- if not ready and indicator in line:
- ready = True
- ready_event.set()
-
- # Wait for process to become ready.
- ready_event = threading.Event()
- thread = threading.Thread(target=_read_thread, args=(proc, ready_event))
- thread.daemon = True
- thread.start()
- if not ready_event.wait(timeout):
- raise RuntimeError(
- '{} emulator did not get ready in time.'.format(emulator))
- return thread
+from datastore_entities import Project
+from sync import get_access_token
+from sync import get_projects
+from sync import ProjectMetadata
+from sync import sync_projects
+import test_utils
# pylint: disable=too-few-public-methods
@@ -151,27 +98,24 @@ class TestDataSync(unittest.TestCase):
@classmethod
def setUpClass(cls):
- ds_emulator = start_datastore_emulator()
- _wait_for_emulator_ready(ds_emulator, 'datastore',
- _DATASTORE_READY_INDICATOR)
+ cls.ds_emulator = test_utils.start_datastore_emulator()
+ test_utils.wait_for_emulator_ready(cls.ds_emulator, 'datastore',
+ test_utils.DATASTORE_READY_INDICATOR)
os.environ['DATASTORE_EMULATOR_HOST'] = 'localhost:' + str(
- _DATASTORE_EMULATOR_PORT)
- os.environ['GOOGLE_CLOUD_PROJECT'] = _TEST_PROJECT_ID
- os.environ['DATASTORE_DATASET'] = _TEST_PROJECT_ID
+ test_utils.DATASTORE_EMULATOR_PORT)
+ os.environ['GOOGLE_CLOUD_PROJECT'] = test_utils.TEST_PROJECT_ID
+ os.environ['DATASTORE_DATASET'] = test_utils.TEST_PROJECT_ID
os.environ['GCP_PROJECT'] = 'test-project'
os.environ['FUNCTION_REGION'] = 'us-central1'
def setUp(self):
- req = requests.post(
- 'http://localhost:{}/reset'.format(_DATASTORE_EMULATOR_PORT))
- req.raise_for_status()
+ test_utils.reset_ds_emulator()
def test_sync_projects_update(self):
"""Testing sync_projects() updating a schedule."""
- client = ndb.Client()
cloud_scheduler_client = CloudSchedulerClient()
- with client.context():
+ with ndb.Client().context():
Project(name='test1',
schedule='0 8 * * *',
project_yaml_contents='',
@@ -195,10 +139,9 @@ class TestDataSync(unittest.TestCase):
def test_sync_projects_create(self):
""""Testing sync_projects() creating new schedule."""
- client = ndb.Client()
cloud_scheduler_client = CloudSchedulerClient()
- with client.context():
+ with ndb.Client().context():
Project(name='test1',
schedule='0 8 * * *',
project_yaml_contents='',
@@ -218,10 +161,9 @@ class TestDataSync(unittest.TestCase):
def test_sync_projects_delete(self):
"""Testing sync_projects() deleting."""
- client = ndb.Client()
cloud_scheduler_client = CloudSchedulerClient()
- with client.context():
+ with ndb.Client().context():
Project(name='test1',
schedule='0 8 * * *',
project_yaml_contents='',
@@ -342,17 +284,14 @@ class TestDataSync(unittest.TestCase):
def test_get_access_token(self):
"""Testing get_access_token()."""
- client = ndb.Client()
-
- with client.context():
+ with ndb.Client().context():
self.assertRaises(RuntimeError, get_access_token)
@classmethod
def tearDownClass(cls):
# TODO: replace this with a cleaner way of killing the process
- os.system('pkill -f datastore')
+ test_utils.cleanup_emulator(cls.ds_emulator)
if __name__ == '__main__':
-
unittest.main(exit=False)
diff --git a/infra/build/functions/test_utils.py b/infra/build/functions/test_utils.py
new file mode 100644
index 00000000..d6859a78
--- /dev/null
+++ b/infra/build/functions/test_utils.py
@@ -0,0 +1,84 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+"""Utility functions for testing cloud functions."""
+import os
+import subprocess
+import threading
+
+import requests
+
+DATASTORE_READY_INDICATOR = b'is now running'
+DATASTORE_EMULATOR_PORT = 8432
+EMULATOR_TIMEOUT = 20
+TEST_PROJECT_ID = 'test-project'
+
+
+def start_datastore_emulator():
+ """Start Datastore emulator."""
+ return subprocess.Popen([
+ 'gcloud',
+ 'beta',
+ 'emulators',
+ 'datastore',
+ 'start',
+ '--consistency=1.0',
+ '--host-port=localhost:' + str(DATASTORE_EMULATOR_PORT),
+ '--project=' + TEST_PROJECT_ID,
+ '--no-store-on-disk',
+ ],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+
+
+def wait_for_emulator_ready(proc,
+ emulator,
+ indicator,
+ timeout=EMULATOR_TIMEOUT):
+ """Wait for emulator to be ready."""
+
+ def _read_thread(proc, ready_event):
+ """Thread to continuously read from the process stdout."""
+ ready = False
+ while True:
+ line = proc.stdout.readline()
+ if not line:
+ break
+ if not ready and indicator in line:
+ ready = True
+ ready_event.set()
+
+ # Wait for process to become ready.
+ ready_event = threading.Event()
+ thread = threading.Thread(target=_read_thread, args=(proc, ready_event))
+ thread.daemon = True
+ thread.start()
+ if not ready_event.wait(timeout):
+ raise RuntimeError(
+ '{} emulator did not get ready in time.'.format(emulator))
+ return thread
+
+
+def reset_ds_emulator():
+  """Reset the datastore emulator by deleting all stored entities."""
+ req = requests.post(
+ 'http://localhost:{}/reset'.format(DATASTORE_EMULATOR_PORT))
+ req.raise_for_status()
+
+
+def cleanup_emulator(ds_emulator):
+  """Clean up the system processes created by the datastore emulator."""
+  del ds_emulator  # TODO: find a better way to clean up the emulator.
+ os.system('pkill -f datastore')
diff --git a/infra/gcb/build_and_run_coverage.py b/infra/gcb/build_and_run_coverage.py
index 6357192a..fc4177f0 100644
--- a/infra/gcb/build_and_run_coverage.py
+++ b/infra/gcb/build_and_run_coverage.py
@@ -1,15 +1,27 @@
 #!/usr/bin/python2
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
"""Starts and runs coverage build on Google Cloud Builder.
-
Usage: build_and_run_coverage.py <project_dir>
"""
import datetime
import json
import os
-import requests
import sys
-import urlparse
import build_lib
import build_project
@@ -45,30 +57,31 @@ def skip_build(message):
# Since the script should print build_id, print '0' as a special value.
print('0')
- exit(0)
+ sys.exit(0)
def usage():
+ """Exit with code 1 and display syntax to use this file."""
sys.stderr.write("Usage: " + sys.argv[0] + " <project_dir>\n")
- exit(1)
+ sys.exit(1)
-def get_build_steps(project_dir):
- project_name = os.path.basename(project_dir)
- project_yaml = build_project.load_project_yaml(project_dir)
+# pylint: disable=too-many-locals
+def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
+ image_project, base_images_project):
+ """Returns build steps for project."""
+ project_yaml = build_project.load_project_yaml(project_name,
+ project_yaml_file,
+ image_project)
if project_yaml['disabled']:
skip_build('Project "%s" is disabled.' % project_name)
- build_script_path = os.path.join(project_dir, 'build.sh')
- if os.path.exists(build_script_path):
- with open(build_script_path) as fh:
- if project_yaml['language'] not in LANGUAGES_WITH_COVERAGE_SUPPORT:
- skip_build(('Project "{project_name}" is written in "{language}", '
- 'coverage is not supported yet.').format(
- project_name=project_name,
- language=project_yaml['language']))
+ if project_yaml['language'] not in LANGUAGES_WITH_COVERAGE_SUPPORT:
+ skip_build(('Project "{project_name}" is written in "{language}", '
+ 'coverage is not supported yet.').format(
+ project_name=project_name,
+ language=project_yaml['language']))
- dockerfile_path = os.path.join(project_dir, 'Dockerfile')
name = project_yaml['name']
image = project_yaml['image']
language = project_yaml['language']
@@ -81,7 +94,7 @@ def get_build_steps(project_dir):
env.append('OUT=' + out)
env.append('FUZZING_LANGUAGE=' + language)
- workdir = build_project.workdir_from_dockerfile(dockerfile_path)
+ workdir = build_project.workdir_from_dockerfile(dockerfile_lines)
if not workdir:
workdir = '/src'
@@ -132,7 +145,7 @@ def get_build_steps(project_dir):
coverage_env.append('FULL_SUMMARY_PER_TARGET=1')
build_steps.append({
- 'name': 'gcr.io/oss-fuzz-base/base-runner',
+ 'name': 'gcr.io/{0}/base-runner'.format(base_images_project),
'env': coverage_env,
'args': [
'bash', '-c',
@@ -191,7 +204,7 @@ def get_build_steps(project_dir):
# Upload the fuzzer logs. Delete the old ones just in case
upload_fuzzer_logs_url = UPLOAD_URL_FORMAT.format(project=project_name,
type='logs',
- date=report_date),
+ date=report_date)
build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_logs_url))
build_steps.append({
'name':
@@ -244,12 +257,24 @@ def get_build_steps(project_dir):
def main():
+ """Build and run coverage for projects."""
if len(sys.argv) != 2:
usage()
+ image_project = 'oss-fuzz'
+ base_images_project = 'oss-fuzz-base'
project_dir = sys.argv[1].rstrip(os.path.sep)
project_name = os.path.basename(project_dir)
- steps = get_build_steps(project_dir)
+ dockerfile_path = os.path.join(project_dir, 'Dockerfile')
+ project_yaml_path = os.path.join(project_dir, 'project.yaml')
+
+ with open(dockerfile_path) as docker_file:
+ dockerfile_lines = docker_file.readlines()
+
+ with open(project_yaml_path) as project_yaml_file:
+ steps = get_build_steps(project_name, project_yaml_file, dockerfile_lines,
+ image_project, base_images_project)
+
build_project.run_build(steps, project_name, COVERAGE_BUILD_TAG)
diff --git a/infra/gcb/build_lib.py b/infra/gcb/build_lib.py
index 0fe22bbd..007579ef 100644
--- a/infra/gcb/build_lib.py
+++ b/infra/gcb/build_lib.py
@@ -1,13 +1,30 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
"""Utility module for Google Cloud Build scripts."""
import base64
import collections
import os
-import requests
+import six.moves.urllib.parse as urlparse
import sys
import time
-import urllib
-import urlparse
+import requests
+
+import google.auth
+import googleapiclient.discovery
from oauth2client.service_account import ServiceAccountCredentials
BUILD_TIMEOUT = 12 * 60 * 60
@@ -55,16 +72,19 @@ ENGINE_INFO = {
def get_targets_list_filename(sanitizer):
+ """Returns target list filename."""
return TARGETS_LIST_BASENAME + '.' + sanitizer
def get_targets_list_url(bucket, project, sanitizer):
+ """Returns target list url."""
filename = get_targets_list_filename(sanitizer)
url = GCS_UPLOAD_URL_FORMAT.format(bucket, project, filename)
return url
def _get_targets_list(project_name):
+ """Returns target list."""
# libFuzzer ASan is the default configuration, get list of targets from it.
url = get_targets_list_url(ENGINE_INFO['libfuzzer'].upload_bucket,
project_name, 'address')
@@ -80,22 +100,41 @@ def _get_targets_list(project_name):
return response.text.split()
+# pylint: disable=no-member
def get_signed_url(path, method='PUT', content_type=''):
+ """Returns signed url."""
timestamp = int(time.time() + BUILD_TIMEOUT)
blob = '{0}\n\n{1}\n{2}\n{3}'.format(method, content_type, timestamp, path)
- creds = ServiceAccountCredentials.from_json_keyfile_name(
- os.environ['GOOGLE_APPLICATION_CREDENTIALS'])
- client_id = creds.service_account_email
- signature = base64.b64encode(creds.sign_blob(blob)[1])
+ service_account_path = os.environ.get('GOOGLE_APPLICATION_CREDENTIALS')
+ if service_account_path:
+ creds = ServiceAccountCredentials.from_json_keyfile_name(
+ os.environ['GOOGLE_APPLICATION_CREDENTIALS'])
+ client_id = creds.service_account_email
+ signature = base64.b64encode(creds.sign_blob(blob)[1])
+ else:
+ credentials, project = google.auth.default()
+ iam = googleapiclient.discovery.build('iamcredentials',
+ 'v1',
+ credentials=credentials,
+ cache_discovery=False)
+ client_id = project + '@appspot.gserviceaccount.com'
+ service_account = 'projects/-/serviceAccounts/{0}'.format(client_id)
+ response = iam.projects().serviceAccounts().signBlob(
+ name=service_account,
+ body={
+ 'delegates': [],
+ 'payload': base64.b64encode(blob.encode('utf-8')).decode('utf-8'),
+ }).execute()
+ signature = response['signedBlob']
+
values = {
'GoogleAccessId': client_id,
'Expires': timestamp,
'Signature': signature,
}
-
return ('https://storage.googleapis.com{0}?'.format(path) +
- urllib.urlencode(values))
+ urlparse.urlencode(values))
def download_corpora_steps(project_name):
diff --git a/infra/gcb/build_project.py b/infra/gcb/build_project.py
index e9d0480a..c7db87d6 100644
--- a/infra/gcb/build_project.py
+++ b/infra/gcb/build_project.py
@@ -1,3 +1,18 @@
 #!/usr/bin/python2
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
"""Starts project build on Google Cloud Builder.
@@ -11,6 +26,8 @@ import json
import os
import re
import sys
+
+import six
import yaml
from oauth2client.client import GoogleCredentials
@@ -43,28 +60,27 @@ LATEST_VERSION_CONTENT_TYPE = 'text/plain'
def usage():
+ """Exit with code 1 and display syntax to use this file."""
sys.stderr.write('Usage: ' + sys.argv[0] + ' <project_dir>\n')
- exit(1)
+ sys.exit(1)
-def load_project_yaml(project_dir):
- project_name = os.path.basename(project_dir)
- project_yaml_path = os.path.join(project_dir, 'project.yaml')
- with open(project_yaml_path) as f:
- project_yaml = yaml.safe_load(f)
- project_yaml.setdefault('disabled', False)
- project_yaml.setdefault('name', project_name)
- project_yaml.setdefault('image', 'gcr.io/oss-fuzz/' + project_name)
- project_yaml.setdefault('architectures', DEFAULT_ARCHITECTURES)
- project_yaml.setdefault('sanitizers', DEFAULT_SANITIZERS)
- project_yaml.setdefault('fuzzing_engines', DEFAULT_ENGINES)
- project_yaml.setdefault('run_tests', True)
- project_yaml.setdefault('coverage_extra_args', '')
- project_yaml.setdefault('labels', {})
- return project_yaml
+def set_yaml_defaults(project_name, project_yaml, image_project):
+ """Set project.yaml's default parameters."""
+ project_yaml.setdefault('disabled', False)
+ project_yaml.setdefault('name', project_name)
+ project_yaml.setdefault('image',
+ 'gcr.io/{0}/{1}'.format(image_project, project_name))
+ project_yaml.setdefault('architectures', DEFAULT_ARCHITECTURES)
+ project_yaml.setdefault('sanitizers', DEFAULT_SANITIZERS)
+ project_yaml.setdefault('fuzzing_engines', DEFAULT_ENGINES)
+ project_yaml.setdefault('run_tests', True)
+ project_yaml.setdefault('coverage_extra_args', '')
+ project_yaml.setdefault('labels', {})
def is_supported_configuration(fuzzing_engine, sanitizer, architecture):
+ """Check if the given configuration is supported."""
fuzzing_engine_info = build_lib.ENGINE_INFO[fuzzing_engine]
if architecture == 'i386' and sanitizer != 'address':
return False
@@ -73,12 +89,13 @@ def is_supported_configuration(fuzzing_engine, sanitizer, architecture):
def get_sanitizers(project_yaml):
+ """Retrieve sanitizers from project.yaml."""
sanitizers = project_yaml['sanitizers']
assert isinstance(sanitizers, list)
processed_sanitizers = []
for sanitizer in sanitizers:
- if isinstance(sanitizer, basestring):
+ if isinstance(sanitizer, six.string_types):
processed_sanitizers.append(sanitizer)
elif isinstance(sanitizer, dict):
for key in sanitizer.iterkeys():
@@ -87,15 +104,11 @@ def get_sanitizers(project_yaml):
return processed_sanitizers
-def workdir_from_dockerfile(dockerfile):
+def workdir_from_dockerfile(dockerfile_lines):
"""Parse WORKDIR from the Dockerfile."""
- WORKDIR_REGEX = re.compile(r'\s*WORKDIR\s*([^\s]+)')
-
- with open(dockerfile) as f:
- lines = f.readlines()
-
- for line in lines:
- match = re.match(WORKDIR_REGEX, line)
+ workdir_regex = re.compile(r'\s*WORKDIR\s*([^\s]+)')
+ for line in dockerfile_lines:
+ match = re.match(workdir_regex, line)
if match:
# We need to escape '$' since they're used for subsitutions in Container
# Builer builds.
@@ -104,21 +117,29 @@ def workdir_from_dockerfile(dockerfile):
return None
-def get_build_steps(project_dir):
- project_yaml = load_project_yaml(project_dir)
- dockerfile_path = os.path.join(project_dir, 'Dockerfile')
+def load_project_yaml(project_name, project_yaml_file, image_project):
+ """Loads project yaml and sets default values."""
+ project_yaml = yaml.safe_load(project_yaml_file)
+ set_yaml_defaults(project_name, project_yaml, image_project)
+ return project_yaml
+
+
+# pylint: disable=too-many-locals
+def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
+ image_project, base_images_project):
+ """Returns build steps for project."""
+ project_yaml = load_project_yaml(project_name, project_yaml_file,
+ image_project)
name = project_yaml['name']
image = project_yaml['image']
language = project_yaml['language']
run_tests = project_yaml['run_tests']
-
- ts = datetime.datetime.now().strftime('%Y%m%d%H%M')
+ time_stamp = datetime.datetime.now().strftime('%Y%m%d%H%M')
build_steps = build_lib.project_image_steps(name, image, language)
-
# Copy over MSan instrumented libraries.
build_steps.append({
- 'name': 'gcr.io/oss-fuzz-base/msan-builder',
+ 'name': 'gcr.io/{0}/msan-builder'.format(base_images_project),
'args': [
'bash',
'-c',
@@ -136,7 +157,7 @@ def get_build_steps(project_dir):
env = CONFIGURATIONS['engine-' + fuzzing_engine][:]
env.extend(CONFIGURATIONS['sanitizer-' + sanitizer])
out = '/workspace/out/' + sanitizer
- stamped_name = '-'.join([name, sanitizer, ts])
+ stamped_name = '-'.join([name, sanitizer, time_stamp])
latest_version_file = '-'.join(
[name, sanitizer, LATEST_VERSION_FILENAME])
zip_file = stamped_name + '.zip'
@@ -144,6 +165,7 @@ def get_build_steps(project_dir):
bucket = build_lib.ENGINE_INFO[fuzzing_engine].upload_bucket
if architecture != 'x86_64':
bucket += '-' + architecture
+
upload_url = build_lib.get_signed_url(
build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name, zip_file))
srcmap_url = build_lib.get_signed_url(
@@ -163,7 +185,7 @@ def get_build_steps(project_dir):
env.append('ARCHITECTURE=' + architecture)
env.append('FUZZING_LANGUAGE=' + language)
- workdir = workdir_from_dockerfile(dockerfile_path)
+ workdir = workdir_from_dockerfile(dockerfile_lines)
if not workdir:
workdir = '/src'
@@ -202,7 +224,7 @@ def get_build_steps(project_dir):
# Patch dynamic libraries to use instrumented ones.
build_steps.append({
'name':
- 'gcr.io/oss-fuzz-base/msan-builder',
+ 'gcr.io/{0}/msan-builder'.format(base_images_project),
'args': [
'bash',
'-c',
@@ -231,7 +253,7 @@ def get_build_steps(project_dir):
# test binaries
{
'name':
- 'gcr.io/oss-fuzz-base/base-runner',
+ 'gcr.io/{0}/base-runner'.format(base_images_project),
'env':
env,
'args': [
@@ -255,7 +277,8 @@ def get_build_steps(project_dir):
})
if sanitizer == 'dataflow' and fuzzing_engine == 'dataflow':
- dataflow_steps = dataflow_post_build_steps(name, env)
+ dataflow_steps = dataflow_post_build_steps(name, env,
+ base_images_project)
if dataflow_steps:
build_steps.extend(dataflow_steps)
else:
@@ -265,7 +288,7 @@ def get_build_steps(project_dir):
# generate targets list
{
'name':
- 'gcr.io/oss-fuzz-base/base-runner',
+ 'gcr.io/{0}/base-runner'.format(base_images_project),
'env':
env,
'args': [
@@ -287,7 +310,7 @@ def get_build_steps(project_dir):
},
# upload srcmap
{
- 'name': 'gcr.io/oss-fuzz-base/uploader',
+ 'name': 'gcr.io/{0}/uploader'.format(base_images_project),
'args': [
'/workspace/srcmap.json',
srcmap_url,
@@ -295,7 +318,7 @@ def get_build_steps(project_dir):
},
# upload binaries
{
- 'name': 'gcr.io/oss-fuzz-base/uploader',
+ 'name': 'gcr.io/{0}/uploader'.format(base_images_project),
'args': [
os.path.join(out, zip_file),
upload_url,
@@ -304,7 +327,7 @@ def get_build_steps(project_dir):
# upload targets list
{
'name':
- 'gcr.io/oss-fuzz-base/uploader',
+ 'gcr.io/{0}/uploader'.format(base_images_project),
'args': [
'/workspace/{0}'.format(targets_list_filename),
targets_list_url,
@@ -327,14 +350,15 @@ def get_build_steps(project_dir):
return build_steps
-def dataflow_post_build_steps(project_name, env):
+def dataflow_post_build_steps(project_name, env, base_images_project):
+  """Returns dataflow post-build steps, or None if no corpora were found."""
steps = build_lib.download_corpora_steps(project_name)
if not steps:
return None
steps.append({
'name':
- 'gcr.io/oss-fuzz-base/base-runner',
+ 'gcr.io/{0}/base-runner'.format(base_images_project),
'env':
env + [
'COLLECT_DFT_TIMEOUT=2h',
@@ -355,13 +379,16 @@ def dataflow_post_build_steps(project_name, env):
return steps
-def get_logs_url(build_id):
- URL_FORMAT = ('https://console.developers.google.com/logs/viewer?'
- 'resource=build%2Fbuild_id%2F{0}&project=oss-fuzz')
- return URL_FORMAT.format(build_id)
+def get_logs_url(build_id, image_project='oss-fuzz'):
+ """Returns url where logs are displayed for the build."""
+ url_format = ('https://console.developers.google.com/logs/viewer?'
+ 'resource=build%2Fbuild_id%2F{0}&project={1}')
+ return url_format.format(build_id, image_project)
+# pylint: disable=no-member
def run_build(build_steps, project_name, tag):
+ """Run the build for given steps on cloud build."""
options = {}
if 'GCB_OPTIONS' in os.environ:
options = yaml.safe_load(os.environ['GCB_OPTIONS'])
@@ -375,7 +402,10 @@ def run_build(build_steps, project_name, tag):
}
credentials = GoogleCredentials.get_application_default()
- cloudbuild = build('cloudbuild', 'v1', credentials=credentials)
+ cloudbuild = build('cloudbuild',
+ 'v1',
+ credentials=credentials,
+ cache_discovery=False)
build_info = cloudbuild.projects().builds().create(projectId='oss-fuzz',
body=build_body).execute()
build_id = build_info['metadata']['build']['id']
@@ -385,13 +415,24 @@ def run_build(build_steps, project_name, tag):
def main():
+ """Build and run projects."""
if len(sys.argv) != 2:
usage()
+ image_project = 'oss-fuzz'
+ base_images_project = 'oss-fuzz-base'
project_dir = sys.argv[1].rstrip(os.path.sep)
- steps = get_build_steps(project_dir)
-
+ dockerfile_path = os.path.join(project_dir, 'Dockerfile')
+ project_yaml_path = os.path.join(project_dir, 'project.yaml')
project_name = os.path.basename(project_dir)
+
+ with open(dockerfile_path) as dockerfile:
+ dockerfile_lines = dockerfile.readlines()
+
+ with open(project_yaml_path) as project_yaml_file:
+ steps = get_build_steps(project_name, project_yaml_file, dockerfile_lines,
+ image_project, base_images_project)
+
run_build(steps, project_name, FUZZING_BUILD_TAG)