author    borenet <borenet@chromium.org>    2016-10-10 10:37:30 -0700
committer    Commit bot <commit-bot@chromium.org>    2016-10-10 10:37:30 -0700
commit 76bcd4f189b43e0235e496c0eddf7b3e9f4167f5 (patch)
tree 428325922b5eb4447164da9ac376113b4cb7853a /infra/bots
parent e17881889474270b39b03bd3def1ad4026548168 (diff)
RecreateSKPs: Don't download and use the .boto file
It's not needed for the new bucket.

BUG=skia:5843
GOLD_TRYBOT_URL= https://gold.skia.org/search?issue=2408793002
Review-Url: https://codereview.chromium.org/2408793002
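For context on the removal: the deleted recipe step fetched a .boto credentials file from the GCE project metadata server and handed it to the SKP capture step via AWS_CREDENTIAL_FILE and BOTO_CONFIG; the new upload bucket no longer needs that credential. A minimal standalone sketch of what the removed step did is below. The metadata URL and Metadata-Flavor header are taken from the removed code; the Python 3 urllib.request port and the /tmp/.boto destination are assumptions (the original ran under Python 2 with urllib2 and wrote to [SLAVE_BUILD]/tmp/.boto).

# Sketch of the removed "download boto file" step (assumed Python 3 port;
# the original recipe used urllib2 under Python 2).
import os
import urllib.request

# Metadata attribute URL as it appears in the removed recipe code.
BOTO_URL = 'http://metadata/computeMetadata/v1/project/attributes/boto-file'

# Placeholder destination; the recipe used [SLAVE_BUILD]/tmp/.boto.
dest_path = '/tmp/.boto'
dest_dir = os.path.dirname(dest_path)
if not os.path.exists(dest_dir):
    os.makedirs(dest_dir)

# GCE metadata requests must carry the Metadata-Flavor: Google header.
req = urllib.request.Request(BOTO_URL, headers={'Metadata-Flavor': 'Google'})
contents = urllib.request.urlopen(req).read()

# urlopen() returns bytes, so write the file in binary mode.
with open(dest_path, 'wb') as f:
    f.write(contents)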
Diffstat (limited to 'infra/bots')
-rw-r--r--  infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Nightly-RecreateSKPs_Canary.json  29
-rw-r--r--  infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Weekly-RecreateSKPs.json  29
-rw-r--r--  infra/bots/recipes/swarm_RecreateSKPs.py  31
3 files changed, 1 insertion(+), 88 deletions(-)
diff --git a/infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Nightly-RecreateSKPs_Canary.json b/infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Nightly-RecreateSKPs_Canary.json
index deb5d695e3..ef5134d956 100644
--- a/infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Nightly-RecreateSKPs_Canary.json
+++ b/infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Nightly-RecreateSKPs_Canary.json
@@ -187,33 +187,6 @@
"cmd": [
"python",
"-u",
- "\nimport os\nimport urllib2\n\nBOTO_URL = 'http://metadata/computeMetadata/v1/project/attributes/boto-file'\n\ndest_path = '[SLAVE_BUILD]/tmp/.boto'\ndest_dir = os.path.dirname(dest_path)\nif not os.path.exists(dest_dir):\n os.makedirs(dest_dir)\n\nreq = urllib2.Request(BOTO_URL, headers={'Metadata-Flavor': 'Google'})\ncontents = urllib2.urlopen(req).read()\n\nwith open(dest_path, 'w') as f:\n f.write(contents)\n"
- ],
- "name": "download boto file",
- "~followup_annotations": [
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@import os@@@",
- "@@@STEP_LOG_LINE@python.inline@import urllib2@@@",
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@BOTO_URL = 'http://metadata/computeMetadata/v1/project/attributes/boto-file'@@@",
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@dest_path = '[SLAVE_BUILD]/tmp/.boto'@@@",
- "@@@STEP_LOG_LINE@python.inline@dest_dir = os.path.dirname(dest_path)@@@",
- "@@@STEP_LOG_LINE@python.inline@if not os.path.exists(dest_dir):@@@",
- "@@@STEP_LOG_LINE@python.inline@ os.makedirs(dest_dir)@@@",
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@req = urllib2.Request(BOTO_URL, headers={'Metadata-Flavor': 'Google'})@@@",
- "@@@STEP_LOG_LINE@python.inline@contents = urllib2.urlopen(req).read()@@@",
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@with open(dest_path, 'w') as f:@@@",
- "@@@STEP_LOG_LINE@python.inline@ f.write(contents)@@@",
- "@@@STEP_LOG_END@python.inline@@@"
- ]
- },
- {
- "cmd": [
- "python",
- "-u",
"\nimport os, sys\nfrom common import chromium_utils # Error? See https://crbug.com/584783.\n\n\nif os.path.exists(sys.argv[1]):\n chromium_utils.RemoveDirectory(sys.argv[1])\n",
"[SLAVE_BUILD]/skp_output"
],
@@ -264,8 +237,6 @@
],
"cwd": "[CUSTOM_/_B_WORK]/skia",
"env": {
- "AWS_CREDENTIAL_FILE": "[SLAVE_BUILD]/tmp/.boto",
- "BOTO_CONFIG": "[SLAVE_BUILD]/tmp/.boto",
"CHROME_HEADLESS": "1",
"PATH": "[DEPOT_TOOLS]:%(PATH)s"
},
diff --git a/infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Weekly-RecreateSKPs.json b/infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Weekly-RecreateSKPs.json
index d5953ee14e..05e83a393c 100644
--- a/infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Weekly-RecreateSKPs.json
+++ b/infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Weekly-RecreateSKPs.json
@@ -187,33 +187,6 @@
"cmd": [
"python",
"-u",
- "\nimport os\nimport urllib2\n\nBOTO_URL = 'http://metadata/computeMetadata/v1/project/attributes/boto-file'\n\ndest_path = '[SLAVE_BUILD]/tmp/.boto'\ndest_dir = os.path.dirname(dest_path)\nif not os.path.exists(dest_dir):\n os.makedirs(dest_dir)\n\nreq = urllib2.Request(BOTO_URL, headers={'Metadata-Flavor': 'Google'})\ncontents = urllib2.urlopen(req).read()\n\nwith open(dest_path, 'w') as f:\n f.write(contents)\n"
- ],
- "name": "download boto file",
- "~followup_annotations": [
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@import os@@@",
- "@@@STEP_LOG_LINE@python.inline@import urllib2@@@",
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@BOTO_URL = 'http://metadata/computeMetadata/v1/project/attributes/boto-file'@@@",
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@dest_path = '[SLAVE_BUILD]/tmp/.boto'@@@",
- "@@@STEP_LOG_LINE@python.inline@dest_dir = os.path.dirname(dest_path)@@@",
- "@@@STEP_LOG_LINE@python.inline@if not os.path.exists(dest_dir):@@@",
- "@@@STEP_LOG_LINE@python.inline@ os.makedirs(dest_dir)@@@",
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@req = urllib2.Request(BOTO_URL, headers={'Metadata-Flavor': 'Google'})@@@",
- "@@@STEP_LOG_LINE@python.inline@contents = urllib2.urlopen(req).read()@@@",
- "@@@STEP_LOG_LINE@python.inline@@@@",
- "@@@STEP_LOG_LINE@python.inline@with open(dest_path, 'w') as f:@@@",
- "@@@STEP_LOG_LINE@python.inline@ f.write(contents)@@@",
- "@@@STEP_LOG_END@python.inline@@@"
- ]
- },
- {
- "cmd": [
- "python",
- "-u",
"\nimport os, sys\nfrom common import chromium_utils # Error? See https://crbug.com/584783.\n\n\nif os.path.exists(sys.argv[1]):\n chromium_utils.RemoveDirectory(sys.argv[1])\n",
"[SLAVE_BUILD]/skp_output"
],
@@ -265,8 +238,6 @@
],
"cwd": "[CUSTOM_/_B_WORK]/skia",
"env": {
- "AWS_CREDENTIAL_FILE": "[SLAVE_BUILD]/tmp/.boto",
- "BOTO_CONFIG": "[SLAVE_BUILD]/tmp/.boto",
"CHROME_HEADLESS": "1",
"PATH": "[DEPOT_TOOLS]:%(PATH)s"
},
diff --git a/infra/bots/recipes/swarm_RecreateSKPs.py b/infra/bots/recipes/swarm_RecreateSKPs.py
index 38fd8534b3..eb92484068 100644
--- a/infra/bots/recipes/swarm_RecreateSKPs.py
+++ b/infra/bots/recipes/swarm_RecreateSKPs.py
@@ -110,28 +110,6 @@ def RunSteps(api):
['ninja', '-C', out_dir, 'chrome'],
cwd=src_dir)
- # Download boto file (needed by recreate_skps.py) to tmp dir.
- boto_file = api.path['slave_build'].join('tmp', '.boto')
- api.python.inline(
- 'download boto file',
- """
-import os
-import urllib2
-
-BOTO_URL = 'http://metadata/computeMetadata/v1/project/attributes/boto-file'
-
-dest_path = '%s'
-dest_dir = os.path.dirname(dest_path)
-if not os.path.exists(dest_dir):
- os.makedirs(dest_dir)
-
-req = urllib2.Request(BOTO_URL, headers={'Metadata-Flavor': 'Google'})
-contents = urllib2.urlopen(req).read()
-
-with open(dest_path, 'w') as f:
- f.write(contents)
- """ % boto_file)
-
# Clean up the output dir.
output_dir = api.path['slave_build'].join('skp_output')
if api.path.exists(output_dir):
@@ -144,13 +122,6 @@ with open(dest_path, 'w') as f:
'CHROME_HEADLESS': '1',
'PATH': path_var,
}
- boto_env = {
- 'AWS_CREDENTIAL_FILE': boto_file,
- 'BOTO_CONFIG': boto_file,
- }
- recreate_skps_env = {}
- recreate_skps_env.update(env)
- recreate_skps_env.update(boto_env)
asset_dir = api.vars.infrabots_dir.join('assets', 'skp')
cmd = ['python', asset_dir.join('create.py'),
'--chrome_src_path', src_dir,
@@ -161,7 +132,7 @@ with open(dest_path, 'w') as f:
api.step('Recreate SKPs',
cmd=cmd,
cwd=api.vars.skia_dir,
- env=recreate_skps_env)
+ env=env)
# Upload the SKPs.
if 'Canary' not in api.properties['buildername']: