From 76bcd4f189b43e0235e496c0eddf7b3e9f4167f5 Mon Sep 17 00:00:00 2001 From: borenet Date: Mon, 10 Oct 2016 10:37:30 -0700 Subject: RecreateSKPs: Don't download and use the .boto file It's not needed for the new bucket. BUG=skia:5843 GOLD_TRYBOT_URL= https://gold.skia.org/search?issue=2408793002 Review-Url: https://codereview.chromium.org/2408793002 --- .../Housekeeper-Nightly-RecreateSKPs_Canary.json | 29 -------------------- .../Housekeeper-Weekly-RecreateSKPs.json | 29 -------------------- infra/bots/recipes/swarm_RecreateSKPs.py | 31 +--------------------- 3 files changed, 1 insertion(+), 88 deletions(-) (limited to 'infra/bots') diff --git a/infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Nightly-RecreateSKPs_Canary.json b/infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Nightly-RecreateSKPs_Canary.json index deb5d695e3..ef5134d956 100644 --- a/infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Nightly-RecreateSKPs_Canary.json +++ b/infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Nightly-RecreateSKPs_Canary.json @@ -183,33 +183,6 @@ "cwd": "[CUSTOM_/_B_WORK]/src", "name": "Build Chrome" }, - { - "cmd": [ - "python", - "-u", - "\nimport os\nimport urllib2\n\nBOTO_URL = 'http://metadata/computeMetadata/v1/project/attributes/boto-file'\n\ndest_path = '[SLAVE_BUILD]/tmp/.boto'\ndest_dir = os.path.dirname(dest_path)\nif not os.path.exists(dest_dir):\n os.makedirs(dest_dir)\n\nreq = urllib2.Request(BOTO_URL, headers={'Metadata-Flavor': 'Google'})\ncontents = urllib2.urlopen(req).read()\n\nwith open(dest_path, 'w') as f:\n f.write(contents)\n" - ], - "name": "download boto file", - "~followup_annotations": [ - "@@@STEP_LOG_LINE@python.inline@@@@", - "@@@STEP_LOG_LINE@python.inline@import os@@@", - "@@@STEP_LOG_LINE@python.inline@import urllib2@@@", - "@@@STEP_LOG_LINE@python.inline@@@@", - "@@@STEP_LOG_LINE@python.inline@BOTO_URL = 'http://metadata/computeMetadata/v1/project/attributes/boto-file'@@@", - "@@@STEP_LOG_LINE@python.inline@@@@", - "@@@STEP_LOG_LINE@python.inline@dest_path = '[SLAVE_BUILD]/tmp/.boto'@@@", - "@@@STEP_LOG_LINE@python.inline@dest_dir = os.path.dirname(dest_path)@@@", - "@@@STEP_LOG_LINE@python.inline@if not os.path.exists(dest_dir):@@@", - "@@@STEP_LOG_LINE@python.inline@ os.makedirs(dest_dir)@@@", - "@@@STEP_LOG_LINE@python.inline@@@@", - "@@@STEP_LOG_LINE@python.inline@req = urllib2.Request(BOTO_URL, headers={'Metadata-Flavor': 'Google'})@@@", - "@@@STEP_LOG_LINE@python.inline@contents = urllib2.urlopen(req).read()@@@", - "@@@STEP_LOG_LINE@python.inline@@@@", - "@@@STEP_LOG_LINE@python.inline@with open(dest_path, 'w') as f:@@@", - "@@@STEP_LOG_LINE@python.inline@ f.write(contents)@@@", - "@@@STEP_LOG_END@python.inline@@@" - ] - }, { "cmd": [ "python", @@ -264,8 +237,6 @@ ], "cwd": "[CUSTOM_/_B_WORK]/skia", "env": { - "AWS_CREDENTIAL_FILE": "[SLAVE_BUILD]/tmp/.boto", - "BOTO_CONFIG": "[SLAVE_BUILD]/tmp/.boto", "CHROME_HEADLESS": "1", "PATH": "[DEPOT_TOOLS]:%(PATH)s" }, diff --git a/infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Weekly-RecreateSKPs.json b/infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Weekly-RecreateSKPs.json index d5953ee14e..05e83a393c 100644 --- a/infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Weekly-RecreateSKPs.json +++ b/infra/bots/recipes/swarm_RecreateSKPs.expected/Housekeeper-Weekly-RecreateSKPs.json @@ -183,33 +183,6 @@ "cwd": "[CUSTOM_/_B_WORK]/src", "name": "Build Chrome" }, - { - "cmd": [ - "python", - "-u", - "\nimport os\nimport urllib2\n\nBOTO_URL = 
'http://metadata/computeMetadata/v1/project/attributes/boto-file'\n\ndest_path = '[SLAVE_BUILD]/tmp/.boto'\ndest_dir = os.path.dirname(dest_path)\nif not os.path.exists(dest_dir):\n os.makedirs(dest_dir)\n\nreq = urllib2.Request(BOTO_URL, headers={'Metadata-Flavor': 'Google'})\ncontents = urllib2.urlopen(req).read()\n\nwith open(dest_path, 'w') as f:\n f.write(contents)\n" - ], - "name": "download boto file", - "~followup_annotations": [ - "@@@STEP_LOG_LINE@python.inline@@@@", - "@@@STEP_LOG_LINE@python.inline@import os@@@", - "@@@STEP_LOG_LINE@python.inline@import urllib2@@@", - "@@@STEP_LOG_LINE@python.inline@@@@", - "@@@STEP_LOG_LINE@python.inline@BOTO_URL = 'http://metadata/computeMetadata/v1/project/attributes/boto-file'@@@", - "@@@STEP_LOG_LINE@python.inline@@@@", - "@@@STEP_LOG_LINE@python.inline@dest_path = '[SLAVE_BUILD]/tmp/.boto'@@@", - "@@@STEP_LOG_LINE@python.inline@dest_dir = os.path.dirname(dest_path)@@@", - "@@@STEP_LOG_LINE@python.inline@if not os.path.exists(dest_dir):@@@", - "@@@STEP_LOG_LINE@python.inline@ os.makedirs(dest_dir)@@@", - "@@@STEP_LOG_LINE@python.inline@@@@", - "@@@STEP_LOG_LINE@python.inline@req = urllib2.Request(BOTO_URL, headers={'Metadata-Flavor': 'Google'})@@@", - "@@@STEP_LOG_LINE@python.inline@contents = urllib2.urlopen(req).read()@@@", - "@@@STEP_LOG_LINE@python.inline@@@@", - "@@@STEP_LOG_LINE@python.inline@with open(dest_path, 'w') as f:@@@", - "@@@STEP_LOG_LINE@python.inline@ f.write(contents)@@@", - "@@@STEP_LOG_END@python.inline@@@" - ] - }, { "cmd": [ "python", @@ -265,8 +238,6 @@ ], "cwd": "[CUSTOM_/_B_WORK]/skia", "env": { - "AWS_CREDENTIAL_FILE": "[SLAVE_BUILD]/tmp/.boto", - "BOTO_CONFIG": "[SLAVE_BUILD]/tmp/.boto", "CHROME_HEADLESS": "1", "PATH": "[DEPOT_TOOLS]:%(PATH)s" }, diff --git a/infra/bots/recipes/swarm_RecreateSKPs.py b/infra/bots/recipes/swarm_RecreateSKPs.py index 38fd8534b3..eb92484068 100644 --- a/infra/bots/recipes/swarm_RecreateSKPs.py +++ b/infra/bots/recipes/swarm_RecreateSKPs.py @@ -110,28 +110,6 @@ def RunSteps(api): ['ninja', '-C', out_dir, 'chrome'], cwd=src_dir) - # Download boto file (needed by recreate_skps.py) to tmp dir. - boto_file = api.path['slave_build'].join('tmp', '.boto') - api.python.inline( - 'download boto file', - """ -import os -import urllib2 - -BOTO_URL = 'http://metadata/computeMetadata/v1/project/attributes/boto-file' - -dest_path = '%s' -dest_dir = os.path.dirname(dest_path) -if not os.path.exists(dest_dir): - os.makedirs(dest_dir) - -req = urllib2.Request(BOTO_URL, headers={'Metadata-Flavor': 'Google'}) -contents = urllib2.urlopen(req).read() - -with open(dest_path, 'w') as f: - f.write(contents) - """ % boto_file) - # Clean up the output dir. output_dir = api.path['slave_build'].join('skp_output') if api.path.exists(output_dir): @@ -144,13 +122,6 @@ with open(dest_path, 'w') as f: 'CHROME_HEADLESS': '1', 'PATH': path_var, } - boto_env = { - 'AWS_CREDENTIAL_FILE': boto_file, - 'BOTO_CONFIG': boto_file, - } - recreate_skps_env = {} - recreate_skps_env.update(env) - recreate_skps_env.update(boto_env) asset_dir = api.vars.infrabots_dir.join('assets', 'skp') cmd = ['python', asset_dir.join('create.py'), '--chrome_src_path', src_dir, @@ -161,7 +132,7 @@ with open(dest_path, 'w') as f: api.step('Recreate SKPs', cmd=cmd, cwd=api.vars.skia_dir, - env=recreate_skps_env) + env=env) # Upload the SKPs. if 'Canary' not in api.properties['buildername']: -- cgit v1.2.3
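
Note: the inline step removed by this patch fetched the .boto file from the GCE metadata server and wrote it under the build's tmp directory, then exported it via AWS_CREDENTIAL_FILE/BOTO_CONFIG, presumably so boto-based tooling could pick up credentials for the old bucket; per the commit message, the new bucket's upload path no longer needs them. The following is a minimal standalone sketch of that metadata-fetch pattern, not part of the patch: it uses Python 3's urllib.request in place of the recipe's urllib2, the destination path is illustrative, and the bare "metadata" hostname resolves only on a GCE instance.

    # Sketch of the removed "download boto file" step: fetch a project
    # metadata attribute from the GCE metadata server and write it to disk.
    import os
    import urllib.request

    BOTO_URL = 'http://metadata/computeMetadata/v1/project/attributes/boto-file'

    def download_boto_file(dest_path):
        """Fetch the boto-file metadata attribute and write it to dest_path."""
        dest_dir = os.path.dirname(dest_path)
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)

        # The Metadata-Flavor header is required by the GCE metadata server.
        req = urllib.request.Request(BOTO_URL,
                                     headers={'Metadata-Flavor': 'Google'})
        contents = urllib.request.urlopen(req).read()

        # urlopen() returns bytes, so write in binary mode.
        with open(dest_path, 'wb') as f:
            f.write(contents)

    if __name__ == '__main__':
        # Illustrative destination path only.
        download_boto_file('/tmp/recreate_skps/.boto')

With the step gone, the "Recreate SKPs" step runs with the plain CHROME_HEADLESS/PATH environment (env=env) instead of the merged recreate_skps_env that added the two boto variables.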