From f11c6bcab87ee8927e23a23d2300900a0922616d Mon Sep 17 00:00:00 2001
From: laszlocsomor
Date: Thu, 5 Jul 2018 01:58:06 -0700
Subject: python tools: ensure files are closed

Use the "with" statement to open files in various Python scripts used by
Bazel, to ensure these files are closed eagerly.

See https://github.com/bazelbuild/bazel/issues/5512

RELNOTES: none
PiperOrigin-RevId: 203346678
---
 scripts/packages/debian/generate_changelog.py | 32 +++++++--------
 src/test/py/bazel/bazel_windows_cpp_test.py   | 12 +++---
 src/test/py/bazel/cc_import_test.py           |  7 ++--
 src/test/shell/bazel/testing_server.py        |  4 +-
 tools/build_defs/docker/create_image.py       | 57 ++++++++++++++++++++-------
 tools/build_defs/docker/join_layers.py        | 19 +++++++--
 tools/j2objc/j2objc_wrapper.py                |  9 ++---
 tools/objc/j2objc_dead_code_pruner.py         |  9 ++---
 tools/objc/make_hashed_objlist.py             | 39 +++++++++---------
 9 files changed, 114 insertions(+), 74 deletions(-)

diff --git a/scripts/packages/debian/generate_changelog.py b/scripts/packages/debian/generate_changelog.py
index f1945fbbe1..31f69644b6 100644
--- a/scripts/packages/debian/generate_changelog.py
+++ b/scripts/packages/debian/generate_changelog.py
@@ -19,23 +19,23 @@ import sys


 def main(input_file, output_file):
-  changelog_out = open(output_file, "w")
-  version = None
-  with open(input_file, "r") as status_file:
-    for line in status_file:
-      line = line.strip()
-      if line.startswith("RELEASE_NAME "):
-        version = line[len("RELEASE_NAME "):].strip()
+  with open(output_file, "w") as changelog_out:
+    version = None
+    with open(input_file, "r") as status_file:
+      for line in status_file:
+        line = line.strip()
+        if line.startswith("RELEASE_NAME "):
+          version = line[len("RELEASE_NAME "):].strip()

-  if version:
-    changelog_out.write("bazel (%s) unstable; urgency=low\n" % version)
-    changelog_out.write("\n  Bumped Bazel version to %s.\n" % version)
-  else:
-    changelog_out.write("bazel (0.1.0~HEAD) unstable; urgency=low\n")
-    changelog_out.write("\n  Development version\n")
-  changelog_out.write(
-      "\n -- The Bazel Authors %s\n\n" %
-      datetime.now().strftime("%a, %d %b %Y %H:%M:%S +0100"))
+    if version:
+      changelog_out.write("bazel (%s) unstable; urgency=low\n" % version)
+      changelog_out.write("\n  Bumped Bazel version to %s.\n" % version)
+    else:
+      changelog_out.write("bazel (0.1.0~HEAD) unstable; urgency=low\n")
+      changelog_out.write("\n  Development version\n")
+    changelog_out.write(
+        "\n -- The Bazel Authors %s\n\n" %
+        datetime.now().strftime("%a, %d %b %Y %H:%M:%S +0100"))


 if __name__ == "__main__":
diff --git a/src/test/py/bazel/bazel_windows_cpp_test.py b/src/test/py/bazel/bazel_windows_cpp_test.py
index 3d21929205..40fe50905c 100644
--- a/src/test/py/bazel/bazel_windows_cpp_test.py
+++ b/src/test/py/bazel/bazel_windows_cpp_test.py
@@ -436,14 +436,14 @@ class BazelWindowsCppTest(test_base.TestBase):
     self.AssertFileContentContains(def_file, 'hello_C')

   def AssertFileContentContains(self, file_path, entry):
-    f = open(file_path, 'r')
-    if entry not in f.read():
-      self.fail('File "%s" does not contain "%s"' % (file_path, entry))
+    with open(file_path, 'r') as f:
+      if entry not in f.read():
+        self.fail('File "%s" does not contain "%s"' % (file_path, entry))

   def AssertFileContentNotContains(self, file_path, entry):
-    f = open(file_path, 'r')
-    if entry in f.read():
-      self.fail('File "%s" does contain "%s"' % (file_path, entry))
+    with open(file_path, 'r') as f:
+      if entry in f.read():
+        self.fail('File "%s" does contain "%s"' % (file_path, entry))

   def testWinDefFileAttribute(self):
     self.ScratchFile('WORKSPACE')
diff --git a/src/test/py/bazel/cc_import_test.py b/src/test/py/bazel/cc_import_test.py
index 99ab086a28..b20479904d 100644
--- a/src/test/py/bazel/cc_import_test.py
+++ b/src/test/py/bazel/cc_import_test.py
@@ -247,10 +247,9 @@ class CcImportTest(test_base.TestBase):
         ''.join(stderr))

   def AssertFileContentContains(self, file_path, entry):
-    f = open(file_path, 'r')
-    if entry not in f.read():
-      self.fail('File "%s" does not contain "%s"' % (file_path, entry))
-    f.close()
+    with open(file_path, 'r') as f:
+      if entry not in f.read():
+        self.fail('File "%s" does not contain "%s"' % (file_path, entry))


 if __name__ == '__main__':
diff --git a/src/test/shell/bazel/testing_server.py b/src/test/shell/bazel/testing_server.py
index 66906ad0d5..583e3ce86d 100644
--- a/src/test/shell/bazel/testing_server.py
+++ b/src/test/shell/bazel/testing_server.py
@@ -62,8 +62,8 @@ class Handler(BaseHTTPRequestHandler):
       self.wfile.write('not authenticated')

   def serve_file(self):
-    file_to_serve = open(str(os.getcwd()) + self.path)
-    self.wfile.write(file_to_serve.read())
+    with open(str(os.getcwd()) + self.path) as file_to_serve:
+      self.wfile.write(file_to_serve.read())


 if __name__ == '__main__':
diff --git a/tools/build_defs/docker/create_image.py b/tools/build_defs/docker/create_image.py
index 5dda217765..a5faf9f516 100644
--- a/tools/build_defs/docker/create_image.py
+++ b/tools/build_defs/docker/create_image.py
@@ -75,21 +75,21 @@ def _base_name_filter(name):
   return all([not name.endswith(s) for s in filter_names])


-def create_image(output,
-                 identifier,
-                 layers,
-                 config,
-                 tags=None,
-                 base=None,
-                 legacy_base=None,
-                 metadata_id=None,
-                 metadata=None,
-                 name=None,
-                 repository=None):
+def _create_image(tar,
+                  identifier,
+                  layers,
+                  config,
+                  tags=None,
+                  base=None,
+                  legacy_base=None,
+                  metadata_id=None,
+                  metadata=None,
+                  name=None,
+                  repository=None):
   """Creates a Docker image.

   Args:
-    output: the name of the docker image file to create.
+    tar: archive.TarFileWriter object for the docker image file to create.
     identifier: the identifier for this image (sha256 of the metadata).
     layers: the layer content (a sha256 and a tar file).
     config: the configuration file for the image.
@@ -101,8 +101,6 @@ def create_image(output,
     name: symbolic name for this docker image.
     repository: repository name for this docker image.
   """
-  tar = archive.TarFileWriter(output)
-
   # add the image config referenced by the Config section in the manifest
   # the name can be anything but docker uses the format below
   config_file_name = identifier + '.json'
@@ -184,6 +182,37 @@ def create_image(output,
                      '}']))


+def create_image(output,
+                 identifier,
+                 layers,
+                 config,
+                 tags=None,
+                 base=None,
+                 legacy_base=None,
+                 metadata_id=None,
+                 metadata=None,
+                 name=None,
+                 repository=None):
+  """Creates a Docker image.
+
+  Args:
+    output: the name of the docker image file to create.
+    identifier: the identifier for this image (sha256 of the metadata).
+    layers: the layer content (a sha256 and a tar file).
+    config: the configuration file for the image.
+    tags: tags that apply to this image.
+    base: a base layer (optional) to build on top of.
+    legacy_base: a base layer (optional) to build on top of.
+    metadata_id: the identifier of the top layer for this image.
+    metadata: the json metadata file for the top layer.
+    name: symbolic name for this docker image.
+    repository: repository name for this docker image.
+ """ + with archive.TarFileWriter(output) as tar: + _create_image(tar, identifier, layers, config, tags, base, legacy_base, + metadata_id, metadata, name, repository) + + # Main program to create a docker image. It expect to be run with: # create_image --output=output_file \ # --id=@identifier \ diff --git a/tools/build_defs/docker/join_layers.py b/tools/build_defs/docker/join_layers.py index 0928aef70d..039583d6dc 100644 --- a/tools/build_defs/docker/join_layers.py +++ b/tools/build_defs/docker/join_layers.py @@ -60,11 +60,11 @@ def _add_top(tar, repositories): tar.add_file('top', content=layer_id) -def create_image(output, layers, repositories=None): +def _create_image(tar, layers, repositories=None): """Creates a Docker image from a list of layers. Args: - output: the name of the docker image file to create. + tar: archive.TarFileWriter object for the docker image file to create. layers: the layers (tar files) to join to the image. repositories: the repositories two-level dictionary, which is keyed by repo names at the top-level, and tag names at the second @@ -82,7 +82,6 @@ def create_image(output, layers, repositories=None): layers_to_tag[layer_name] = layer_tags manifests = [] - tar = archive.TarFileWriter(output) for layer in layers: tar.add_tar(layer, name_filter=_layer_filter) layer_manifests = utils.GetManifestFromTar(layer) @@ -120,6 +119,20 @@ def create_image(output, layers, repositories=None): content=json.dumps(repositories, sort_keys=True)) +def create_image(output, layers, repositories=None): + """Creates a Docker image from a list of layers. + + Args: + output: the name of the docker image file to create. + layers: the layers (tar files) to join to the image. + repositories: the repositories two-level dictionary, which is keyed by + repo names at the top-level, and tag names at the second + level pointing to layer ids. + """ + with archive.TarFileWriter(output) as tar: + _create_image(tar, layers, repositories) + + def resolve_layer(identifier): if not identifier: # TODO(mattmoor): This should not happen. diff --git a/tools/j2objc/j2objc_wrapper.py b/tools/j2objc/j2objc_wrapper.py index bc142d4656..339f092446 100755 --- a/tools/j2objc/j2objc_wrapper.py +++ b/tools/j2objc/j2objc_wrapper.py @@ -125,11 +125,10 @@ def WriteDepMappingFile(objc_files, entry_file, deps = output_dep_mapping_queue.get() dep_mapping[entry_file] = deps - f = file_open(output_dependency_mapping_file, 'w') - for entry in sorted(dep_mapping): - for dep in dep_mapping[entry]: - f.write(entry + ':' + dep + '\n') - f.close() + with file_open(output_dependency_mapping_file, 'w') as f: + for entry in sorted(dep_mapping): + for dep in dep_mapping[entry]: + f.write(entry + ':' + dep + '\n') def _ReadDepMapping(input_file_queue, output_dep_mapping_queue, diff --git a/tools/objc/j2objc_dead_code_pruner.py b/tools/objc/j2objc_dead_code_pruner.py index d2f52a4dba..54b8bedc66 100755 --- a/tools/objc/j2objc_dead_code_pruner.py +++ b/tools/objc/j2objc_dead_code_pruner.py @@ -200,11 +200,10 @@ def _PruneFile(file_queue, reachable_files, objc_file_path, file_open=open, if file_name in reachable_files: file_shutil.copy(input_file, output_file) else: - f = file_open(output_file, 'w') - # Use a static variable scoped to the source file to supress - # the "has no symbols" linker warning for empty object files. - f.write(PRUNED_SRC_CONTENT) - f.close() + with file_open(output_file, 'w') as f: + # Use a static variable scoped to the source file to supress + # the "has no symbols" linker warning for empty object files. 
+        f.write(PRUNED_SRC_CONTENT)
     file_queue.task_done()


diff --git a/tools/objc/make_hashed_objlist.py b/tools/objc/make_hashed_objlist.py
index 85daeb65ba..9dfd54abcf 100644
--- a/tools/objc/make_hashed_objlist.py
+++ b/tools/objc/make_hashed_objlist.py
@@ -15,12 +15,14 @@

 """Creates symbolic links for .o files with hashcode.

-This script reads the file list containing the input files, creates symbolic
-links with a path-hash appended to their original name (foo.o becomes
-foo_{md5sum}.o), then saves the list of symbolic links to another file.
-
-This is to circumvent a bug in the original libtool that arises when two input
-files have the same base name (even if they are in different directories).
+This script reads the file list containing the input files, creates
+symbolic links with a path-hash appended to their original name (foo.o
+becomes foo_{md5sum}.o), then saves the list of symbolic links to another
+file.
+
+This is to circumvent a bug in the original libtool that arises when two
+input files have the same base name (even if they are in different
+directories).
 """

 import hashlib
@@ -29,22 +31,21 @@ import sys


 def main():
-  obj_file_list = open(sys.argv[1])
-  hashed_obj_file_list = open(sys.argv[2], 'w')
-
-  for line in obj_file_list:
-    obj_file_path = line.rstrip('\n')
-    hashed_obj_file_path = '%s_%s.o' % (
-        os.path.splitext(obj_file_path)[0],
-        hashlib.md5(obj_file_path.encode('utf-8')).hexdigest())
+  with open(sys.argv[1]) as obj_file_list:
+    with open(sys.argv[2], 'w') as hashed_obj_file_list:
+      for line in obj_file_list:
+        obj_file_path = line.rstrip('\n')
+        hashed_obj_file_path = '%s_%s.o' % (
+            os.path.splitext(obj_file_path)[0],
+            hashlib.md5(obj_file_path.encode('utf-8')).hexdigest())

-    hashed_obj_file_list.write(hashed_obj_file_path + '\n')
+        hashed_obj_file_list.write(hashed_obj_file_path + '\n')

-    # Create symlink only if the symlink doesn't exist.
-    if not os.path.exists(hashed_obj_file_path):
-      os.symlink(os.path.basename(obj_file_path), hashed_obj_file_path)
+        # Create symlink only if the symlink doesn't exist.
+        if not os.path.exists(hashed_obj_file_path):
+          os.symlink(os.path.basename(obj_file_path),
+                     hashed_obj_file_path)

-  hashed_obj_file_list.close()

 if __name__ == '__main__':
   main()
--
cgit v1.2.3
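
For context, every hunk in this patch applies the same idea. The following is a
minimal, self-contained sketch of that pattern; the function and file names are
hypothetical and are not part of the Bazel sources.

def copy_first_line(src_path, dst_path):
  """Illustrative only: copies the first line of src_path into dst_path."""
  # Before the change, handles were closed by hand, so an exception raised
  # between open() and close() (or an early return) left the descriptors
  # open until the garbage collector finalized the file objects:
  #
  #   src = open(src_path)
  #   dst = open(dst_path, 'w')
  #   dst.write(src.readline())
  #   src.close()
  #   dst.close()
  #
  # With the "with" statement, both files are closed as soon as the block
  # exits, whether it exits normally or through an exception.
  with open(src_path) as src, open(dst_path, 'w') as dst:
    dst.write(src.readline())

The same guarantee holds for any object that implements the context-manager
protocol, which the patch relies on when it writes
"with archive.TarFileWriter(output) as tar:"; an object that only exposes a
close() method can be wrapped in contextlib.closing() for equivalent behavior.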