diff options
author | Han-Wen Nienhuys <hanwen@google.com> | 2015-02-25 16:45:20 +0100 |
---|---|---|
committer | Han-Wen Nienhuys <hanwen@google.com> | 2015-02-25 16:45:20 +0100 |
commit | d08b27fa9701fecfdb69e1b0d1ac2459efc2129b (patch) | |
tree | 5d50963026239ca5aebfb47ea5b8db7e814e57c8 /src/java_tools/singlejar |
Update from Google.
--
MOE_MIGRATED_REVID=85702957
Diffstat (limited to 'src/java_tools/singlejar')
28 files changed, 5681 insertions, 0 deletions
diff --git a/src/java_tools/singlejar/BUILD b/src/java_tools/singlejar/BUILD new file mode 100644 index 0000000000..463c5063b1 --- /dev/null +++ b/src/java_tools/singlejar/BUILD @@ -0,0 +1,32 @@ +package(default_visibility = ["//src:__pkg__"]) + +java_library( + name = "libSingleJar", + srcs = glob(["java/**/*.java"]), + deps = [ + "//src/main/java:shell", + "//third_party:guava", + "//third_party:jsr305", + ], +) + +java_binary( + name = "SingleJar", + main_class = "com.google.devtools.build.singlejar.SingleJar", + runtime_deps = [":libSingleJar"], +) + +java_test( + name = "tests", + srcs = glob(["javatests/**/*.java"]), + args = ["com.google.devtools.build.singlejar.SingleJarTests"], + deps = [ + ":libSingleJar", + "//src/main/java:shell", + "//src/test/java:testutil", + "//third_party:guava", + "//third_party:jsr305", + "//third_party:junit4", + "//third_party:truth", + ], +) diff --git a/src/java_tools/singlejar/README b/src/java_tools/singlejar/README new file mode 100644 index 0000000000..da92eb7a9b --- /dev/null +++ b/src/java_tools/singlejar/README @@ -0,0 +1,2 @@ +SingleJar is a tool used to combine multiple jar file into a single one. It is used by Bazel to +build java binaries that are self-contained.
\ No newline at end of file diff --git a/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/ConcatenateStrategy.java b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/ConcatenateStrategy.java new file mode 100644 index 0000000000..7dfb31fb1c --- /dev/null +++ b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/ConcatenateStrategy.java @@ -0,0 +1,74 @@ +// Copyright 2014 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.devtools.build.singlejar; + +import com.google.devtools.build.singlejar.ZipEntryFilter.CustomMergeStrategy; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +import javax.annotation.concurrent.NotThreadSafe; + +/** + * A strategy that merges a set of files by concatenating them. This is used + * for services files. By default, this class automatically adds a newline + * character {@code '\n'} between files if the previous file did not end with one. + * + * <p>Note: automatically inserting newline characters differs from the + * original behavior. Use {@link #ConcatenateStrategy(boolean)} to turn this + * behavior off. + */ +@NotThreadSafe +public final class ConcatenateStrategy implements CustomMergeStrategy { + + // The strategy assumes that files are generally small. This is a first guess + // about the size of the files. 
+ private static final int BUFFER_SIZE = 4096; + + private final byte[] buffer = new byte[BUFFER_SIZE]; + private byte lastByteCopied = '\n'; + private final boolean appendNewLine; + + ConcatenateStrategy() { + this(true); + } + + /** + * @param appendNewLine Whether to add a newline character between files if + * the previous file did not end with one. + */ + ConcatenateStrategy(boolean appendNewLine) { + this.appendNewLine = appendNewLine; + } + + @Override + public void merge(InputStream in, OutputStream out) throws IOException { + if (appendNewLine && lastByteCopied != '\n') { + out.write('\n'); + lastByteCopied = '\n'; + } + int bytesRead; + while ((bytesRead = in.read(buffer)) != -1) { + out.write(buffer, 0, bytesRead); + lastByteCopied = buffer[bytesRead - 1]; + } + } + + @Override + public void finish(OutputStream out) { + // No need to do anything. All the data was already written. + } +} diff --git a/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/CopyEntryFilter.java b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/CopyEntryFilter.java new file mode 100644 index 0000000000..586c378d64 --- /dev/null +++ b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/CopyEntryFilter.java @@ -0,0 +1,33 @@ +// Copyright 2014 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package com.google.devtools.build.singlejar; + +import java.io.IOException; + +import javax.annotation.concurrent.Immutable; + +/** + * A filter which invokes {@link StrategyCallback#copy} for every entry. As a + * result, the first entry for every given name is copied and further entries + * with the same name are skipped. + */ +@Immutable +public final class CopyEntryFilter implements ZipEntryFilter { + + @Override + public void accept(String filename, StrategyCallback callback) throws IOException { + callback.copy(null); + } +} diff --git a/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/DefaultJarEntryFilter.java b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/DefaultJarEntryFilter.java new file mode 100644 index 0000000000..fd26c60986 --- /dev/null +++ b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/DefaultJarEntryFilter.java @@ -0,0 +1,119 @@ +// Copyright 2014 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.devtools.build.singlejar; + +import java.io.IOException; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.jar.JarFile; + +import javax.annotation.concurrent.Immutable; + +/** + * A default filter for JAR files. It merges all services files in the {@code META-INF/services/} + * directory. The original {@code MANIFEST} files are skipped, as are JAR signing files. 
Anything + * not in the supplied path filter, an arbitrary predicate, is also skipped. To use this filter + * properly, a new {@code MANIFEST} file should be explicitly added to the combined ZIP file. + */ +@Immutable +public class DefaultJarEntryFilter implements ZipEntryFilter { + + /** An interface to restrict which files are copied over and which are not. */ + public static interface PathFilter { + /** + * Returns true if an entry with the given name may be copied over. + */ + boolean allowed(String path); + } + + /** A filter that allows any path. */ + public static final PathFilter ANY_PATH = new PathFilter() { + @Override + public boolean allowed(String path) { + return true; + } + }; + + // ZIP timestamps have a resolution of 2 seconds, so this is the next timestamp after 1/1/1980. + // This is only Visible for testing. + static final Date DOS_EPOCH_PLUS_2_SECONDS = + new GregorianCalendar(1980, 0, 1, 0, 0, 2).getTime(); + + // Merge all files with a name in here: + private static final String SERVICES_DIR = "META-INF/services/"; + + // Merge all spring.handlers files. + private static final String SPRING_HANDLERS = "META-INF/spring.handlers"; + + // Merge all spring.schemas files. + private static final String SPRING_SCHEMAS = "META-INF/spring.schemas"; + + // Ignore all files with this name: + private static final String MANIFEST_NAME = JarFile.MANIFEST_NAME; + + // Merge all protobuf extension registries. + private static final String PROTOBUF_META = "protobuf.meta"; + + protected final Date date; + protected final Date classDate; + protected PathFilter allowedPaths; + + public DefaultJarEntryFilter(boolean normalize, PathFilter allowedPaths) { + this.date = normalize ? ZipCombiner.DOS_EPOCH : null; + this.classDate = normalize ? 
DOS_EPOCH_PLUS_2_SECONDS : null; + this.allowedPaths = allowedPaths; + } + + public DefaultJarEntryFilter(boolean normalize) { + this(normalize, ANY_PATH); + } + + public DefaultJarEntryFilter() { + this(true); + } + + @Override + public void accept(String filename, StrategyCallback callback) throws IOException { + if (!allowedPaths.allowed(filename)) { + callback.skip(); + } else if (filename.equals(SPRING_HANDLERS)) { + callback.customMerge(date, new ConcatenateStrategy()); + } else if (filename.equals(SPRING_SCHEMAS)) { + callback.customMerge(date, new ConcatenateStrategy()); + } else if (filename.startsWith(SERVICES_DIR)) { + // Merge all services files. + callback.customMerge(date, new ConcatenateStrategy()); + } else if (filename.equals(MANIFEST_NAME) || filename.endsWith(".SF") + || filename.endsWith(".DSA") || filename.endsWith(".RSA")) { + // Ignore existing manifests and any .SF, .DSA or .RSA jar signing files. + // TODO(bazel-team): I think we should be stricter and only skip signing + // files from the META-INF/ directory. + callback.skip(); + } else if (filename.endsWith(".class")) { + // Copy .class files over, but 2 seconds ahead of the dos epoch. If it finds both source and + // class files on the classpath, javac prefers the source file, if the class file is not newer + // than the source file. Since we normalize the timestamps, we need to provide timestamps for + // class files that are newer than those for the corresponding source files. + callback.copy(classDate); + } else if (filename.equals(PROTOBUF_META)) { + // Merge all protobuf meta data without inserting newlines, + // since the file is in protobuf binary format. + callback.customMerge(date, new ConcatenateStrategy(false)); + } else { + // Copy all other files over. 
+ callback.copy(date); + } + } +} diff --git a/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/ExtraData.java b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/ExtraData.java new file mode 100644 index 0000000000..2e8cb75e02 --- /dev/null +++ b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/ExtraData.java @@ -0,0 +1,41 @@ +// Copyright 2014 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.devtools.build.singlejar; + +/** + * A holder class for extra data in a ZIP entry. + * + * <p>Note: This class performs no defensive copying of the byte array, so the + * byte array passed into this class or returned from this class may not be + * modified. + */ +final class ExtraData { + + private final short id; + private final byte[] data; + + public ExtraData(short id, byte[] data) { + this.id = id; + this.data = data; + } + + public short getId() { + return id; + } + + public byte[] getData() { + return data; + } +} diff --git a/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/JarUtils.java b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/JarUtils.java new file mode 100644 index 0000000000..a9c8ee3f65 --- /dev/null +++ b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/JarUtils.java @@ -0,0 +1,39 @@ +// Copyright 2014 Google Inc. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.devtools.build.singlejar; + +import java.io.IOException; +import java.util.Date; + +/** + * Provides utilities for using ZipCombiner to pack up Jar files. + */ +public final class JarUtils { + private static final String MANIFEST_DIRECTORY = "META-INF/"; + private static final short MAGIC_JAR_ID = (short) 0xCAFE; + private static final ExtraData[] MAGIC_JAR_ID_EXTRA_ENTRIES = + new ExtraData[] { new ExtraData(MAGIC_JAR_ID, new byte[0]) }; + + /** + * Adds META-INF directory through ZipCombiner with the given date and the + * magic jar ID. + * + * @throws IOException if {@link ZipCombiner#addDirectory(String, Date, ExtraData[])} + * throws an IOException. + */ + public static void addMetaInf(ZipCombiner combiner, Date date) throws IOException { + combiner.addDirectory(MANIFEST_DIRECTORY, date, MAGIC_JAR_ID_EXTRA_ENTRIES); + } +} diff --git a/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/JavaIoFileSystem.java b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/JavaIoFileSystem.java new file mode 100644 index 0000000000..0da6e33040 --- /dev/null +++ b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/JavaIoFileSystem.java @@ -0,0 +1,43 @@ +// Copyright 2014 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.devtools.build.singlejar; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +/** + * An implementation based on java.io. + */ +public final class JavaIoFileSystem implements SimpleFileSystem { + + @Override + public InputStream getInputStream(String filename) throws IOException { + return new FileInputStream(filename); + } + + @Override + public OutputStream getOutputStream(String filename) throws IOException { + return new FileOutputStream(filename); + } + + @Override + public boolean delete(String filename) { + return new File(filename).delete(); + } +}
\ No newline at end of file diff --git a/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/OptionFileExpander.java b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/OptionFileExpander.java new file mode 100644 index 0000000000..dafeb9d1d4 --- /dev/null +++ b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/OptionFileExpander.java @@ -0,0 +1,127 @@ +// Copyright 2014 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.devtools.build.singlejar; + +import static java.nio.charset.StandardCharsets.ISO_8859_1; + +import com.google.devtools.build.lib.shell.ShellUtils; +import com.google.devtools.build.lib.shell.ShellUtils.TokenizationException; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.util.ArrayList; +import java.util.List; + +import javax.annotation.concurrent.Immutable; + +/** + * A utility class to parse option files and expand them. + */ +@Immutable +final class OptionFileExpander { + + /** + * An interface that allows injecting different implementations for reading + * files. This is mostly used for testing. + */ + interface OptionFileProvider { + + /** + * Opens a file for reading and returns an input stream. 
+ */ + InputStream getInputStream(String filename) throws IOException; + } + + private final OptionFileProvider fileSystem; + + /** + * Creates an instance with the given option file provider. + */ + public OptionFileExpander(OptionFileProvider fileSystem) { + this.fileSystem = fileSystem; + } + + /** + * Pre-processes an argument list, expanding options of the form &at;filename + * to read in the content of the file and add it to the list of arguments. + * + * @param args the List of arguments to pre-process. + * @return the List of pre-processed arguments. + * @throws IOException if one of the files containing options cannot be read. + */ + public List<String> expandArguments(List<String> args) throws IOException { + List<String> expanded = new ArrayList<>(args.size()); + for (String arg : args) { + expandArgument(arg, expanded); + } + return expanded; + } + + /** + * Expands a single argument, expanding options &at;filename to read in + * the content of the file and add it to the list of processed arguments. + * + * @param arg the argument to pre-process. + * @param expanded the List of pre-processed arguments. + * @throws IOException if one of the files containing options cannot be read. + */ + private void expandArgument(String arg, List<String> expanded) throws IOException { + if (arg.startsWith("@")) { + InputStream in = fileSystem.getInputStream(arg.substring(1)); + try { + // TODO(bazel-team): This code doesn't handle escaped newlines correctly. + // ShellUtils doesn't support them either. 
+ for (String line : readAllLines(new InputStreamReader(in, ISO_8859_1))) { + List<String> parsedTokens = new ArrayList<>(); + try { + ShellUtils.tokenize(parsedTokens, line); + } catch (TokenizationException e) { + throw new IOException("Could not tokenize parameter file!", e); + } + for (String token : parsedTokens) { + expandArgument(token, expanded); + } + } + InputStream inToClose = in; + in = null; + inToClose.close(); + } finally { + if (in != null) { + try { + in.close(); + } catch (IOException e) { + // Ignore the exception. It can only occur if an exception already + // happened and in that case, we want to preserve the original one. + } + } + } + } else { + expanded.add(arg); + } + } + + private List<String> readAllLines(Reader in) throws IOException { + List<String> result = new ArrayList<>(); + BufferedReader reader = new BufferedReader(in); + String line; + while ((line = reader.readLine()) != null) { + result.add(line); + } + return result; + } +} diff --git a/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/PrefixListPathFilter.java b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/PrefixListPathFilter.java new file mode 100644 index 0000000000..a6e30d4478 --- /dev/null +++ b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/PrefixListPathFilter.java @@ -0,0 +1,40 @@ +// Copyright 2014 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package com.google.devtools.build.singlejar; + +import com.google.devtools.build.singlejar.DefaultJarEntryFilter.PathFilter; + +import java.util.List; + +/** + * A predicate used to filter jar entries according to a list of path prefixes. + */ +final class PrefixListPathFilter implements PathFilter { + private final List<String> prefixes; + + public PrefixListPathFilter(List<String> prefixes) { + this.prefixes = prefixes; + } + + @Override + public boolean allowed(String path) { + for (String prefix : prefixes) { + if (path.startsWith(prefix)) { + return true; + } + } + return false; + } +}
\ No newline at end of file diff --git a/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/SimpleFileSystem.java b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/SimpleFileSystem.java new file mode 100644 index 0000000000..844f12b714 --- /dev/null +++ b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/SimpleFileSystem.java @@ -0,0 +1,43 @@ +// Copyright 2014 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.devtools.build.singlejar; + +import com.google.devtools.build.singlejar.OptionFileExpander.OptionFileProvider; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +/** + * A simple virtual file system interface. It's much simpler than the Blaze + * virtual file system and only to be used inside this package. + */ +public interface SimpleFileSystem extends OptionFileProvider { + + @Override + InputStream getInputStream(String filename) throws IOException; + + /** + * Opens a file for output and returns an output stream. If a file of that + * name already exists, it is overwritten. + */ + OutputStream getOutputStream(String filename) throws IOException; + + /** + * Delete the file with the given name and return whether deleting it was + * successfull. + */ + boolean delete(String filename); +}
\ No newline at end of file diff --git a/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/SingleJar.java b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/SingleJar.java new file mode 100644 index 0000000000..4551fd1813 --- /dev/null +++ b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/SingleJar.java @@ -0,0 +1,401 @@ +// Copyright 2014 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.devtools.build.singlejar; + +import com.google.devtools.build.singlejar.DefaultJarEntryFilter.PathFilter; +import com.google.devtools.build.singlejar.ZipCombiner.OutputMode; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.Properties; +import java.util.jar.Attributes; +import java.util.jar.JarFile; +import java.util.jar.Manifest; + +import javax.annotation.concurrent.NotThreadSafe; + +/** + * An application that emulates the existing SingleJar tool, using the {@link + * ZipCombiner} class. 
+ */ +@NotThreadSafe +public class SingleJar { + + private static final byte NEWLINE_BYTE = (byte) '\n'; + private static final String MANIFEST_FILENAME = JarFile.MANIFEST_NAME; + private static final String BUILD_DATA_FILENAME = "build-data.properties"; + + private final SimpleFileSystem fileSystem; + + /** The input jar files we want to combine into the output jar. */ + private final List<String> inputJars = new ArrayList<>(); + + /** Additional resources to be added to the output jar. */ + private final List<String> resources = new ArrayList<>(); + + /** Additional class path resources to be added to the output jar. */ + private final List<String> classpathResources = new ArrayList<>(); + + /** The name of the output Jar file. */ + private String outputJar; + + /** A filter for what jar entries to include */ + private PathFilter allowedPaths = DefaultJarEntryFilter.ANY_PATH; + + /** Extra manifest contents. */ + private String extraManifestContent; + /** The main class - this is put into the manifest and also into the build info. */ + private String mainClass; + + /** + * Warn about duplicate resource files, and skip them. Default behavior is to + * give an error message. + */ + private boolean warnDuplicateFiles = false; + + /** Indicates whether to set all timestamps to a fixed value. */ + private boolean normalize = false; + private OutputMode outputMode = OutputMode.FORCE_STORED; + + /** Whether to include build-data.properties file */ + protected boolean includeBuildData = true; + + /** List of build information properties files */ + protected List<String> buildInformationFiles = new ArrayList<String>(); + + /** Extraneous build informations (key=value) */ + protected List<String> buildInformations = new ArrayList<String>(); + + /** The (optional) native executable that will be prepended to this JAR. */ + private String launcherBin = null; + + // Only visible for testing. 
+ protected SingleJar(SimpleFileSystem fileSystem) { + this.fileSystem = fileSystem; + } + + /** + * Creates a manifest and returns an input stream for its contents. + */ + private InputStream createManifest() throws IOException { + Manifest manifest = new Manifest(); + Attributes attributes = manifest.getMainAttributes(); + attributes.put(Attributes.Name.MANIFEST_VERSION, "1.0"); + attributes.put(new Attributes.Name("Created-By"), "blaze-singlejar"); + if (mainClass != null) { + attributes.put(Attributes.Name.MAIN_CLASS, mainClass); + } + if (extraManifestContent != null) { + ByteArrayInputStream in = new ByteArrayInputStream(extraManifestContent.getBytes("UTF8")); + manifest.read(in); + } + ByteArrayOutputStream out = new ByteArrayOutputStream(); + manifest.write(out); + return new ByteArrayInputStream(out.toByteArray()); + } + + private InputStream createBuildData() throws IOException { + Properties properties = mergeBuildData(); + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + properties.store(outputStream, null); + byte[] output = outputStream.toByteArray(); + // Properties#store() adds a timestamp comment as first line, delete it. 
+ output = stripFirstLine(output); + return new ByteArrayInputStream(output); + } + + static byte[] stripFirstLine(byte[] output) { + int i = 0; + while (i < output.length && output[i] != NEWLINE_BYTE) { + i++; + } + if (i < output.length) { + output = Arrays.copyOfRange(output, i + 1, output.length); + } else { + output = new byte[0]; + } + return output; + } + + private Properties mergeBuildData() throws IOException { + Properties properties = new Properties(); + for (String fileName : buildInformationFiles) { + InputStream file = fileSystem.getInputStream(fileName); + if (file != null) { + properties.load(file); + } + } + + // extra properties + for (String info : buildInformations) { + String[] split = info.split("=", 2); + String key = split[0]; + String value = ""; + if (split.length > 1) { + value = split[1]; + } + properties.put(key, value); + } + + // finally add generic information + // TODO(bazel-team) do we need to resolve the path to be absolute or canonical? + properties.put("build.target", outputJar); + if (mainClass != null) { + properties.put("main.class", mainClass); + } + return properties; + } + + private String getName(String filename) { + int index = filename.lastIndexOf('/'); + return index < 0 ? filename : filename.substring(index + 1); + } + + // Only visible for testing. + protected int run(List<String> args) throws IOException { + List<String> expandedArgs = new OptionFileExpander(fileSystem).expandArguments(args); + processCommandlineArgs(expandedArgs); + InputStream buildInfo = createBuildData(); + + ZipCombiner combiner = null; + try { + combiner = new ZipCombiner(outputMode, createEntryFilter(normalize, allowedPaths), + fileSystem.getOutputStream(outputJar)); + if (launcherBin != null) { + combiner.prependExecutable(fileSystem.getInputStream(launcherBin)); + } + Date date = normalize ? ZipCombiner.DOS_EPOCH : null; + + // Add a manifest file. 
+ JarUtils.addMetaInf(combiner, date); + combiner.addFile(MANIFEST_FILENAME, date, createManifest()); + + if (includeBuildData) { + // Add the build data file. + combiner.addFile(BUILD_DATA_FILENAME, date, buildInfo); + } + + // Copy the resources to the top level of the jar file. + for (String classpathResource : classpathResources) { + String entryName = getName(classpathResource); + if (warnDuplicateFiles && combiner.containsFile(entryName)) { + System.err.println("File " + entryName + " clashes with a previous file"); + continue; + } + combiner.addFile(entryName, date, fileSystem.getInputStream(classpathResource)); + } + + // Copy the resources into the jar file. + for (String resource : resources) { + String from, to; + int i = resource.indexOf(':'); + if (i < 0) { + to = from = resource; + } else { + from = resource.substring(0, i); + to = resource.substring(i + 1); + } + if (warnDuplicateFiles && combiner.containsFile(to)) { + System.err.println("File " + from + " at " + to + " clashes with a previous file"); + continue; + } + combiner.addFile(to, date, fileSystem.getInputStream(from)); + } + + // Copy the jars into the jar file. + for (String inputJar : inputJars) { + InputStream in = fileSystem.getInputStream(inputJar); + try { + combiner.addZip(inputJar, in); + InputStream inToClose = in; + in = null; + inToClose.close(); + } finally { + if (in != null) { + try { + in.close(); + } catch (IOException e) { + // Preserve original exception. + } + } + } + } + + // Close the output file. If something goes wrong here, delete the file. + combiner.close(); + combiner = null; + } finally { + // This part is only executed if an exception occurred. + if (combiner != null) { + try { + // We may end up calling close twice, but that's ok. + combiner.close(); + } catch (IOException e) { + // There's already an exception in progress - this won't add any + // additional information. + } + // Ignore return value - there's already an exception in progress. 
+ fileSystem.delete(outputJar); + } + } + return 0; + } + + protected ZipEntryFilter createEntryFilter(boolean normalize, PathFilter allowedPaths) { + return new DefaultJarEntryFilter(normalize, allowedPaths); + } + + /** + * Collects the arguments for a command line flag until it finds a flag that + * starts with the terminatorPrefix. + * + * @param args + * @param startIndex the start index in the args to collect the flag arguments + * from + * @param flagArguments the collected flag arguments + * @param terminatorPrefix the terminator prefix to stop collecting of + * argument flags + * @return the index of the first argument that started with the + * terminatorPrefix + */ + private static int collectFlagArguments(List<String> args, int startIndex, + List<String> flagArguments, String terminatorPrefix) { + startIndex++; + while (startIndex < args.size()) { + String name = args.get(startIndex); + if (name.startsWith(terminatorPrefix)) { + return startIndex - 1; + } + flagArguments.add(name); + startIndex++; + } + return startIndex; + } + + /** + * Returns a single argument for a command line option. + * + * @throws IOException if no more arguments are available + */ + private static String getArgument(List<String> args, int i, String arg) throws IOException { + if (i + 1 < args.size()) { + return args.get(i + 1); + } + throw new IOException(arg + ": missing argument"); + } + + /** + * Processes the command line arguments. 
+ * + * @throws IOException if one of the files containing options cannot be read + */ + protected void processCommandlineArgs(List<String> args) throws IOException { + List<String> manifestLines = new ArrayList<>(); + List<String> prefixes = new ArrayList<>(); + for (int i = 0; i < args.size(); i++) { + String arg = args.get(i); + if (arg.equals("--sources")) { + i = collectFlagArguments(args, i, inputJars, "--"); + } else if (arg.equals("--resources")) { + i = collectFlagArguments(args, i, resources, "--"); + } else if (arg.equals("--classpath_resources")) { + i = collectFlagArguments(args, i, classpathResources, "--"); + } else if (arg.equals("--deploy_manifest_lines")) { + i = collectFlagArguments(args, i, manifestLines, "--"); + } else if (arg.equals("--build_info_file")) { + buildInformationFiles.add(getArgument(args, i, arg)); + i++; + } else if (arg.equals("--extra_build_info")) { + buildInformations.add(getArgument(args, i, arg)); + i++; + } else if (arg.equals("--main_class")) { + mainClass = getArgument(args, i, arg); + i++; + } else if (arg.equals("--output")) { + outputJar = getArgument(args, i, arg); + i++; + } else if (arg.equals("--compression")) { + outputMode = OutputMode.FORCE_DEFLATE; + } else if (arg.equals("--dont_change_compression")) { + outputMode = OutputMode.DONT_CARE; + } else if (arg.equals("--normalize")) { + normalize = true; + } else if (arg.equals("--include_prefixes")) { + i = collectFlagArguments(args, i, prefixes, "--"); + } else if (arg.equals("--exclude_build_data")) { + includeBuildData = false; + } else if (arg.equals("--warn_duplicate_resources")) { + warnDuplicateFiles = true; + } else if (arg.equals("--java_launcher")) { + launcherBin = getArgument(args, i, arg); + i++; + } else { + throw new IOException("unknown option : '" + arg + "'"); + } + } + if (!manifestLines.isEmpty()) { + setExtraManifestContent(joinWithNewlines(manifestLines)); + } + if (!prefixes.isEmpty()) { + setPathPrefixes(prefixes); + } + } + + private 
String joinWithNewlines(Iterable<String> lines) { + StringBuilder result = new StringBuilder(); + Iterator<String> it = lines.iterator(); + if (it.hasNext()) { + result.append(it.next()); + } + while (it.hasNext()) { + result.append('\n'); + result.append(it.next()); + } + return result.toString(); + } + + private void setExtraManifestContent(String extraManifestContent) { + // The manifest content has to be terminated with a newline character + if (!extraManifestContent.endsWith("\n")) { + extraManifestContent = extraManifestContent + '\n'; + } + this.extraManifestContent = extraManifestContent; + } + + private void setPathPrefixes(List<String> prefixes) throws IOException { + if (prefixes.isEmpty()) { + throw new IOException( + "Empty set of path prefixes; cowardly refusing to emit an empty jar file"); + } + allowedPaths = new PrefixListPathFilter(prefixes); + } + + public static void main(String[] args) { + try { + SingleJar singlejar = new SingleJar(new JavaIoFileSystem()); + System.exit(singlejar.run(Arrays.asList(args))); + } catch (IOException e) { + System.err.println("SingleJar threw exception : " + e.getMessage()); + System.exit(1); + } + } +} diff --git a/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/ZipCombiner.java b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/ZipCombiner.java new file mode 100644 index 0000000000..d38c6d4493 --- /dev/null +++ b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/ZipCombiner.java @@ -0,0 +1,1643 @@ +// Copyright 2014 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.devtools.build.singlejar; + +import static java.nio.charset.StandardCharsets.ISO_8859_1; +import static java.nio.charset.StandardCharsets.UTF_8; + +import com.google.devtools.build.singlejar.ZipEntryFilter.CustomMergeStrategy; +import com.google.devtools.build.singlejar.ZipEntryFilter.StrategyCallback; + +import java.io.BufferedOutputStream; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.EOFException; +import java.io.FilterOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Calendar; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.zip.CRC32; +import java.util.zip.DataFormatException; +import java.util.zip.Deflater; +import java.util.zip.Inflater; + +import javax.annotation.Nullable; +import javax.annotation.concurrent.NotThreadSafe; + +/** + * An object that combines multiple ZIP files into a single file. 
It only + * supports a subset of the ZIP format, specifically: + * <ul> + * <li>It only supports STORE and DEFLATE storage methods.</li> + * <li>There may be no data before the first file or between files.</li> + * <li>It ignores any data after the last file.</li> + * </ul> + * + * <p>These restrictions are also present in the JDK implementations + * {@link java.util.jar.JarInputStream}, {@link java.util.zip.ZipInputStream}, + * though they are not documented there. + * + * <p>IMPORTANT NOTE: Callers must call {@link #finish()} or {@link #close()} + * at the end of processing to ensure that the output buffers are flushed and + * the ZIP file is complete. + * + * <p>This class performs only rudimentary data checking. If the input files + * are damaged, the output will likely also be damaged. + * + * <p>Also see: + * <a href="http://www.pkware.com/documents/casestudies/APPNOTE.TXT">ZIP format</a> + */ +@NotThreadSafe +public final class ZipCombiner implements AutoCloseable { + + /** + * A Date set to the 1/1/1980, 00:00:00, the minimum value that can be stored + * in a ZIP file. + */ + public static final Date DOS_EPOCH = new GregorianCalendar(1980, 0, 1, 0, 0, 0).getTime(); + + private static final int DEFAULT_CENTRAL_DIRECTORY_BLOCK_SIZE = 1048576; // 1 MB for each block + + // The following constants are ZIP-specific. 
+ private static final int LOCAL_FILE_HEADER_MARKER = 0x04034b50; + private static final int DATA_DESCRIPTOR_MARKER = 0x08074b50; + private static final int CENTRAL_DIRECTORY_MARKER = 0x02014b50; + private static final int END_OF_CENTRAL_DIRECTORY_MARKER = 0x06054b50; + + private static final int FILE_HEADER_BUFFER_SIZE = 30; + + private static final int VERSION_TO_EXTRACT_OFFSET = 4; + private static final int GENERAL_PURPOSE_FLAGS_OFFSET = 6; + private static final int COMPRESSION_METHOD_OFFSET = 8; + private static final int MTIME_OFFSET = 10; + private static final int MDATE_OFFSET = 12; + private static final int CRC32_OFFSET = 14; + private static final int COMPRESSED_SIZE_OFFSET = 18; + private static final int UNCOMPRESSED_SIZE_OFFSET = 22; + private static final int FILENAME_LENGTH_OFFSET = 26; + private static final int EXTRA_LENGTH_OFFSET = 28; + + private static final int DIRECTORY_ENTRY_BUFFER_SIZE = 46; + + // Set if the size, compressed size and CRC are set to zero, and present in + // the data descriptor after the data. + private static final int SIZE_MASKED_FLAG = 1 << 3; + + private static final int STORED_METHOD = 0; + private static final int DEFLATE_METHOD = 8; + + private static final int VERSION_STORED = 10; // Version 1.0 + private static final int VERSION_DEFLATE = 20; // Version 2.0 + + private static final long MAXIMUM_DATA_SIZE = 0xffffffffL; + + // This class relies on the buffer to have sufficient space for a complete + // file name. 2^16 is the maximum number of bytes in a file name. + private static final int BUFFER_SIZE = 65536; + + /** An empty entry used to skip files that have already been copied (or skipped). */ + private static final FileEntry COPIED_FILE_ENTRY = new FileEntry(null, null, 0); + + /** An empty entry used to mark files that have already been renamed. */ + private static final FileEntry RENAMED_FILE_ENTRY = new FileEntry(null, null, 0); + + /** A zero length array of ExtraData. 
*/ + public static final ExtraData[] NO_EXTRA_ENTRIES = new ExtraData[0]; + + /** + * Whether to compress or decompress entries. + */ + public enum OutputMode { + + /** + * Output entries using any method. + */ + DONT_CARE, + + /** + * Output all entries using DEFLATE method, except directory entries. It is + * always more efficient to store directory entries uncompressed. + */ + FORCE_DEFLATE, + + /** + * Output all entries using STORED method. + */ + FORCE_STORED; + } + + // A two-element enum for copyOrSkip type methods. + private static enum SkipMode { + + /** + * Copy the read data to the output stream. + */ + COPY, + + /** + * Do not write anything to the output stream. + */ + SKIP; + } + + /** + * Stores internal information about merges or skips. + */ + private static final class FileEntry { + + /** If null, the file should be skipped. Otherwise, it should be merged. */ + private final CustomMergeStrategy mergeStrategy; + private final ByteArrayOutputStream outputBuffer; + private final int dosTime; + + private FileEntry(CustomMergeStrategy mergeStrategy, ByteArrayOutputStream outputBuffer, + int dosTime) { + this.mergeStrategy = mergeStrategy; + this.outputBuffer = outputBuffer; + this.dosTime = dosTime; + } + } + + /** + * The directory entry info used for files whose extra directory entry info is not given + * explicitly. It uses {@code -1} for {@link DirectoryEntryInfo#withMadeByVersion(short)}, which + * indicates it will be set to the same version as "needed to extract." + * + * <p>The {@link DirectoryEntryInfo#withExternalFileAttribute(int)} value is set to {@code 0}, + * whose meaning depends on the value of {@code madeByVersion}, but is usually a reasonable + * default. + */ + public static final DirectoryEntryInfo DEFAULT_DIRECTORY_ENTRY_INFO = + new DirectoryEntryInfo((short) -1, 0); + + /** + * Contains information related to a zip entry that is stored in the central directory record. 
+ * This does not contain all the information stored in the central directory record, only the + * information that can be customized and is not automatically calculated or detected. + */ + public static final class DirectoryEntryInfo { + private final short madeByVersion; + private final int externalFileAttribute; + + private DirectoryEntryInfo(short madeByVersion, int externalFileAttribute) { + this.madeByVersion = madeByVersion; + this.externalFileAttribute = externalFileAttribute; + } + + /** + * This will be written as "made by" version in the central directory. + * If -1 (default) then "made by" will be the same to version "needed to extract". + */ + public DirectoryEntryInfo withMadeByVersion(short madeByVersion) { + return new DirectoryEntryInfo(madeByVersion, externalFileAttribute); + } + + /** + * This will be written as external file attribute. The meaning of this depends upon the value + * set with {@link #withMadeByVersion(short)}. If that value indicates a Unix source, then this + * value has the file mode and permission bits in the upper two bytes (e.g. possibly + * {@code 0100644} for a regular file). + */ + public DirectoryEntryInfo withExternalFileAttribute(int externalFileAttribute) { + return new DirectoryEntryInfo(madeByVersion, externalFileAttribute); + } + } + + /** + * The central directory, which is grown as required; instead of using a single large buffer, we + * store a sequence of smaller buffers. With a single large buffer, whenever we grow the buffer by + * 2x, we end up requiring 3x the memory temporarily, which can lead to OOM problems even if there + * would still be enough memory. + * + * <p>The invariants for the fields are as follows: + * <ul> + * <li>All blocks must have the same size. + * <li>The list of blocks must contain all blocks, including the current block (even if empty). + * <li>The current block offset must apply to the last block in the list, which is + * simultaneously the current block. 
+ * <li>The current block may only be {@code null} if the list is empty. + * </ul> + */ + private static final class CentralDirectory { + private final int blockSize; // We allow this to be overridden for testing. + private List<byte[]> blockList = new ArrayList<>(); + private byte[] currentBlock; + private int currentBlockOffset = 0; + private int size = 0; + + CentralDirectory(int centralDirectoryBlockSize) { + this.blockSize = centralDirectoryBlockSize; + } + + /** + * Appends the given data to the central directory and returns the start + * offset within the central directory to allow back-patching. + */ + int writeToCentralDirectory(byte[] b, int off, int len) { + checkArgument(len >= 0); + int offsetStarted = size; + while (len > 0) { + if (currentBlock == null + || currentBlockOffset >= currentBlock.length) { + currentBlock = new byte[blockSize]; + currentBlockOffset = 0; + blockList.add(currentBlock); + } + int maxCopy = Math.min(blockSize - currentBlockOffset, len); + System.arraycopy(b, off, currentBlock, currentBlockOffset, maxCopy); + off += maxCopy; + len -= maxCopy; + size += maxCopy; + currentBlockOffset += maxCopy; + } + return offsetStarted; + } + + /** Calls through to {@link #writeToCentralDirectory(byte[], int, int)}. */ + int writeToCentralDirectory(byte[] b) { + return writeToCentralDirectory(b, 0, b.length); + } + + /** + * Writes an unsigned int in little-endian byte order to the central directory at the + * given offset. Does not perform range checking. 
+ */ + void setUnsignedInt(int offset, int value) { + blockList.get(cdIndex(offset + 0))[cdOffset(offset + 0)] = (byte) (value & 0xff); + blockList.get(cdIndex(offset + 1))[cdOffset(offset + 1)] = (byte) ((value >> 8) & 0xff); + blockList.get(cdIndex(offset + 2))[cdOffset(offset + 2)] = (byte) ((value >> 16) & 0xff); + blockList.get(cdIndex(offset + 3))[cdOffset(offset + 3)] = (byte) ((value >> 24) & 0xff); + } + + private int cdIndex(int offset) { + return offset / blockSize; + } + + private int cdOffset(int offset) { + return offset % blockSize; + } + + /** + * Writes the central directory to the given output stream and returns the size, i.e., the + * number of bytes written. + */ + int writeTo(OutputStream out) throws IOException { + for (int i = 0; i < blockList.size() - 1; i++) { + out.write(blockList.get(i)); + } + if (currentBlock != null) { + out.write(currentBlock, 0, currentBlockOffset); + } + return size; + } + } + + /** + * An output stream that counts how many bytes were written. + */ + private static final class ByteCountingOutputStream extends FilterOutputStream { + private long bytesWritten = 0L; + + ByteCountingOutputStream(OutputStream out) { + super(out); + } + + @Override + public void write(byte[] b, int off, int len) throws IOException { + out.write(b, off, len); + bytesWritten += len; + } + + @Override + public void write(int b) throws IOException { + out.write(b); + bytesWritten++; + } + } + + private final OutputMode mode; + private final ZipEntryFilter entryFilter; + + private final ByteCountingOutputStream out; + + // An input buffer to allow reading blocks of data. Keeping it here avoids + // another copy operation that would be required by the BufferedInputStream. + // The valid data is between bufferOffset and bufferOffset+bufferLength (exclusive). 
+ private final byte[] buffer = new byte[BUFFER_SIZE]; + private int bufferOffset = 0; + private int bufferLength = 0; + + private String currentInputFile; + + // An intermediate buffer for the file header data. Keeping it here avoids + // creating a new buffer for every entry. + private final byte[] headerBuffer = new byte[FILE_HEADER_BUFFER_SIZE]; + + // An intermediate buffer for a central directory entry. Keeping it here + // avoids creating a new buffer for every entry. + private final byte[] directoryEntryBuffer = new byte[DIRECTORY_ENTRY_BUFFER_SIZE]; + + // The Inflater is a class member to avoid creating a new instance for every + // entry in the ZIP file. + private final Inflater inflater = new Inflater(true); + + // The contents of this buffer are never read. The Inflater is only used to + // determine the length of the compressed data, and the buffer is a throw- + // away buffer for the decompressed data. + private final byte[] inflaterBuffer = new byte[BUFFER_SIZE]; + + private final Map<String, FileEntry> fileNames = new HashMap<>(); + + private final CentralDirectory centralDirectory; + private int fileCount = 0; + + private boolean finished = false; + + // Package private for testing. + ZipCombiner(OutputMode mode, ZipEntryFilter entryFilter, OutputStream out, + int centralDirectoryBlockSize) { + this.mode = mode; + this.entryFilter = entryFilter; + this.out = new ByteCountingOutputStream(new BufferedOutputStream(out)); + this.centralDirectory = new CentralDirectory(centralDirectoryBlockSize); + } + + /** + * Creates a new instance with the given parameters. The {@code entryFilter} + * is called for every entry in the ZIP files and the combined ZIP file is + * written to {@code out}. The output mode determines whether entries must be + * written in compressed or decompressed form. Note that the result is + * invalid if an exception is thrown from any of the methods in this class, + * and before a call to {@link #close} or {@link #finish}. 
+ */ + public ZipCombiner(OutputMode mode, ZipEntryFilter entryFilter, OutputStream out) { + this(mode, entryFilter, out, DEFAULT_CENTRAL_DIRECTORY_BLOCK_SIZE); + } + + /** + * Creates a new instance with the given parameters and the DONT_CARE mode. + */ + public ZipCombiner(ZipEntryFilter entryFilter, OutputStream out) { + this(OutputMode.DONT_CARE, entryFilter, out); + } + + /** + * Creates a new instance with the {@link CopyEntryFilter} as the filter and + * the given mode and output stream. + */ + public ZipCombiner(OutputMode mode, OutputStream out) { + this(mode, new CopyEntryFilter(), out); + } + + /** + * Creates a new instance with the {@link CopyEntryFilter} as the filter, the + * DONT_CARE mode and the given output stream. + */ + public ZipCombiner(OutputStream out) { + this(OutputMode.DONT_CARE, new CopyEntryFilter(), out); + } + + /** + * Returns whether the output zip already contains a file or directory with + * the given name. + */ + public boolean containsFile(String filename) { + return fileNames.containsKey(filename); + } + + /** + * Makes a write call to the output stream, and updates the current offset. + */ + private void write(byte[] b, int off, int len) throws IOException { + out.write(b, off, len); + } + + /** Calls through to {@link #write(byte[], int, int)}. */ + private void write(byte[] b) throws IOException { + write(b, 0, b.length); + } + + /** + * Reads at least one more byte into the internal buffer. This method must + * only be called when more data is necessary to correctly decode the ZIP + * format. + * + * <p>This method automatically compacts the existing data in the buffer by + * moving it to the beginning of the buffer. 
+ * + * @throws EOFException if no more data is available from the input stream + * @throws IOException if the underlying stream throws one + */ + private void readMoreData(InputStream in) throws IOException { + if ((bufferLength > 0) && (bufferOffset > 0)) { + System.arraycopy(buffer, bufferOffset, buffer, 0, bufferLength); + } + if (bufferLength >= buffer.length) { + // The buffer size is specifically chosen to avoid this situation. + throw new AssertionError("Internal error: buffer overrun."); + } + bufferOffset = 0; + int bytesRead = in.read(buffer, bufferLength, buffer.length - bufferLength); + if (bytesRead <= 0) { + throw new EOFException(); + } + bufferLength += bytesRead; + } + + /** + * Reads data until the buffer is filled with at least {@code length} bytes. + * + * @throws IllegalArgumentException if not 0 <= length <= buffer.length + * @throws IOException if the underlying input stream throws one or the end + * of the input stream is reached before the required + * number of bytes is read + */ + private void readFully(InputStream in, int length) throws IOException { + checkArgument(length >= 0, "length too small: %s", length); + checkArgument(length <= buffer.length, "length too large: %s", length); + while (bufferLength < length) { + readMoreData(in); + } + } + + /** + * Reads an unsigned short in little-endian byte order from the buffer at the + * given offset. Does not perform range checking. + */ + private int getUnsignedShort(byte[] source, int offset) { + int a = source[offset + 0] & 0xff; + int b = source[offset + 1] & 0xff; + return (b << 8) | a; + } + + /** + * Reads an unsigned int in little-endian byte order from the buffer at the + * given offset. Does not perform range checking. 
+ */ + private long getUnsignedInt(byte[] source, int offset) { + int a = source[offset + 0] & 0xff; + int b = source[offset + 1] & 0xff; + int c = source[offset + 2] & 0xff; + int d = source[offset + 3] & 0xff; + return ((d << 24) | (c << 16) | (b << 8) | a) & 0xffffffffL; + } + + /** + * Writes an unsigned short in little-endian byte order to the buffer at the + * given offset. Does not perform range checking. + */ + private void setUnsignedShort(byte[] target, int offset, short value) { + target[offset + 0] = (byte) (value & 0xff); + target[offset + 1] = (byte) ((value >> 8) & 0xff); + } + + /** + * Writes an unsigned int in little-endian byte order to the buffer at the + * given offset. Does not perform range checking. + */ + private void setUnsignedInt(byte[] target, int offset, int value) { + target[offset + 0] = (byte) (value & 0xff); + target[offset + 1] = (byte) ((value >> 8) & 0xff); + target[offset + 2] = (byte) ((value >> 16) & 0xff); + target[offset + 3] = (byte) ((value >> 24) & 0xff); + } + + /** + * Copies or skips {@code length} amount of bytes from the input stream to the + * output stream. If the internal buffer is not empty, those bytes are copied + * first. When the method returns, there may be more bytes remaining in the + * buffer. + * + * @throws IOException if the underlying stream throws one + */ + private void copyOrSkipData(InputStream in, long length, SkipMode skip) throws IOException { + checkArgument(length >= 0); + while (length > 0) { + if (bufferLength == 0) { + readMoreData(in); + } + int bytesToWrite = (length < bufferLength) ? (int) length : bufferLength; + if (skip == SkipMode.COPY) { + write(buffer, bufferOffset, bytesToWrite); + } + bufferOffset += bytesToWrite; + bufferLength -= bytesToWrite; + length -= bytesToWrite; + } + } + + /** + * Copies or skips {@code length} amount of bytes from the input stream to the + * output stream. If the internal buffer is not empty, those bytes are copied + * first. 
When the method returns, there may be more bytes remaining in the + * buffer. In addition to writing to the output stream, it also writes to the + * central directory. + * + * @throws IOException if the underlying stream throws one + */ + private void forkOrSkipData(InputStream in, long length, SkipMode skip) throws IOException { + checkArgument(length >= 0); + while (length > 0) { + if (bufferLength == 0) { + readMoreData(in); + } + int bytesToWrite = (length < bufferLength) ? (int) length : bufferLength; + if (skip == SkipMode.COPY) { + write(buffer, bufferOffset, bytesToWrite); + centralDirectory.writeToCentralDirectory(buffer, bufferOffset, bytesToWrite); + } + bufferOffset += bytesToWrite; + bufferLength -= bytesToWrite; + length -= bytesToWrite; + } + } + + /** + * A mutable integer reference value to allow returning two values from a + * method. + */ + private static class MutableInt { + + private int value; + + MutableInt(int initialValue) { + this.value = initialValue; + } + + public void setValue(int value) { + this.value = value; + } + + public int getValue() { + return value; + } + } + + /** + * Uses the inflater to decompress some data into the given buffer. This + * method performs no error checking on the input parameters and also does + * not update the buffer parameters of the input buffer (such as bufferOffset + * and bufferLength). It's only here to avoid code duplication. + * + * <p>The Inflater may not be in the finished state when this method is + * called. + * + * <p>This method returns 0 if it read data and reached the end of the + * DEFLATE stream without producing output. In that case, {@link + * Inflater#finished} is guaranteed to return true. + * + * @throws IOException if the underlying stream throws an IOException or if + * illegal data is encountered + */ + private int inflateData(InputStream in, byte[] dest, int off, int len, MutableInt consumed) + throws IOException { + // Defend against Inflater.finished() returning true. 
+ consumed.setValue(0); + int bytesProduced = 0; + int bytesConsumed = 0; + while ((bytesProduced == 0) && !inflater.finished()) { + inflater.setInput(buffer, bufferOffset + bytesConsumed, bufferLength - bytesConsumed); + int remainingBefore = inflater.getRemaining(); + try { + bytesProduced = inflater.inflate(dest, off, len); + } catch (DataFormatException e) { + throw new IOException("Invalid deflate stream in ZIP file.", e); + } + bytesConsumed += remainingBefore - inflater.getRemaining(); + consumed.setValue(bytesConsumed); + if (bytesProduced == 0) { + if (inflater.needsDictionary()) { + // The DEFLATE algorithm as used in the ZIP file format does not + // require an additional dictionary. + throw new AssertionError("Inflater unexpectedly requires a dictionary."); + } else if (inflater.needsInput()) { + readMoreData(in); + } else if (inflater.finished()) { + return 0; + } else { + // According to the Inflater specification, this cannot happen. + throw new AssertionError("Inflater unexpectedly produced no output."); + } + } + } + return bytesProduced; + } + + /** + * Copies or skips data from the input stream to the output stream. To + * determine the length of the data, the data is decompressed with the + * DEFLATE algorithm, which stores the length implicitly as part of the + * compressed data, using a combination of end markers and length indicators. + * + * @see <a href="http://www.ietf.org/rfc/rfc1951.txt">RFC 1951</a> + * + * @throws IOException if the underlying stream throws an IOException + */ + private long copyOrSkipDeflateData(InputStream in, SkipMode skip) throws IOException { + long bytesCopied = 0; + inflater.reset(); + MutableInt consumedBytes = new MutableInt(0); + while (!inflater.finished()) { + // Neither the uncompressed data nor the length of it is used. The + // decompression is only required to determine the correct length of the + // compressed data to copy. 
+ inflateData(in, inflaterBuffer, 0, inflaterBuffer.length, consumedBytes); + int bytesRead = consumedBytes.getValue(); + if (skip == SkipMode.COPY) { + write(buffer, bufferOffset, bytesRead); + } + bufferOffset += bytesRead; + bufferLength -= bytesRead; + bytesCopied += bytesRead; + } + return bytesCopied; + } + + /** + * Returns a 32-bit integer containing a ZIP-compatible encoding of the given + * date. Only dates between 1980 and 2107 (inclusive) are supported. + * + * <p>The upper 16 bits contain the year, month, and day. The lower 16 bits + * contain the hour, minute, and second. The resolution of the second field + * is only 4 bits, which means that the only even second values can be + * stored - this method rounds down to the nearest even value. + * + * @throws IllegalArgumentException if the given date is outside the + * supported range + */ + // Only visible for testing. + static int dateToDosTime(Date date) { + Calendar calendar = new GregorianCalendar(); + calendar.setTime(date); + int year = calendar.get(Calendar.YEAR); + if (year < 1980) { + throw new IllegalArgumentException("date must be in or after 1980"); + } + // The ZIP format only provides 7 bits for the year. + if (year > 2107) { + throw new IllegalArgumentException("date must before 2107"); + } + int month = calendar.get(Calendar.MONTH) + 1; // Months from Calendar are zero-based. + int day = calendar.get(Calendar.DAY_OF_MONTH); + int hour = calendar.get(Calendar.HOUR_OF_DAY); + int minute = calendar.get(Calendar.MINUTE); + int second = calendar.get(Calendar.SECOND); + return ((year - 1980) << 25) | (month << 21) | (day << 16) + | (hour << 11) | (minute << 5) | (second >> 1); + } + + /** + * Fills the directory entry, using the information from the header buffer, + * and writes it to the central directory. It returns the offset into the + * central directory that can be used for patching the entry. Requires that + * the entire entry header is present in {@link #headerBuffer}. 
It also uses + * the {@link ByteCountingOutputStream#bytesWritten}, so it must be called + * just before the header is written to the output stream. + * + * @throws IOException if the current offset is too large for the ZIP format + */ + private int fillDirectoryEntryBuffer( + DirectoryEntryInfo directoryEntryInfo) throws IOException { + // central file header signature + setUnsignedInt(directoryEntryBuffer, 0, CENTRAL_DIRECTORY_MARKER); + short version = (short) getUnsignedShort(headerBuffer, VERSION_TO_EXTRACT_OFFSET); + short curMadeMyVersion = (directoryEntryInfo.madeByVersion == -1) + ? version : directoryEntryInfo.madeByVersion; + setUnsignedShort(directoryEntryBuffer, 4, curMadeMyVersion); // version made by + // version needed to extract + setUnsignedShort(directoryEntryBuffer, 6, version); + // general purpose bit flag + setUnsignedShort(directoryEntryBuffer, 8, + (short) getUnsignedShort(headerBuffer, GENERAL_PURPOSE_FLAGS_OFFSET)); + // compression method + setUnsignedShort(directoryEntryBuffer, 10, + (short) getUnsignedShort(headerBuffer, COMPRESSION_METHOD_OFFSET)); + // last mod file time, last mod file date + setUnsignedShort(directoryEntryBuffer, 12, + (short) getUnsignedShort(headerBuffer, MTIME_OFFSET)); + setUnsignedShort(directoryEntryBuffer, 14, + (short) getUnsignedShort(headerBuffer, MDATE_OFFSET)); + // crc-32 + setUnsignedInt(directoryEntryBuffer, 16, (int) getUnsignedInt(headerBuffer, CRC32_OFFSET)); + // compressed size + setUnsignedInt(directoryEntryBuffer, 20, + (int) getUnsignedInt(headerBuffer, COMPRESSED_SIZE_OFFSET)); + // uncompressed size + setUnsignedInt(directoryEntryBuffer, 24, + (int) getUnsignedInt(headerBuffer, UNCOMPRESSED_SIZE_OFFSET)); + // file name length + setUnsignedShort(directoryEntryBuffer, 28, + (short) getUnsignedShort(headerBuffer, FILENAME_LENGTH_OFFSET)); + // extra field length + setUnsignedShort(directoryEntryBuffer, 30, + (short) getUnsignedShort(headerBuffer, EXTRA_LENGTH_OFFSET)); + 
setUnsignedShort(directoryEntryBuffer, 32, (short) 0); // file comment length + setUnsignedShort(directoryEntryBuffer, 34, (short) 0); // disk number start + setUnsignedShort(directoryEntryBuffer, 36, (short) 0); // internal file attributes + setUnsignedInt(directoryEntryBuffer, 38, directoryEntryInfo.externalFileAttribute); + if (out.bytesWritten >= MAXIMUM_DATA_SIZE) { + throw new IOException("Unable to handle files bigger than 2^32 bytes."); + } + // relative offset of local header + setUnsignedInt(directoryEntryBuffer, 42, (int) out.bytesWritten); + fileCount++; + return centralDirectory.writeToCentralDirectory(directoryEntryBuffer); + } + + /** + * Fix the directory entry with the correct crc32, compressed size, and + * uncompressed size. + */ + private void fixDirectoryEntry(int offset, long crc32, long compressedSize, + long uncompressedSize) { + // The constants from the top don't apply here, because this is the central directory entry. + centralDirectory.setUnsignedInt(offset + 16, (int) crc32); // crc-32 + centralDirectory.setUnsignedInt(offset + 20, (int) compressedSize); // compressed size + centralDirectory.setUnsignedInt(offset + 24, (int) uncompressedSize); // uncompressed size + } + + /** + * (Un)Compresses and copies the current ZIP file entry. Requires that the + * entire entry header is present in {@link #headerBuffer}. It currently + * drops the extra data in the process. + * + * @throws IOException if the underlying stream throws an IOException + */ + private void modifyAndCopyEntry(String filename, InputStream in, int dosTime) + throws IOException { + final int method = getUnsignedShort(headerBuffer, COMPRESSION_METHOD_OFFSET); + final int flags = getUnsignedShort(headerBuffer, GENERAL_PURPOSE_FLAGS_OFFSET); + final int fileNameLength = getUnsignedShort(headerBuffer, FILENAME_LENGTH_OFFSET); + final int extraFieldLength = getUnsignedShort(headerBuffer, EXTRA_LENGTH_OFFSET); + // TODO(bazel-team): Read and copy the extra data if present. 
+ + forkOrSkipData(in, fileNameLength, SkipMode.SKIP); + forkOrSkipData(in, extraFieldLength, SkipMode.SKIP); + if (method == STORED_METHOD) { + long compressedSize = getUnsignedInt(headerBuffer, COMPRESSED_SIZE_OFFSET); + copyStreamToEntry(filename, new FixedLengthInputStream(in, compressedSize), dosTime, + NO_EXTRA_ENTRIES, true, DEFAULT_DIRECTORY_ENTRY_INFO); + } else if (method == DEFLATE_METHOD) { + inflater.reset(); + copyStreamToEntry(filename, new DeflateInputStream(in), dosTime, NO_EXTRA_ENTRIES, false, + DEFAULT_DIRECTORY_ENTRY_INFO); + if ((flags & SIZE_MASKED_FLAG) != 0) { + copyOrSkipData(in, 16, SkipMode.SKIP); + } + } else { + throw new AssertionError("This should have been checked in validateHeader()."); + } + } + + /** + * Copies or skips the current ZIP file entry. Requires that the entire entry + * header is present in {@link #headerBuffer}. It uses the current mode to + * decide whether to compress or decompress the entry. + * + * @throws IOException if the underlying stream throws an IOException + */ + private void copyOrSkipEntry(String filename, InputStream in, SkipMode skip, Date date, + DirectoryEntryInfo directoryEntryInfo) throws IOException { + copyOrSkipEntry(filename, in, skip, date, directoryEntryInfo, false); + } + + /** + * Renames and otherwise copies the current ZIP file entry. Requires that the entire + * entry header is present in {@link #headerBuffer}. It uses the current mode to + * decide whether to compress or decompress the entry. + * + * @throws IOException if the underlying stream throws an IOException + */ + private void renameEntry(String filename, InputStream in, Date date, + DirectoryEntryInfo directoryEntryInfo) throws IOException { + copyOrSkipEntry(filename, in, SkipMode.COPY, date, directoryEntryInfo, true); + } + + /** + * Copies or skips the current ZIP file entry. Requires that the entire entry + * header is present in {@link #headerBuffer}. 
It uses the current mode to
   * decide whether to compress or decompress the entry.
   *
   * @throws IOException if the underlying stream throws an IOException
   */
  private void copyOrSkipEntry(String filename, InputStream in, SkipMode skip, Date date,
      DirectoryEntryInfo directoryEntryInfo, boolean rename) throws IOException {
    final int method = getUnsignedShort(headerBuffer, COMPRESSION_METHOD_OFFSET);

    // We can cast here, because the result is only treated as a bitmask.
    int dosTime = date == null ? (int) getUnsignedInt(headerBuffer, MTIME_OFFSET)
        : dateToDosTime(date);
    if (skip == SkipMode.COPY) {
      // If the requested output mode disagrees with the entry's compression
      // method, the entry must be recoded instead of copied verbatim.
      // Directory entries (names ending in '/') are never deflated.
      if ((mode == OutputMode.FORCE_DEFLATE) && (method == STORED_METHOD)
          && !filename.endsWith("/")) {
        modifyAndCopyEntry(filename, in, dosTime);
        return;
      } else if ((mode == OutputMode.FORCE_STORED) && (method == DEFLATE_METHOD)) {
        modifyAndCopyEntry(filename, in, dosTime);
        return;
      }
    }

    int directoryOffset = copyOrSkipEntryHeader(filename, in, date, directoryEntryInfo,
        skip, rename);

    copyOrSkipEntryData(filename, in, skip, directoryOffset);
  }

  /**
   * Copies or skips the header of an entry, including filename and extra data.
   * Requires that the entire entry header is present in {@link #headerBuffer}.
   *
   * @return the entry offset in the central directory, or 0 if the entry is skipped
   * @throws IOException if the underlying stream throws an IOException
   */
  private int copyOrSkipEntryHeader(String filename, InputStream in, Date date,
      DirectoryEntryInfo directoryEntryInfo, SkipMode skip, boolean rename)
      throws IOException {
    final int fileNameLength = getUnsignedShort(headerBuffer, FILENAME_LENGTH_OFFSET);
    final int extraFieldLength = getUnsignedShort(headerBuffer, EXTRA_LENGTH_OFFSET);

    byte[] fileNameAsBytes = null;
    if (rename) {
      // If the entry is renamed, we patch the filename length in the buffer
      // before it's copied, and before writing to the central directory.
+ fileNameAsBytes = filename.getBytes(UTF_8); + checkArgument(fileNameAsBytes.length <= 65535, + "File name too long: %s bytes (max. 65535)", fileNameAsBytes.length); + setUnsignedShort(headerBuffer, FILENAME_LENGTH_OFFSET, (short) fileNameAsBytes.length); + } + + int directoryOffset = 0; + if (skip == SkipMode.COPY) { + if (date != null) { + int dosTime = dateToDosTime(date); + setUnsignedShort(headerBuffer, MTIME_OFFSET, (short) dosTime); // lower 16 bits + setUnsignedShort(headerBuffer, MDATE_OFFSET, (short) (dosTime >> 16)); // upper 16 bits + } + // Call this before writing the data out, so that we get the correct offset. + directoryOffset = fillDirectoryEntryBuffer(directoryEntryInfo); + write(headerBuffer, 0, FILE_HEADER_BUFFER_SIZE); + } + if (!rename) { + forkOrSkipData(in, fileNameLength, skip); + } else { + forkOrSkipData(in, fileNameLength, SkipMode.SKIP); + write(fileNameAsBytes); + centralDirectory.writeToCentralDirectory(fileNameAsBytes); + } + forkOrSkipData(in, extraFieldLength, skip); + return directoryOffset; + } + + /** + * Copy or skip the data of an entry. Requires that the + * entire entry header is present in {@link #headerBuffer}. + * + * @throws IOException if the underlying stream throws an IOException + */ + private void copyOrSkipEntryData(String filename, InputStream in, SkipMode skip, + int directoryOffset) throws IOException { + final int flags = getUnsignedShort(headerBuffer, GENERAL_PURPOSE_FLAGS_OFFSET); + final int method = getUnsignedShort(headerBuffer, COMPRESSION_METHOD_OFFSET); + if ((flags & SIZE_MASKED_FLAG) != 0) { + // The compressed data size is unknown. + if (method != DEFLATE_METHOD) { + throw new AssertionError("This should have been checked in validateHeader()."); + } + copyOrSkipDeflateData(in, skip); + // The flags indicate that a data descriptor must follow the data. 
+ readFully(in, 16); + if (getUnsignedInt(buffer, bufferOffset) != DATA_DESCRIPTOR_MARKER) { + throw new IOException("Missing data descriptor for " + filename + " in " + currentInputFile + + "."); + } + long crc32 = getUnsignedInt(buffer, bufferOffset + 4); + long compressedSize = getUnsignedInt(buffer, bufferOffset + 8); + long uncompressedSize = getUnsignedInt(buffer, bufferOffset + 12); + if (skip == SkipMode.COPY) { + fixDirectoryEntry(directoryOffset, crc32, compressedSize, uncompressedSize); + } + copyOrSkipData(in, 16, skip); + } else { + // The size value is present in the header, so just copy that amount. + long compressedSize = getUnsignedInt(headerBuffer, COMPRESSED_SIZE_OFFSET); + copyOrSkipData(in, compressedSize, skip); + } + } + + /** + * An input stream that reads a fixed number of bytes from the given input + * stream before it returns end-of-input. It uses the local buffer, so it + * can't be static. + */ + private class FixedLengthInputStream extends InputStream { + + private final InputStream in; + private long remainingBytes; + private final byte[] singleByteBuffer = new byte[1]; + + FixedLengthInputStream(InputStream in, long remainingBytes) { + this.in = in; + this.remainingBytes = remainingBytes; + } + + @Override + public int read() throws IOException { + int bytesRead = read(singleByteBuffer, 0, 1); + return (bytesRead == -1) ? 
-1 : singleByteBuffer[0]; + } + + @Override + public int read(byte b[], int off, int len) throws IOException { + checkArgument(len >= 0); + checkArgument(off >= 0); + checkArgument(off + len <= b.length); + if (remainingBytes == 0) { + return -1; + } + if (bufferLength == 0) { + readMoreData(in); + } + int bytesToCopy = len; + if (remainingBytes < bytesToCopy) { + bytesToCopy = (int) remainingBytes; + } + if (bufferLength < bytesToCopy) { + bytesToCopy = bufferLength; + } + System.arraycopy(buffer, bufferOffset, b, off, bytesToCopy); + bufferOffset += bytesToCopy; + bufferLength -= bytesToCopy; + remainingBytes -= bytesToCopy; + return bytesToCopy; + } + } + + /** + * An input stream that reads from a given input stream, decoding that data + * according to the DEFLATE algorithm. The DEFLATE data stream implicitly + * contains its own end-of-input marker. It uses the local buffer, so it + * can't be static. + */ + private class DeflateInputStream extends InputStream { + + private final InputStream in; + private final byte[] singleByteBuffer = new byte[1]; + private final MutableInt consumedBytes = new MutableInt(0); + + DeflateInputStream(InputStream in) { + this.in = in; + } + + @Override + public int read() throws IOException { + int bytesRead = read(singleByteBuffer, 0, 1); + // Do an unsigned cast on the byte from the buffer if it exists. + return (bytesRead == -1) ? -1 : (singleByteBuffer[0] & 0xff); + } + + @Override + public int read(byte b[], int off, int len) throws IOException { + if (inflater.finished()) { + return -1; + } + int length = inflateData(in, b, off, len, consumedBytes); + int bytesRead = consumedBytes.getValue(); + bufferOffset += bytesRead; + bufferLength -= bytesRead; + return length == 0 ? -1 : length; + } + } + + /** + * Handles a custom merge operation with the given strategy. This method + * creates an appropriate input stream and hands it to the strategy for + * processing. 
Requires that the entire entry header is present in {@link + * #headerBuffer}. + * + * @throws IOException if one of the underlying stream throws an IOException, + * if the ZIP entry data is inconsistent, or if the + * implementation cannot handle the compression method + * given in the ZIP entry + */ + private void handleCustomMerge(final InputStream in, CustomMergeStrategy mergeStrategy, + ByteArrayOutputStream outputBuffer) throws IOException { + final int flags = getUnsignedShort(headerBuffer, GENERAL_PURPOSE_FLAGS_OFFSET); + final int method = getUnsignedShort(headerBuffer, COMPRESSION_METHOD_OFFSET); + final long compressedSize = getUnsignedInt(headerBuffer, COMPRESSED_SIZE_OFFSET); + + final int fileNameLength = getUnsignedShort(headerBuffer, FILENAME_LENGTH_OFFSET); + final int extraFieldLength = getUnsignedShort(headerBuffer, EXTRA_LENGTH_OFFSET); + + copyOrSkipData(in, fileNameLength, SkipMode.SKIP); + copyOrSkipData(in, extraFieldLength, SkipMode.SKIP); + if (method == STORED_METHOD) { + mergeStrategy.merge(new FixedLengthInputStream(in, compressedSize), outputBuffer); + } else if (method == DEFLATE_METHOD) { + inflater.reset(); + // TODO(bazel-team): Defend against the mergeStrategy not reading the complete input. + mergeStrategy.merge(new DeflateInputStream(in), outputBuffer); + if ((flags & SIZE_MASKED_FLAG) != 0) { + copyOrSkipData(in, 16, SkipMode.SKIP); + } + } else { + throw new AssertionError("This should have been checked in validateHeader()."); + } + } + + /** + * Implementation of the strategy callback. + */ + private class TheStrategyCallback implements StrategyCallback { + + private String filename; + private final InputStream in; + + // Use an atomic boolean to make sure that only a single call goes + // through, even if there are multiple concurrent calls. Paranoid + // defensive programming. 
+ private final AtomicBoolean callDone = new AtomicBoolean(); + + TheStrategyCallback(String filename, InputStream in) { + this.filename = filename; + this.in = in; + } + + // Verify that this is the first call and throw an exception if not. + private void checkCall() { + checkState(callDone.compareAndSet(false, true), "The callback was already called once."); + } + + @Override + public void copy(Date date) throws IOException { + checkCall(); + if (!containsFile(filename)) { + fileNames.put(filename, COPIED_FILE_ENTRY); + copyOrSkipEntry(filename, in, SkipMode.COPY, date, DEFAULT_DIRECTORY_ENTRY_INFO); + } else { // can't copy, name already used for renamed entry + copyOrSkipEntry(filename, in, SkipMode.SKIP, null, DEFAULT_DIRECTORY_ENTRY_INFO); + } + } + + @Override + public void rename(String newName, Date date) throws IOException { + checkCall(); + if (!containsFile(newName)) { + fileNames.put(newName, RENAMED_FILE_ENTRY); + renameEntry(newName, in, date, DEFAULT_DIRECTORY_ENTRY_INFO); + } else { + copyOrSkipEntry(filename, in, SkipMode.SKIP, null, DEFAULT_DIRECTORY_ENTRY_INFO); + } + filename = newName; + } + + @Override + public void skip() throws IOException { + checkCall(); + if (!containsFile(filename)) {// don't overwrite possible RENAMED_FILE_ENTRY value + fileNames.put(filename, COPIED_FILE_ENTRY); + } + copyOrSkipEntry(filename, in, SkipMode.SKIP, null, DEFAULT_DIRECTORY_ENTRY_INFO); + } + + @Override + public void customMerge(Date date, CustomMergeStrategy strategy) throws IOException { + checkCall(); + ByteArrayOutputStream outputBuffer = new ByteArrayOutputStream(); + fileNames.put(filename, new FileEntry(strategy, outputBuffer, dateToDosTime(date))); + handleCustomMerge(in, strategy, outputBuffer); + } + } + + /** + * Validates that the current entry obeys all the restrictions of this implementation. 
+ * + * @throws IOException if the current entry doesn't obey the restrictions + */ + private void validateHeader() throws IOException { + // We only handle DEFLATE and STORED, like java.util.zip. + final int method = getUnsignedShort(headerBuffer, COMPRESSION_METHOD_OFFSET); + if ((method != DEFLATE_METHOD) && (method != STORED_METHOD)) { + throw new IOException("Unable to handle compression methods other than DEFLATE!"); + } + + // If the method is STORED, then the size must be available in the header. + final int flags = getUnsignedShort(headerBuffer, GENERAL_PURPOSE_FLAGS_OFFSET); + if ((method == STORED_METHOD) && ((flags & SIZE_MASKED_FLAG) != 0)) { + throw new IOException("If the method is STORED, then the size must be available in the" + + " header!"); + } + + // If the method is STORED, the compressed and uncompressed sizes must be equal. + final long compressedSize = getUnsignedInt(headerBuffer, COMPRESSED_SIZE_OFFSET); + final long uncompressedSize = getUnsignedInt(headerBuffer, UNCOMPRESSED_SIZE_OFFSET); + if ((method == STORED_METHOD) && (compressedSize != uncompressedSize)) { + throw new IOException("Compressed and uncompressed sizes for STORED entry differ!"); + } + + // The compressed or uncompressed size being set to 0xffffffff is a strong indicator that the + // ZIP file is in ZIP64 mode, which supports files larger than 2^32. + // TODO(bazel-team): Support the ZIP64 extension. + if ((compressedSize == MAXIMUM_DATA_SIZE) || (uncompressedSize == MAXIMUM_DATA_SIZE)) { + throw new IOException("Unable to handle ZIP64 compressed files."); + } + } + + /** + * Reads a file entry from the input stream, calls the entryFilter to + * determine what to do with the entry, and performs the requested operation. + * Returns true if the input stream contained another entry. 
+ * + * @throws IOException if one of the underlying stream throws an IOException, + * if the ZIP contains unsupported, inconsistent or + * incomplete data or if the filter throws an IOException + */ + private boolean handleNextEntry(final InputStream in) throws IOException { + // Just try to read the complete header and fail if it didn't work. + try { + readFully(in, FILE_HEADER_BUFFER_SIZE); + } catch (EOFException e) { + return false; + } + + System.arraycopy(buffer, bufferOffset, headerBuffer, 0, FILE_HEADER_BUFFER_SIZE); + bufferOffset += FILE_HEADER_BUFFER_SIZE; + bufferLength -= FILE_HEADER_BUFFER_SIZE; + if (getUnsignedInt(headerBuffer, 0) != LOCAL_FILE_HEADER_MARKER) { + return false; + } + validateHeader(); + + final int fileNameLength = getUnsignedShort(headerBuffer, FILENAME_LENGTH_OFFSET); + readFully(in, fileNameLength); + // TODO(bazel-team): If I read the spec correctly, this should be UTF-8 rather than ISO-8859-1. + final String filename = new String(buffer, bufferOffset, fileNameLength, ISO_8859_1); + + FileEntry handler = fileNames.get(filename); + // The handler is null if this is the first time we see an entry with this filename, + // or if all previous entries with this name were renamed by the filter (and we can + // pretend we didn't encounter the name yet). + // If the handler is RENAMED_FILE_ENTRY, a previous entry was renamed as filename, + // in which case the filter should now be invoked for this name for the first time, + // giving the filter a chance to choose an unique name. 
+ if (handler == null || handler == RENAMED_FILE_ENTRY) { + TheStrategyCallback callback = new TheStrategyCallback(filename, in); + entryFilter.accept(filename, callback); + if (fileNames.get(callback.filename) == null && fileNames.get(filename) == null) { + throw new IllegalStateException(); + } + } else if (handler.mergeStrategy == null) { + copyOrSkipEntry(filename, in, SkipMode.SKIP, null, DEFAULT_DIRECTORY_ENTRY_INFO); + } else { + handleCustomMerge(in, handler.mergeStrategy, handler.outputBuffer); + } + return true; + } + + /** + * Clears the internal buffer. + */ + private void clearBuffer() { + bufferOffset = 0; + bufferLength = 0; + } + + /** + * Copies another ZIP file into the output. If multiple entries with the same + * name are present, the first such entry is copied, but the others are + * ignored. This is also true for multiple invocations of this method. The + * {@code inputName} parameter is used to provide better error messages in the + * case of a failure to decode the ZIP file. + * + * @throws IOException if one of the underlying stream throws an IOException, + * if the ZIP contains unsupported, inconsistent or + * incomplete data or if the filter throws an IOException + */ + public void addZip(String inputName, InputStream in) throws IOException { + if (finished) { + throw new IllegalStateException(); + } + if (in == null) { + throw new NullPointerException(); + } + clearBuffer(); + currentInputFile = inputName; + while (handleNextEntry(in)) {/*handleNextEntry has side-effect.*/} + } + + public void addZip(InputStream in) throws IOException { + addZip(null, in); + } + + private void copyStreamToEntry(String filename, InputStream in, int dosTime, + ExtraData[] extraDataEntries, boolean compress, DirectoryEntryInfo directoryEntryInfo) + throws IOException { + fileNames.put(filename, COPIED_FILE_ENTRY); + + byte[] fileNameAsBytes = filename.getBytes(UTF_8); + checkArgument(fileNameAsBytes.length <= 65535, + "File name too long: %s bytes (max. 
65535)", fileNameAsBytes.length); + + // Note: This method can be called with an input stream that uses the buffer field of this + // class. We use a local buffer here to avoid conflicts. + byte[] localBuffer = new byte[4096]; + + byte[] uncompressedData = null; + if (!compress) { + ByteArrayOutputStream temp = new ByteArrayOutputStream(); + int bytesRead; + while ((bytesRead = in.read(localBuffer)) != -1) { + temp.write(localBuffer, 0, bytesRead); + } + uncompressedData = temp.toByteArray(); + } + byte[] extraData = null; + if (extraDataEntries.length != 0) { + int totalLength = 0; + for (ExtraData extra : extraDataEntries) { + int length = extra.getData().length; + if (totalLength > 0xffff - 4 - length) { + throw new IOException("Total length of extra data too big."); + } + totalLength += length + 4; + } + extraData = new byte[totalLength]; + int position = 0; + for (ExtraData extra : extraDataEntries) { + byte[] data = extra.getData(); + setUnsignedShort(extraData, position + 0, extra.getId()); + setUnsignedShort(extraData, position + 2, (short) data.length); + System.arraycopy(data, 0, extraData, position + 4, data.length); + position += data.length + 4; + } + } + + // write header + Arrays.fill(headerBuffer, (byte) 0); + setUnsignedInt(headerBuffer, 0, LOCAL_FILE_HEADER_MARKER); // file header signature + if (compress) { + setUnsignedShort(headerBuffer, 4, (short) VERSION_DEFLATE); // version to extract + setUnsignedShort(headerBuffer, 6, (short) SIZE_MASKED_FLAG); // general purpose bit flag + setUnsignedShort(headerBuffer, 8, (short) DEFLATE_METHOD); // compression method + } else { + setUnsignedShort(headerBuffer, 4, (short) VERSION_STORED); // version to extract + setUnsignedShort(headerBuffer, 6, (short) 0); // general purpose bit flag + setUnsignedShort(headerBuffer, 8, (short) STORED_METHOD); // compression method + } + setUnsignedShort(headerBuffer, 10, (short) dosTime); // mtime + setUnsignedShort(headerBuffer, 12, (short) (dosTime >> 16)); // mdate + 
if (uncompressedData != null) { + CRC32 crc = new CRC32(); + crc.update(uncompressedData); + setUnsignedInt(headerBuffer, 14, (int) crc.getValue()); // crc32 + setUnsignedInt(headerBuffer, 18, uncompressedData.length); // compressed size + setUnsignedInt(headerBuffer, 22, uncompressedData.length); // uncompressed size + } else { + setUnsignedInt(headerBuffer, 14, 0); // crc32 + setUnsignedInt(headerBuffer, 18, 0); // compressed size + setUnsignedInt(headerBuffer, 22, 0); // uncompressed size + } + setUnsignedShort(headerBuffer, 26, (short) fileNameAsBytes.length); // file name length + if (extraData != null) { + setUnsignedShort(headerBuffer, 28, (short) extraData.length); // extra field length + } else { + setUnsignedShort(headerBuffer, 28, (short) 0); // extra field length + } + + // This call works for both compressed or uncompressed entries. + int directoryOffset = fillDirectoryEntryBuffer(directoryEntryInfo); + write(headerBuffer); + write(fileNameAsBytes); + centralDirectory.writeToCentralDirectory(fileNameAsBytes); + if (extraData != null) { + write(extraData); + centralDirectory.writeToCentralDirectory(extraData); + } + + // write data + if (uncompressedData != null) { + write(uncompressedData); + } else { + try (DeflaterOutputStream deflaterStream = new DeflaterOutputStream()) { + int bytesRead; + while ((bytesRead = in.read(localBuffer)) != -1) { + deflaterStream.write(localBuffer, 0, bytesRead); + } + deflaterStream.finish(); + + // write data descriptor + Arrays.fill(headerBuffer, (byte) 0); + setUnsignedInt(headerBuffer, 0, DATA_DESCRIPTOR_MARKER); + setUnsignedInt(headerBuffer, 4, deflaterStream.getCRC()); // crc32 + setUnsignedInt(headerBuffer, 8, deflaterStream.getCompressedSize()); // compressed size + setUnsignedInt(headerBuffer, 12, deflaterStream.getUncompressedSize()); // uncompressed size + write(headerBuffer, 0, 16); + fixDirectoryEntry(directoryOffset, deflaterStream.getCRC(), + deflaterStream.getCompressedSize(), 
deflaterStream.getUncompressedSize()); + } + } + } + + /** + * Adds a new entry into the output, by reading the input stream until it + * returns end of stream. Equivalent to + * {@link #addFile(String, Date, InputStream, DirectoryEntryInfo)}, but uses + * {@link #DEFAULT_DIRECTORY_ENTRY_INFO} for the file's directory entry. + */ + public void addFile(String filename, Date date, InputStream in) throws IOException { + addFile(filename, date, in, DEFAULT_DIRECTORY_ENTRY_INFO); + } + + /** + * Adds a new entry into the output, by reading the input stream until it + * returns end of stream. This method does not call {@link + * ZipEntryFilter#accept}. + * + * @throws IOException if one of the underlying streams throws an IOException + * or if the input stream returns more data than + * supported by the ZIP format + * @throws IllegalStateException if an entry with the given name already + * exists + * @throws IllegalArgumentException if the given file name is longer than + * supported by the ZIP format + */ + public void addFile(String filename, Date date, InputStream in, + DirectoryEntryInfo directoryEntryInfo) throws IOException { + checkNotFinished(); + if (in == null) { + throw new NullPointerException(); + } + if (filename == null) { + throw new NullPointerException(); + } + checkState(!fileNames.containsKey(filename), + "jar already contains a file named %s", filename); + int dosTime = dateToDosTime(date != null ? date : new Date()); + copyStreamToEntry(filename, in, dosTime, NO_EXTRA_ENTRIES, + mode != OutputMode.FORCE_STORED, // Always compress if we're allowed to. + directoryEntryInfo); + } + + /** + * Adds a new directory entry into the output. This method does not call + * {@link ZipEntryFilter#accept}. Uses {@link #DEFAULT_DIRECTORY_ENTRY_INFO} for the added + * directory entry. 
+ * + * @throws IOException if one of the underlying streams throws an IOException + * @throws IllegalStateException if an entry with the given name already + * exists + * @throws IllegalArgumentException if the given file name is longer than + * supported by the ZIP format + */ + public void addDirectory(String filename, Date date, ExtraData[] extraDataEntries) + throws IOException { + checkNotFinished(); + checkArgument(filename.endsWith("/")); // Can also throw NPE. + checkState(!fileNames.containsKey(filename), + "jar already contains a directory named %s", filename); + int dosTime = dateToDosTime(date != null ? date : new Date()); + copyStreamToEntry(filename, new ByteArrayInputStream(new byte[0]), dosTime, extraDataEntries, + false, // Never compress directory entries. + DEFAULT_DIRECTORY_ENTRY_INFO); + } + + /** + * Adds a new directory entry into the output. This method does not call + * {@link ZipEntryFilter#accept}. + * + * @throws IOException if one of the underlying streams throws an IOException + * @throws IllegalStateException if an entry with the given name already + * exists + * @throws IllegalArgumentException if the given file name is longer than + * supported by the ZIP format + */ + public void addDirectory(String filename, Date date) + throws IOException { + addDirectory(filename, date, NO_EXTRA_ENTRIES); + } + + /** + * A deflater output stream that also counts uncompressed and compressed + * numbers of bytes and computes the CRC so that the data descriptor marker + * is written correctly. + * + * <p>Not static, so it can access the write() methods. 
+ */ + private class DeflaterOutputStream extends OutputStream { + + private final Deflater deflater = new Deflater(Deflater.DEFAULT_COMPRESSION, true); + private final CRC32 crc = new CRC32(); + private final byte[] outputBuffer = new byte[4096]; + private long uncompressedBytes = 0; + private long compressedBytes = 0; + + @Override + public void write(int b) throws IOException { + byte[] buf = new byte[] { (byte) (b & 0xff) }; + write(buf, 0, buf.length); + } + + @Override + public void write(byte b[], int off, int len) throws IOException { + checkNotFinished(); + uncompressedBytes += len; + crc.update(b, off, len); + deflater.setInput(b, off, len); + while (!deflater.needsInput()) { + deflate(); + } + } + + @Override + public void close() throws IOException { + super.close(); + deflater.end(); + } + + /** + * Writes out the remaining buffered data without closing the output + * stream. + */ + public void finish() throws IOException { + checkNotFinished(); + deflater.finish(); + while (!deflater.finished()) { + deflate(); + } + if ((compressedBytes >= MAXIMUM_DATA_SIZE) || (uncompressedBytes >= MAXIMUM_DATA_SIZE)) { + throw new IOException("Too much data for ZIP entry."); + } + } + + private void deflate() throws IOException { + int length = deflater.deflate(outputBuffer); + ZipCombiner.this.write(outputBuffer, 0, length); + compressedBytes += length; + } + + public int getCRC() { + return (int) crc.getValue(); + } + + public int getCompressedSize() { + return (int) compressedBytes; + } + + public int getUncompressedSize() { + return (int) uncompressedBytes; + } + + private void checkNotFinished() { + if (deflater.finished()) { + throw new IllegalStateException(); + } + } + } + + /** + * Writes any remaining output data to the output stream and also creates the + * merged entries by calling the {@link CustomMergeStrategy} implementations + * given back from the ZIP entry filter. 
+ * + * @throws IOException if the output stream or the filter throws an + * IOException + * @throws IllegalStateException if this method was already called earlier + */ + public void finish() throws IOException { + checkNotFinished(); + finished = true; + for (Map.Entry<String, FileEntry> entry : fileNames.entrySet()) { + String filename = entry.getKey(); + CustomMergeStrategy mergeStrategy = entry.getValue().mergeStrategy; + ByteArrayOutputStream outputBuffer = entry.getValue().outputBuffer; + int dosTime = entry.getValue().dosTime; + if (mergeStrategy == null) { + // Do nothing. + } else { + mergeStrategy.finish(outputBuffer); + copyStreamToEntry(filename, new ByteArrayInputStream(outputBuffer.toByteArray()), dosTime, + NO_EXTRA_ENTRIES, true, DEFAULT_DIRECTORY_ENTRY_INFO); + } + } + + // Write central directory. + if (out.bytesWritten >= MAXIMUM_DATA_SIZE) { + throw new IOException("Unable to handle files bigger than 2^32 bytes."); + } + int startOfCentralDirectory = (int) out.bytesWritten; + int centralDirectorySize = centralDirectory.writeTo(out); + + // end of central directory signature + setUnsignedInt(directoryEntryBuffer, 0, END_OF_CENTRAL_DIRECTORY_MARKER); + // number of this disk + setUnsignedShort(directoryEntryBuffer, 4, (short) 0); + // number of the disk with the start of the central directory + setUnsignedShort(directoryEntryBuffer, 6, (short) 0); + // total number of entries in the central directory on this disk + setUnsignedShort(directoryEntryBuffer, 8, (short) fileCount); + // total number of entries in the central directory + setUnsignedShort(directoryEntryBuffer, 10, (short) fileCount); + // size of the central directory + setUnsignedInt(directoryEntryBuffer, 12, centralDirectorySize); + // offset of start of central directory with respect to the starting disk number + setUnsignedInt(directoryEntryBuffer, 16, startOfCentralDirectory); + // .ZIP file comment length + setUnsignedShort(directoryEntryBuffer, 20, (short) 0); + 
write(directoryEntryBuffer, 0, 22); + + out.flush(); + } + + private void checkNotFinished() { + if (finished) { + throw new IllegalStateException(); + } + } + + /** + * Writes any remaining output data to the output stream and closes it. + * + * @throws IOException if the output stream or the filter throws an + * IOException + */ + @Override + public void close() throws IOException { + if (!finished) { + finish(); + } + out.close(); + } + + /** + * Turns this JAR file into an executable JAR by prepending an executable. + * JAR files are placed at the end of a file, and executables are placed + * at the beginning, so a file can be both, if desired. + * + * @param launcherIn The InputStream, from which the launcher is read. + * @throws NullPointerException if launcherIn is null + * @throws IOException if reading from launcherIn or writing to the output + * stream throws an IOException. + */ + public void prependExecutable(InputStream launcherIn) throws IOException { + if (launcherIn == null) { + throw new NullPointerException("No launcher specified"); + } + byte[] buf = new byte[BUFFER_SIZE]; + int bytesRead; + while ((bytesRead = launcherIn.read(buf)) > 0) { + out.write(buf, 0, bytesRead); + } + } + + /** + * Ensures the truth of an expression involving one or more parameters to the calling method. + */ + private static void checkArgument(boolean expression, + @Nullable String errorMessageTemplate, + @Nullable Object... errorMessageArgs) { + if (!expression) { + throw new IllegalArgumentException(String.format(errorMessageTemplate, errorMessageArgs)); + } + } + + /** + * Ensures the truth of an expression involving one or more parameters to the calling method. + */ + private static void checkArgument(boolean expression) { + if (!expression) { + throw new IllegalArgumentException(); + } + } + + /** + * Ensures the truth of an expression involving state. 
+ */ + private static void checkState(boolean expression, + @Nullable String errorMessageTemplate, + @Nullable Object... errorMessageArgs) { + if (!expression) { + throw new IllegalStateException(String.format(errorMessageTemplate, errorMessageArgs)); + } + } +} diff --git a/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/ZipEntryFilter.java b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/ZipEntryFilter.java new file mode 100644 index 0000000000..ab5a24af36 --- /dev/null +++ b/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/ZipEntryFilter.java @@ -0,0 +1,119 @@ +// Copyright 2014 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.devtools.build.singlejar; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Date; + +/** + * A custom filter for entries when combining multiple ZIP files (or even just + * copying a single ZIP file). + * + * <p>Implementations of this interface must be thread-safe. The {@link + * #accept} method may be called concurrently by multiple threads. + */ +public interface ZipEntryFilter { + + /** + * Strategy for a custom merge operation. The current file and all additional + * file are passed to the strategy object via {@link #merge}, which merges + * the files. 
At the end of the ZIP combination, {@link #finish} is called, + * which then writes the merged single entry of that name. + * + * <p>Implementations of this interface are not required to be thread-safe. + * Thread-safety is achieved by creating multiple instances. Each instance + * that is separately passed to {@link StrategyCallback#customMerge} is + * guaranteed not to be called by two threads at the same time. + */ + interface CustomMergeStrategy { + + /** + * Merges another file into the current state. This method is called for + * every file entry of the same name. + */ + void merge(InputStream in, OutputStream out) throws IOException; + + /** + * Outputs the merged result into the given output stream. This method is + * only called once when no further file of the same name is available. + */ + void finish(OutputStream out) throws IOException; + } + + /** + * A callback interface for the {@link ZipEntryFilter#accept} method. Use + * this interface to indicate the type of processing for the given file name. + * For every file name, exactly one of the methods must be called once. A + * second method call throws {@link IllegalStateException}. + * + * <p>There is no guarantee that the callback will perform the requested + * operation at the time of the invocation. An implementation may choose to + * defer the operation to an arbitrary later time. + * + * <p>IMPORTANT NOTE: Do not implement this interface. It will be modified to + * support future extensions, and all implementations in this package will be + * updated. If you violate this advice, your code will break. + */ + interface StrategyCallback { + + /** + * Skips the current entry and all entries with the same name. + */ + void skip() throws IOException; + + /** + * Copies the current entry and skips all further entries with the same + * name. If {@code date} is non-null, then the timestamp of the entry is + * overwritten with the given value. 
+ */ + void copy(Date date) throws IOException; + + /** + * Renames and copies the current entry, and skips all further entries with + * the same name. If {@code date} is non-null, then the timestamp of the entry + * is overwritten with the given value. + */ + void rename(String filename, Date date) throws IOException; + + /** + * Merges this and all further entries with the same name with the given + * {@link CustomMergeStrategy}. This method must never be called twice with + * the same object. If {@code date} is non-null, then the timestamp of the + * generated entry is set to the given value; otherwise, it is set to the + * current time. + */ + void customMerge(Date date, CustomMergeStrategy strategy) throws IOException; + } + + /** + * Determines the policy with which to handle the ZIP file entry with the + * given name and calls the appropriate method on the callback interface + * {@link StrategyCallback}. For every unique name in the set of all ZIP file + * entries, this method is called exactly once and the result is used for all + * entries of the same name. Except, if an entry is renamed, the original name + * is not considered as having been encountered yet. + * + * <p>Implementations should use the filename to distinguish the desired + * processing, call one method on the callback interface and return + * immediately after that call. + * + * <p>There is no guarantee that the callback will perform the requested + * operation at the time of the invocation. An implementation may choose to + * defer the operation to an arbitrary later time. 
+ */ + void accept(String filename, StrategyCallback callback) throws IOException; +} diff --git a/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/ConcatenateStrategyTest.java b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/ConcatenateStrategyTest.java new file mode 100644 index 0000000000..af03729111 --- /dev/null +++ b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/ConcatenateStrategyTest.java @@ -0,0 +1,75 @@ +// Copyright 2015 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.devtools.build.singlejar; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.junit.Assert.assertEquals; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; + +/** + * Unit tests for {@link ConcatenateStrategy}. + */ +@RunWith(JUnit4.class) +public class ConcatenateStrategyTest { + + private String merge(String... inputs) throws IOException { + return mergeInternal(true, inputs); + } + + private String mergeNoNewLine(String... inputs) throws IOException { + return mergeInternal(false, inputs); + } + + private String mergeInternal(boolean appendNewLine, String... 
inputs) throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ConcatenateStrategy strategy = new ConcatenateStrategy(appendNewLine); + for (String input : inputs) { + strategy.merge(new ByteArrayInputStream(input.getBytes(UTF_8)), out); + } + strategy.finish(out); + return new String(out.toByteArray(), UTF_8); + } + + @Test + public void testSingleInput() throws IOException { + assertEquals("a", merge("a")); + assertEquals("a", mergeNoNewLine("a")); + } + + @Test + public void testTwoInputs() throws IOException { + assertEquals("a\nb", merge("a\n", "b")); + assertEquals("a\nb", mergeNoNewLine("a\n", "b")); + } + + @Test + public void testAutomaticNewline() throws IOException { + assertEquals("a\nb", merge("a", "b")); + assertEquals("ab", mergeNoNewLine("a", "b")); + } + + @Test + public void testAutomaticNewlineAndEmptyFile() throws IOException { + assertEquals("a\nb", merge("a", "", "b")); + assertEquals("ab", mergeNoNewLine("a", "", "b")); + } +} diff --git a/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/CopyEntryFilterTest.java b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/CopyEntryFilterTest.java new file mode 100644 index 0000000000..8eeb8d077f --- /dev/null +++ b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/CopyEntryFilterTest.java @@ -0,0 +1,39 @@ +// Copyright 2015 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package com.google.devtools.build.singlejar; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +import java.io.IOException; +import java.util.Arrays; + +/** + * Unit tests for {@link CopyEntryFilter}. + */ +@RunWith(JUnit4.class) +public class CopyEntryFilterTest { + + @Test + public void testSingleInput() throws IOException { + RecordingCallback callback = new RecordingCallback(); + new CopyEntryFilter().accept("abc", callback); + assertEquals(Arrays.asList("copy"), callback.calls); + } + +} diff --git a/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/DefaultJarEntryFilterTest.java b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/DefaultJarEntryFilterTest.java new file mode 100644 index 0000000000..6240cfb58f --- /dev/null +++ b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/DefaultJarEntryFilterTest.java @@ -0,0 +1,101 @@ +// Copyright 2015 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.devtools.build.singlejar; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Date; +import java.util.jar.JarFile; + +/** + * Unit tests for {@link DefaultJarEntryFilter}. 
+ */ +@RunWith(JUnit4.class) +public class DefaultJarEntryFilterTest { + + private static final Date DOS_EPOCH = ZipCombiner.DOS_EPOCH; + + @Test + public void testSingleInput() throws IOException { + RecordingCallback callback = new RecordingCallback(); + new DefaultJarEntryFilter().accept("abc", callback); + assertEquals(Arrays.asList("copy"), callback.calls); + assertEquals(Arrays.asList(DOS_EPOCH), callback.dates); + } + + @Test + public void testProtobufExtensionsInput() throws IOException { + RecordingCallback callback = new RecordingCallback(); + new DefaultJarEntryFilter().accept("protobuf.meta", callback); + assertEquals(Arrays.asList("customMerge"), callback.calls); + assertEquals(Arrays.asList(DOS_EPOCH), callback.dates); + } + + @Test + public void testManifestInput() throws IOException { + RecordingCallback callback = new RecordingCallback(); + new DefaultJarEntryFilter().accept(JarFile.MANIFEST_NAME, callback); + assertEquals(Arrays.asList("skip"), callback.calls); + } + + @Test + public void testServiceInput() throws IOException { + RecordingCallback callback = new RecordingCallback(); + new DefaultJarEntryFilter().accept("META-INF/services/any.service", callback); + assertEquals(Arrays.asList("customMerge"), callback.calls); + assertEquals(Arrays.asList(DOS_EPOCH), callback.dates); + } + + @Test + public void testSpringHandlers() throws IOException { + RecordingCallback callback = new RecordingCallback(); + new DefaultJarEntryFilter().accept("META-INF/spring.handlers", callback); + assertEquals(Arrays.asList("customMerge"), callback.calls); + assertEquals(Arrays.asList(DOS_EPOCH), callback.dates); + } + + @Test + public void testSpringSchemas() throws IOException { + RecordingCallback callback = new RecordingCallback(); + new DefaultJarEntryFilter().accept("META-INF/spring.schemas", callback); + assertEquals(Arrays.asList("customMerge"), callback.calls); + assertEquals(Arrays.asList(DOS_EPOCH), callback.dates); + } + + @Test + public void 
testClassInput() throws IOException { + RecordingCallback callback = new RecordingCallback(); + new DefaultJarEntryFilter().accept("a.class", callback); + assertEquals(Arrays.asList("copy"), callback.calls); + assertEquals(Arrays.asList(DefaultJarEntryFilter.DOS_EPOCH_PLUS_2_SECONDS), callback.dates); + } + + @Test + public void testOtherSkippedInputs() throws IOException { + RecordingCallback callback = new RecordingCallback(); + ZipEntryFilter filter = new DefaultJarEntryFilter(); + filter.accept("a.SF", callback); + filter.accept("a.DSA", callback); + filter.accept("a.RSA", callback); + assertEquals(Arrays.asList("skip", "skip", "skip"), callback.calls); + assertEquals(Arrays.<Date>asList(), callback.dates); + } +} diff --git a/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/FakeZipFile.java b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/FakeZipFile.java new file mode 100644 index 0000000000..0156cbcd20 --- /dev/null +++ b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/FakeZipFile.java @@ -0,0 +1,265 @@ +// Copyright 2015 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package com.google.devtools.build.singlejar; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.common.base.Receiver; +import com.google.devtools.build.singlejar.SingleJarTest.EntryMode; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.zip.ZipEntry; +import java.util.zip.ZipInputStream; + +/** + * A fake zip file to assert that a given {@link ZipInputStream} contains + * specified entries in a specified order. Just for unit testing. + */ +public final class FakeZipFile { + + private static void assertSameByteArray(byte[] expected, byte[] actual) { + if (expected == null) { + assertNull(actual); + } else { + assertArrayEquals(expected, actual); + } + } + + private static byte[] readZipEntryContent(ZipInputStream zipInput) throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + byte[] buffer = new byte[1024]; + int bytesCopied; + while ((bytesCopied = zipInput.read(buffer)) != -1) { + out.write(buffer, 0, bytesCopied); + } + return out.toByteArray(); + } + + private static final class PlainByteValidator implements Receiver<byte[]> { + private final byte[] expected; + + private PlainByteValidator(String expected) { + this.expected = expected == null ? 
new byte[0] : expected.getBytes(UTF_8); + } + + @Override + public void accept(byte[] object) { + assertSameByteArray(expected, object); + } + + } + + private static final class FakeZipEntry { + + private final String name; + private final Receiver<byte[]> content; + private final Date date; + private final byte[] extra; + private final EntryMode mode; + + private FakeZipEntry(String name, Date date, String content, byte[] extra, EntryMode mode) { + this.name = name; + this.date = date; + this.content = new PlainByteValidator(content); + this.extra = extra; + this.mode = mode; + } + + private FakeZipEntry(String name, Date date, Receiver<byte[]> content, byte[] extra, + EntryMode mode) { + this.name = name; + this.date = date; + this.content = content; + this.extra = extra; + this.mode = mode; + } + + public void assertNext(ZipInputStream zipInput) throws IOException { + ZipEntry zipEntry = zipInput.getNextEntry(); + assertNotNull(zipEntry); + switch (mode) { + case EXPECT_DEFLATE: + assertEquals(ZipEntry.DEFLATED, zipEntry.getMethod()); + break; + case EXPECT_STORED: + assertEquals(ZipEntry.STORED, zipEntry.getMethod()); + break; + default: + // we don't care. + break; + } + assertEquals(name, zipEntry.getName()); + if (date != null) { + assertEquals(date.getTime(), zipEntry.getTime()); + } + assertSameByteArray(extra, zipEntry.getExtra()); + content.accept(readZipEntryContent(zipInput)); + } + } + + private final List<FakeZipEntry> entries = new ArrayList<>(); + + public FakeZipFile addEntry(String name, String content) { + entries.add(new FakeZipEntry(name, null, content, null, EntryMode.DONT_CARE)); + return this; + } + + public FakeZipFile addEntry(String name, String content, boolean compressed) { + entries.add(new FakeZipEntry(name, null, content, null, + compressed ? 
EntryMode.EXPECT_DEFLATE : EntryMode.EXPECT_STORED)); + return this; + } + + public FakeZipFile addEntry(String name, Date date, String content) { + entries.add(new FakeZipEntry(name, date, content, null, EntryMode.DONT_CARE)); + return this; + } + + public FakeZipFile addEntry(String name, Date date, String content, boolean compressed) { + entries.add(new FakeZipEntry(name, date, content, null, + compressed ? EntryMode.EXPECT_DEFLATE : EntryMode.EXPECT_STORED)); + return this; + } + + public FakeZipFile addEntry(String name, Receiver<byte[]> content) { + entries.add(new FakeZipEntry(name, null, content, null, EntryMode.DONT_CARE)); + return this; + } + + public FakeZipFile addEntry(String name, Receiver<byte[]> content, boolean compressed) { + entries.add(new FakeZipEntry(name, null, content, null, + compressed ? EntryMode.EXPECT_DEFLATE : EntryMode.EXPECT_STORED)); + return this; + } + + public FakeZipFile addEntry(String name, Date date, Receiver<byte[]> content) { + entries.add(new FakeZipEntry(name, date, content, null, EntryMode.DONT_CARE)); + return this; + } + + public FakeZipFile addEntry(String name, Date date, Receiver<byte[]> content, + boolean compressed) { + entries.add(new FakeZipEntry(name, date, content, null, + compressed ? EntryMode.EXPECT_DEFLATE : EntryMode.EXPECT_STORED)); + return this; + } + + public FakeZipFile addEntry(String name, byte[] extra) { + entries.add(new FakeZipEntry(name, null, (String) null, extra, EntryMode.DONT_CARE)); + return this; + } + + public FakeZipFile addEntry(String name, byte[] extra, boolean compressed) { + entries.add(new FakeZipEntry(name, null, (String) null, extra, + compressed ? 
EntryMode.EXPECT_DEFLATE : EntryMode.EXPECT_STORED)); + return this; + } + + private byte[] preamble = null; + + public FakeZipFile addPreamble(byte[] contents) { + preamble = Arrays.copyOf(contents, contents.length); + return this; + } + + private int getUnsignedShort(byte[] source, int offset) { + int a = source[offset + 0] & 0xff; + int b = source[offset + 1] & 0xff; + return (b << 8) | a; + } + + public void assertSame(byte[] data) throws IOException { + int offset = 0; + int length = data.length; + if (preamble != null) { + offset += preamble.length; + length -= offset; + byte[] maybePreamble = Arrays.copyOfRange(data, 0, offset); + assertTrue(Arrays.equals(preamble, maybePreamble)); + } + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(data, offset, length)); + for (FakeZipEntry entry : entries) { + entry.assertNext(zipInput); + } + assertNull(zipInput.getNextEntry()); + // Verify that the end of central directory data is correct. + // This assumes that the end of directory is at the end of input and that there is no zip file + // comment. + int count = getUnsignedShort(data, data.length-14); + assertEquals(entries.size(), count); + count = getUnsignedShort(data, data.length-12); + assertEquals(entries.size(), count); + } + + /** + * Assert that {@code expected} is the same zip file as {@code actual}. It is similar to + * {@link org.junit.Assert#assertArrayEquals(byte[], byte[])} but should use a more + * helpful error message. + */ + public static void assertSame(byte[] expected, byte[] actual) throws IOException { + // First parse the zip files, then compare to have explicit comparison messages. 
+ ZipInputStream expectedZip = new ZipInputStream(new ByteArrayInputStream(expected)); + ZipInputStream actualZip = new ZipInputStream(new ByteArrayInputStream(actual)); + StringBuffer actualFileList = new StringBuffer(); + StringBuffer expectedFileList = new StringBuffer(); + Map<String, ZipEntry> actualEntries = new HashMap<String, ZipEntry>(); + Map<String, ZipEntry> expectedEntries = new HashMap<String, ZipEntry>(); + Map<String, byte[]> actualEntryContents = new HashMap<String, byte[]>(); + Map<String, byte[]> expectedEntryContents = new HashMap<String, byte[]>(); + parseZipEntry(expectedZip, expectedFileList, expectedEntries, expectedEntryContents); + parseZipEntry(actualZip, actualFileList, actualEntries, actualEntryContents); + // Compare the ordered file list first. + assertEquals(expectedFileList.toString(), actualFileList.toString()); + + // Then compare each entry. + for (String name : expectedEntries.keySet()) { + ZipEntry expectedEntry = expectedEntries.get(name); + ZipEntry actualEntry = actualEntries.get(name); + assertEquals("Time differs for " + name, expectedEntry.getTime(), actualEntry.getTime()); + assertArrayEquals("Extraneous content differs for " + name, + expectedEntry.getExtra(), actualEntry.getExtra()); + assertArrayEquals("Content differs for " + name, + expectedEntryContents.get(name), actualEntryContents.get(name)); + } + + // Finally do a binary array comparison to be sure that test fails if files are different in + // some way we don't test. 
+ assertArrayEquals(expected, actual); + } + + private static void parseZipEntry(ZipInputStream expectedZip, StringBuffer expectedFileList, + Map<String, ZipEntry> expectedEntries, Map<String, byte[]> expectedEntryContents) + throws IOException { + ZipEntry expectedEntry; + while ((expectedEntry = expectedZip.getNextEntry()) != null) { + expectedFileList.append(expectedEntry.getName()).append("\n"); + expectedEntries.put(expectedEntry.getName(), expectedEntry); + expectedEntryContents.put(expectedEntry.getName(), readZipEntryContent(expectedZip)); + } + } +} diff --git a/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/MockSimpleFileSystem.java b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/MockSimpleFileSystem.java new file mode 100644 index 0000000000..8fec585fe0 --- /dev/null +++ b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/MockSimpleFileSystem.java @@ -0,0 +1,88 @@ +// Copyright 2015 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package com.google.devtools.build.singlejar;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * In-memory {@link SimpleFileSystem} for testing. It supports exactly one OutputStream,
+ * for the filename specified in the constructor.
+ * The workflow for using this class in tests is as follows:
+ * <ul>
+ * <li> Construct with exactly one outputFile. </li>
+ * <li> add some input files using method addFile </li>
+ * <li> check content of outputFile calling toByteArray </li>
+ * </ul>
+ */
+public final class MockSimpleFileSystem implements SimpleFileSystem {
+
+  // The only filename this file system accepts for writing.
+  private final String outputFileName;
+  // Non-null while the output file exists; reset to null by delete().
+  private ByteArrayOutputStream out;
+  // Readable input files, keyed by filename.
+  private final Map<String, byte[]> files = new HashMap<>();
+
+  public MockSimpleFileSystem(String outputFileName) {
+    this.outputFileName = outputFileName;
+  }
+
+  // Registers an input file with raw byte content.
+  public void addFile(String name, byte[] content) {
+    files.put(name, content);
+  }
+
+  // Registers an input file with UTF-8 encoded text content.
+  public void addFile(String name, String content) {
+    files.put(name, content.getBytes(UTF_8));
+  }
+
+  @Override
+  public OutputStream getOutputStream(String filename) {
+    // Fails the enclosing test if the name is unexpected or a second stream is requested.
+    assertEquals(outputFileName, filename);
+    assertNull(out);
+    out = new ByteArrayOutputStream();
+    return out;
+  }
+
+  @Override
+  public InputStream getInputStream(String filename) throws IOException {
+    byte[] data = files.get(filename);
+    if (data == null) {
+      throw new FileNotFoundException();
+    }
+    return new ByteArrayInputStream(data);
+  }
+
+  @Override
+  public boolean delete(String filename) {
+    // Only the output file may be deleted, and only after it was created.
+    assertEquals(outputFileName, filename);
+    assertNotNull(out);
+    out = null;
+    return true;
+  }
+
+  // Returns the bytes written to the output file so far; fails if it was never opened.
+  public byte[] toByteArray() {
+    assertNotNull(out);
+    return out.toByteArray();
+  }
+}
diff --git a/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/OptionFileExpanderTest.java b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/OptionFileExpanderTest.java
new file mode 100644
index 0000000000..eea87a5796
--- /dev/null
+++ b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/OptionFileExpanderTest.java
@@ -0,0 +1,87 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.devtools.build.singlejar;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import com.google.devtools.build.singlejar.OptionFileExpander.OptionFileProvider;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.io.ByteArrayInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Unit tests for {@link OptionFileExpander}.
+ */ +@RunWith(JUnit4.class) +public class OptionFileExpanderTest { + + private static class StoredOptionFileProvider implements OptionFileProvider { + + private Map<String, byte[]> availableFiles = new HashMap<>(); + + void addFile(String filename, String content) { + availableFiles.put(filename, content.getBytes(UTF_8)); + } + + @Override + public InputStream getInputStream(String filename) throws IOException { + byte[] result = availableFiles.get(filename); + if (result == null) { + throw new FileNotFoundException(); + } + return new ByteArrayInputStream(result); + } + } + + @Test + public void testNoExpansion() throws IOException { + OptionFileExpander expander = new OptionFileExpander(new StoredOptionFileProvider()); + assertEquals(Arrays.asList("--some", "option", "list"), + expander.expandArguments(Arrays.asList("--some", "option", "list"))); + } + + @Test + public void testExpandSimpleOptionsFile() throws IOException { + StoredOptionFileProvider provider = new StoredOptionFileProvider(); + provider.addFile("options", "--some option list"); + OptionFileExpander expander = new OptionFileExpander(provider); + assertEquals(Arrays.asList("--some", "option", "list"), + expander.expandArguments(Arrays.asList("@options"))); + } + + @Test + public void testIllegalOptionsFile() { + StoredOptionFileProvider provider = new StoredOptionFileProvider(); + provider.addFile("options", "'missing apostrophe"); + OptionFileExpander expander = new OptionFileExpander(provider); + try { + expander.expandArguments(Arrays.asList("@options")); + fail(); + } catch (IOException e) { + // Expected exception. 
+ } + } +} diff --git a/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/PrefixListPathFilterTest.java b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/PrefixListPathFilterTest.java new file mode 100644 index 0000000000..32b5eaeb19 --- /dev/null +++ b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/PrefixListPathFilterTest.java @@ -0,0 +1,54 @@ +// Copyright 2015 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.devtools.build.singlejar; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; +import com.google.devtools.build.singlejar.DefaultJarEntryFilter.PathFilter; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +/** + * Tests {@link PrefixListPathFilter}. 
+ */ +@RunWith(JUnit4.class) +public class PrefixListPathFilterTest { + private PathFilter filter; + + @Test + public void testPrefixList() { + filter = new PrefixListPathFilter(ImmutableList.of("dir1", "dir/subdir")); + assertIncluded("dir1/file1"); + assertExcluded("dir2/file1"); + assertIncluded("dir/subdir/file1"); + assertExcluded("dir2/subdir/file1"); + assertExcluded("dir/othersub/file1"); + assertExcluded("dir3/file1"); + } + + private void assertExcluded(String path) { + assertFalse(path + " should have been excluded, but was included", + filter.allowed(path)); + } + + private void assertIncluded(String path) { + assertTrue(path + " should have been included but was not", + filter.allowed(path)); + } +} diff --git a/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/RecordingCallback.java b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/RecordingCallback.java new file mode 100644 index 0000000000..a3f69ff434 --- /dev/null +++ b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/RecordingCallback.java @@ -0,0 +1,56 @@ +// Copyright 2015 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package com.google.devtools.build.singlejar;
+
+
+import com.google.devtools.build.singlejar.ZipEntryFilter.CustomMergeStrategy;
+import com.google.devtools.build.singlejar.ZipEntryFilter.StrategyCallback;
+
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+/**
+ * A helper implementation of {@link StrategyCallback} that records callback
+ * invocations as strings.
+ */
+public final class RecordingCallback implements StrategyCallback {
+
+  // Names of the invoked callbacks, in invocation order.
+  public final List<String> calls = new ArrayList<>();
+  // Dates passed to the date-carrying callbacks, in invocation order.
+  // Note: skip() adds no entry here, so this list can be shorter than calls.
+  public final List<Date> dates = new ArrayList<>();
+
+  @Override
+  public void copy(Date date) {
+    calls.add("copy");
+    dates.add(date);
+  }
+
+  @Override
+  public void rename(String filename, Date date) {
+    // NOTE(review): the filename argument is not recorded; only the kind of
+    // call and its date are tracked by this helper.
+    calls.add("rename");
+    dates.add(date);
+  }
+
+  @Override
+  public void customMerge(Date date, CustomMergeStrategy strategy) {
+    // The strategy instance itself is not recorded, only that a merge happened.
+    calls.add("customMerge");
+    dates.add(date);
+  }
+
+  @Override
+  public void skip() {
+    // skip() carries no date, so only the call name is recorded.
+    calls.add("skip");
+  }
+}
diff --git a/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/SingleJarTest.java b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/SingleJarTest.java
new file mode 100644
index 0000000000..dbff1553cb
--- /dev/null
+++ b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/SingleJarTest.java
@@ -0,0 +1,634 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.devtools.build.singlejar;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import com.google.common.base.Joiner;
+import com.google.common.base.Receiver;
+import com.google.common.collect.ImmutableList;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.jar.JarFile;
+
+/**
+ * Unit tests for {@link SingleJar}.
+ */
+@RunWith(JUnit4.class)
+public class SingleJarTest {
+
+  // Extra-field bytes expected on the META-INF/ directory entry of the output.
+  public static final byte[] EXTRA_FOR_META_INF = new byte[] {(byte) 0xFE, (byte) 0xCA, 0x00, 0x00};
+
+  // CRLF joiner (manifest line separator) and LF joiner (build-info separator).
+  static final Joiner LINE_JOINER = Joiner.on("\r\n");
+  static final Joiner LINEFEED_JOINER = Joiner.on("\n");
+
+  static enum EntryMode {
+    DONT_CARE, EXPECT_DEFLATE, EXPECT_STORED;
+  }
+
+  // Validates build-data.properties content: both sides are ':'-escaped,
+  // sorted line-wise, then compared as a single LF-joined string.
+  public static final class BuildInfoValidator implements Receiver<byte[]> {
+    private final List<String> buildInfoLines;
+
+    public BuildInfoValidator(List<String> buildInfoLines) {
+      this.buildInfoLines = buildInfoLines;
+    }
+
+    @Override
+    public void accept(byte[] content) {
+      String actualBuildInfo = new String(content, StandardCharsets.UTF_8);
+      List<String> expectedBuildInfos = new ArrayList<>();
+      for (String line : buildInfoLines) { // the character : is escaped
+        expectedBuildInfos.add(line.replace(":", "\\:"));
+      }
+      Collections.sort(expectedBuildInfos);
+      String[] actualBuildInfos = actualBuildInfo.split("\n");
+      Arrays.sort(actualBuildInfos);
+      assertEquals(LINEFEED_JOINER.join(expectedBuildInfos),
+          LINEFEED_JOINER.join(actualBuildInfos));
+    }
+
+  }
+
+  // Manifest file line ordering is dependent on the ordering in HashMap (Attributes class) so
+  // we do a sorted comparison for Manifest.
+  public static final class ManifestValidator implements Receiver<byte[]> {
+    private final List<String> manifestLines;
+
+    public ManifestValidator(List<String> manifestLines) {
+      this.manifestLines = new ArrayList<String>(manifestLines);
+      Collections.sort(this.manifestLines);
+    }
+
+    public ManifestValidator(String... manifestLines) {
+      this.manifestLines = Arrays.asList(manifestLines);
+      Collections.sort(this.manifestLines);
+    }
+
+    @Override
+    public void accept(byte[] content) {
+      // Manifests use CRLF line endings; trim the trailing blank line before
+      // splitting, then compare order-insensitively.
+      String actualManifest = new String(content, StandardCharsets.UTF_8);
+      String[] actualManifestLines = actualManifest.trim().split("\r\n");
+      Arrays.sort(actualManifestLines);
+      assertEquals(LINEFEED_JOINER.join(manifestLines), LINEFEED_JOINER.join(actualManifestLines));
+    }
+
+  }
+
+  // Expected build-data.properties when build info is redacted to the target only.
+  private BuildInfoValidator redactedBuildData(String outputJar) {
+    return new BuildInfoValidator(ImmutableList.of("build.target=" + outputJar));
+  }
+
+  // Same, plus the main.class property set via --main_class.
+  private BuildInfoValidator redactedBuildData(String outputJar, String mainClass) {
+    return new BuildInfoValidator(
+        ImmutableList.of("build.target=" + outputJar, "main.class=" + mainClass));
+  }
+
+  // Canned build-info properties used by the --extra_build_info / --build_info_file tests.
+  static List<String> getBuildInfo() {
+    return ImmutableList.of("build.build_id=11111-222-33333",
+        "build.version=12659499",
+        "build.location=user@machine.domain.com:/home/user/source",
+        "build.target=output.jar",
+        "build.time=Fri Jan 2 02:17:36 1970 (123456)",
+        "build.timestamp=Fri Jan 2 02:17:36 1970 (123456)",
+        "build.timestamp.as.int=123456"
+        );
+  }
+
+  // Minimal input jar with one deflated entry.
+  private byte[] sampleZip() {
+    ZipFactory factory = new ZipFactory();
+    factory.addFile("hello.txt", "Hello World!");
+    return factory.toByteArray();
+  }
+
+  // Same content, but stored (uncompressed).
+  private byte[] sampleUncompressedZip() {
+    ZipFactory factory = new ZipFactory();
+    factory.addFile("hello.txt", "Hello World!", false);
+    return factory.toByteArray();
+  }
+
+  // Input jar containing a signature file, which SingleJar must drop.
+  private byte[] sampleZipWithSF() {
+    ZipFactory factory = new ZipFactory();
+    factory.addFile("hello.SF", "Hello World!");
+    return factory.toByteArray();
+  }
+
+ 
private byte[] sampleZipWithSubdirs() { + ZipFactory factory = new ZipFactory(); + factory.addFile("dir1/file1", "contents11"); + factory.addFile("dir1/file2", "contents12"); + factory.addFile("dir2/file1", "contents21"); + factory.addFile("dir3/file1", "contents31"); + return factory.toByteArray(); + } + + private void assertStripFirstLine(String expected, String testCase) { + byte[] result = SingleJar.stripFirstLine(testCase.getBytes(StandardCharsets.UTF_8)); + assertEquals(expected, new String(result)); + } + + @Test + public void testStripFirstLine() { + assertStripFirstLine("", ""); + assertStripFirstLine("", "no linefeed"); + assertStripFirstLine(LINEFEED_JOINER.join("toto", "titi"), + LINEFEED_JOINER.join("# timestamp comment", "toto", "titi")); + assertStripFirstLine(LINE_JOINER.join("toto", "titi"), + LINE_JOINER.join("# timestamp comment", "toto", "titi")); + } + + @Test + public void testEmptyJar() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("--output", "output.jar")); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator( + "Manifest-Version: 1.0", + "Created-By: blaze-singlejar")) + .addEntry("build-data.properties", redactedBuildData("output.jar")); + expectedResult.assertSame(mockFs.toByteArray()); + } + + // Test that two identical calls at different time actually returns identical results + @Test + public void testDeterministicJar() throws IOException, InterruptedException { + MockSimpleFileSystem mockFs1 = new MockSimpleFileSystem("output.jar"); + SingleJar singleJar1 = new SingleJar(mockFs1); + singleJar1.run(ImmutableList.of("--output", "output.jar", "--extra_build_info", "toto=titi", + "--normalize")); + Thread.sleep(1000); // ensure that we are not at the same seconds + + MockSimpleFileSystem mockFs2 = new 
MockSimpleFileSystem("output.jar"); + SingleJar singleJar2 = new SingleJar(mockFs2); + singleJar2.run(ImmutableList.of("--output", "output.jar", "--extra_build_info", "toto=titi", + "--normalize")); + + FakeZipFile.assertSame(mockFs1.toByteArray(), mockFs2.toByteArray()); + } + + @Test + public void testExtraManifestContent() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("--output", "output.jar", "--deploy_manifest_lines", + "Main-Class: SomeClass", "X-Other: Duh")); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator( + "Manifest-Version: 1.0", + "Created-By: blaze-singlejar", + "Main-Class: SomeClass", + "X-Other: Duh")) + .addEntry("build-data.properties", redactedBuildData("output.jar")); + expectedResult.assertSame(mockFs.toByteArray()); + } + + @Test + public void testMultipleExtraManifestContent() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("--deploy_manifest_lines", "X-Other: Duh", + "--output", "output.jar", + "--deploy_manifest_lines", "Main-Class: SomeClass")); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator( + "Manifest-Version: 1.0", + "Created-By: blaze-singlejar", + "Main-Class: SomeClass", + "X-Other: Duh")) + .addEntry("build-data.properties", redactedBuildData("output.jar")); + expectedResult.assertSame(mockFs.toByteArray()); + } + + @Test + public void testMainClass() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("--output", "output.jar", "--main_class", "SomeClass")); + 
FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator( + "Manifest-Version: 1.0", + "Created-By: blaze-singlejar", + "Main-Class: SomeClass")) + .addEntry("build-data.properties", redactedBuildData("output.jar", "SomeClass")); + expectedResult.assertSame(mockFs.toByteArray()); + } + + // These four tests test all combinations of compressed/uncompressed input and output. + @Test + public void testSimpleZip() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + mockFs.addFile("test.jar", sampleZip()); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("--output", "output.jar", "--sources", "test.jar")); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF, false) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator( + "Manifest-Version: 1.0", + "Created-By: blaze-singlejar"), false) + .addEntry("build-data.properties", redactedBuildData("output.jar"), false) + .addEntry("hello.txt", "Hello World!", false); + expectedResult.assertSame(mockFs.toByteArray()); + } + + @Test + public void testSimpleZipExpectCompressedOutput() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + mockFs.addFile("test.jar", sampleZip()); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("--output", "output.jar", "--sources", "test.jar", + "--compression")); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF, false) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator( + "Manifest-Version: 1.0", + "Created-By: blaze-singlejar"), true) + .addEntry("build-data.properties", redactedBuildData("output.jar"), true) + .addEntry("hello.txt", "Hello World!", true); + expectedResult.assertSame(mockFs.toByteArray()); + } + + @Test + public void testSimpleUncompressedZip() throws 
IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + mockFs.addFile("test.jar", sampleUncompressedZip()); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("--output", "output.jar", "--sources", "test.jar")); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF, false) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator(ImmutableList.of( + "Manifest-Version: 1.0", + "Created-By: blaze-singlejar")), false) + .addEntry("build-data.properties", redactedBuildData("output.jar"), false) + .addEntry("hello.txt", "Hello World!", false); + expectedResult.assertSame(mockFs.toByteArray()); + } + + @Test + public void testSimpleUncompressedZipExpectCompressedOutput() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + mockFs.addFile("test.jar", sampleUncompressedZip()); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("--output", "output.jar", "--sources", "test.jar", + "--compression")); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF, false) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator( + "Manifest-Version: 1.0", + "Created-By: blaze-singlejar"), true) + .addEntry("build-data.properties", redactedBuildData("output.jar"), true) + .addEntry("hello.txt", "Hello World!", true); + expectedResult.assertSame(mockFs.toByteArray()); + } + + // Integration test for option file expansion. 
+ @Test + public void testOptionFile() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + mockFs.addFile("input.jar", sampleZip()); + mockFs.addFile("options", "--output output.jar --sources input.jar"); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("@options")); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator( + "Manifest-Version: 1.0", + "Created-By: blaze-singlejar")) + .addEntry("build-data.properties", redactedBuildData("output.jar")) + .addEntry("hello.txt", "Hello World!"); + expectedResult.assertSame(mockFs.toByteArray()); + } + + @Test + public void testSkipsSignatureFiles() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + mockFs.addFile("input.jar", sampleZipWithSF()); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("--output", "output.jar", "--sources", "input.jar")); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator( + "Manifest-Version: 1.0", + "Created-By: blaze-singlejar")) + .addEntry("build-data.properties", redactedBuildData("output.jar")); + expectedResult.assertSame(mockFs.toByteArray()); + } + + @Test + public void testSkipsUsingInputPrefixes() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + mockFs.addFile("input.jar", sampleZipWithSubdirs()); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("--output", "output.jar", "--sources", + "input.jar", "--include_prefixes", "dir1", "dir2")); + + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator( + "Manifest-Version: 1.0", + "Created-By: blaze-singlejar")) + 
.addEntry("build-data.properties", redactedBuildData("output.jar")) + .addEntry("dir1/file1", "contents11") + .addEntry("dir1/file2", "contents12") + .addEntry("dir2/file1", "contents21"); + + expectedResult.assertSame(mockFs.toByteArray()); + } + + @Test + public void testSkipsUsingMultipleInputPrefixes() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + mockFs.addFile("input.jar", sampleZipWithSubdirs()); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("--output", "output.jar", "--include_prefixes", "dir2", + "--sources", "input.jar", "--include_prefixes", "dir1")); + + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator( + "Manifest-Version: 1.0", + "Created-By: blaze-singlejar")) + .addEntry("build-data.properties", redactedBuildData("output.jar")) + .addEntry("dir1/file1", "contents11") + .addEntry("dir1/file2", "contents12") + .addEntry("dir2/file1", "contents21"); + + expectedResult.assertSame(mockFs.toByteArray()); + } + + @Test + public void testNormalize() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + mockFs.addFile("input.jar", sampleZip()); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("--output", "output.jar", "--sources", "input.jar", + "--normalize")); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF, false) + .addEntry(JarFile.MANIFEST_NAME, ZipCombiner.DOS_EPOCH, new ManifestValidator( + "Manifest-Version: 1.0", "Created-By: blaze-singlejar"), false) + .addEntry("build-data.properties", ZipCombiner.DOS_EPOCH, + redactedBuildData("output.jar"), false) + .addEntry("hello.txt", ZipCombiner.DOS_EPOCH, "Hello World!", false); + expectedResult.assertSame(mockFs.toByteArray()); + } + + @Test + public void testNormalizeAndCompress() throws IOException { + 
MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + mockFs.addFile("input.jar", sampleZip()); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("--output", "output.jar", "--sources", "input.jar", + "--normalize", "--compression")); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF, false) + .addEntry(JarFile.MANIFEST_NAME, ZipCombiner.DOS_EPOCH, new ManifestValidator( + "Manifest-Version: 1.0", "Created-By: blaze-singlejar"), true) + .addEntry("build-data.properties", ZipCombiner.DOS_EPOCH, + redactedBuildData("output.jar"), true) + .addEntry("hello.txt", ZipCombiner.DOS_EPOCH, "Hello World!", true); + expectedResult.assertSame(mockFs.toByteArray()); + } + + @Test + public void testAddBuildInfoProperties() throws IOException { + List<String> buildInfo = getBuildInfo(); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF, false) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator( + "Manifest-Version: 1.0", "Created-By: blaze-singlejar"), false) + .addEntry("build-data.properties", new BuildInfoValidator(buildInfo), + false); + + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + SingleJar singleJar = new SingleJar(mockFs); + List<String> args = new ArrayList<String>(); + args.add("--output"); + args.add("output.jar"); + args.addAll(infoPropertyArguments(buildInfo)); + singleJar.run(args); + expectedResult.assertSame(mockFs.toByteArray()); + } + + private static List<String> infoPropertyArguments(List<String> buildInfoLines) { + List<String> args = new ArrayList<>(); + for (String s : buildInfoLines) { + if (!s.isEmpty()) { + args.add("--extra_build_info"); + args.add(s); + } + } + return args; + } + + @Test + public void testAddBuildInfoPropertiesFile() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + SingleJar singleJar = new SingleJar(mockFs); + 
doTestAddBuildInfoPropertiesFile(mockFs, "output.jar", singleJar); + } + + public static void doTestAddBuildInfoPropertiesFile(MockSimpleFileSystem mockFs, String target, + SingleJar singleJar) throws IOException { + List<String> buildInfo = getBuildInfo(); + mockFs.addFile("my.properties", makePropertyFileFromBuildInfo(buildInfo)); + singleJar.run(ImmutableList.of("--output", target, "--build_info_file", "my.properties")); + + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF, false) + .addEntry(JarFile.MANIFEST_NAME, + new ManifestValidator("Manifest-Version: 1.0", "Created-By: blaze-singlejar"), false) + .addEntry("build-data.properties", new BuildInfoValidator(buildInfo), + false); + expectedResult.assertSame(mockFs.toByteArray()); + } + + private static String makePropertyFileFromBuildInfo(List<String> buildInfo) { + return LINEFEED_JOINER.join(buildInfo).replace(":", "\\:"); + } + + @Test + public void testAddBuildInfoPropertiesFiles() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + SingleJar singleJar = new SingleJar(mockFs); + doTestAddBuildInfoPropertiesFiles(mockFs, "output.jar", singleJar); + } + + public static void doTestAddBuildInfoPropertiesFiles(MockSimpleFileSystem mockFs, String target, + SingleJar singleJar) throws IOException { + List<String> buildInfo = getBuildInfo(); + + mockFs.addFile("my1.properties", makePropertyFileFromBuildInfo(buildInfo.subList(0, 4))); + mockFs.addFile("my2.properties", + makePropertyFileFromBuildInfo(buildInfo.subList(4, buildInfo.size()))); + singleJar.run(ImmutableList.of("--output", target, + "--build_info_file", "my1.properties", + "--build_info_file", "my2.properties")); + + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF, false) + .addEntry(JarFile.MANIFEST_NAME, + new ManifestValidator("Manifest-Version: 1.0", "Created-By: blaze-singlejar"), false) + 
.addEntry("build-data.properties", new BuildInfoValidator(buildInfo), + false); + expectedResult.assertSame(mockFs.toByteArray()); + } + + @Test + public void testAddBuildInfoPropertiesAndFiles() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + SingleJar singleJar = new SingleJar(mockFs); + doTestAddBuildInfoPropertiesAndFiles(mockFs, "output.jar", singleJar); + } + + public static void doTestAddBuildInfoPropertiesAndFiles(MockSimpleFileSystem mockFs, + String target, SingleJar singleJar) throws IOException { + List<String> buildInfo = getBuildInfo(); + + mockFs.addFile("my1.properties", makePropertyFileFromBuildInfo(buildInfo.subList(0, 4))); + mockFs.addFile("my2.properties", makePropertyFileFromBuildInfo( + buildInfo.subList(4, buildInfo.size()))); + List<String> args = ImmutableList.<String>builder() + .add("--output").add(target) + .add("--build_info_file").add("my1.properties") + .add("--build_info_file").add("my2.properties") + .addAll(infoPropertyArguments(buildInfo.subList(4, buildInfo.size()))) + .build(); + + singleJar.run(args); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF, false) + .addEntry(JarFile.MANIFEST_NAME, + new ManifestValidator("Manifest-Version: 1.0", "Created-By: blaze-singlejar"), false) + .addEntry("build-data.properties", new BuildInfoValidator(buildInfo), + false); + expectedResult.assertSame(mockFs.toByteArray()); + } + + + @Test + public void testExcludeBuildData() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + SingleJar singleJar = new SingleJar(mockFs); + doTestExcludeBuildData(mockFs, "output.jar", singleJar); + } + + public static void doTestExcludeBuildData(MockSimpleFileSystem mockFs, String target, + SingleJar singleJar) throws IOException { + singleJar.run(ImmutableList.of("--output", target, "--exclude_build_data")); + FakeZipFile expectedResult = new FakeZipFile() + 
.addEntry("META-INF/", EXTRA_FOR_META_INF) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator( + "Manifest-Version: 1.0", + "Created-By: blaze-singlejar")); + expectedResult.assertSame(mockFs.toByteArray()); + } + + @Test + public void testResourceMapping() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + mockFs.addFile("a/b/c", "Test"); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("--output", "output.jar", "--exclude_build_data", + "--resources", "a/b/c:c/b/a")); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator( + "Manifest-Version: 1.0", + "Created-By: blaze-singlejar")) + .addEntry("c/b/a", "Test"); + expectedResult.assertSame(mockFs.toByteArray()); + } + + @Test + public void testResourceMappingIdentity() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + mockFs.addFile("a/b/c", "Test"); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("--output", "output.jar", "--exclude_build_data", + "--resources", "a/b/c")); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator( + "Manifest-Version: 1.0", + "Created-By: blaze-singlejar")) + .addEntry("a/b/c", "Test"); + expectedResult.assertSame(mockFs.toByteArray()); + } + + @Test + public void testResourceMappingDuplicateError() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + mockFs.addFile("a/b/c", "Test"); + SingleJar singleJar = new SingleJar(mockFs); + try { + singleJar.run(ImmutableList.of("--output", "output.jar", "--exclude_build_data", + "--resources", "a/b/c", "a/b/c")); + fail(); + } catch (IllegalStateException e) { + assertTrue(e.getMessage().contains("already contains a file named a/b/c")); + } + } + + @Test 
+ public void testResourceMappingDuplicateWarning() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + mockFs.addFile("a/b/c", "Test"); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("--output", "output.jar", "--exclude_build_data", + "--warn_duplicate_resources", "--resources", "a/b/c", "a/b/c")); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("META-INF/", EXTRA_FOR_META_INF) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator( + "Manifest-Version: 1.0", + "Created-By: blaze-singlejar")) + .addEntry("a/b/c", "Test"); + expectedResult.assertSame(mockFs.toByteArray()); + } + + @Test + public void testCanAddPreamble() throws IOException { + MockSimpleFileSystem mockFs = new MockSimpleFileSystem("output.jar"); + String preamble = "WeThePeople"; + mockFs.addFile(preamble, preamble.getBytes()); + SingleJar singleJar = new SingleJar(mockFs); + singleJar.run(ImmutableList.of("--output", "output.jar", + "--java_launcher", preamble, + "--main_class", "SomeClass")); + FakeZipFile expectedResult = new FakeZipFile() + .addPreamble(preamble.getBytes()) + .addEntry("META-INF/", EXTRA_FOR_META_INF) + .addEntry(JarFile.MANIFEST_NAME, new ManifestValidator( + "Manifest-Version: 1.0", + "Created-By: blaze-singlejar", + "Main-Class: SomeClass")) + .addEntry("build-data.properties", redactedBuildData("output.jar", "SomeClass")); + expectedResult.assertSame(mockFs.toByteArray()); + } +} diff --git a/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/SingleJarTests.java b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/SingleJarTests.java new file mode 100644 index 0000000000..8b68004e72 --- /dev/null +++ b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/SingleJarTests.java @@ -0,0 +1,27 @@ +// Copyright 2015 Google Inc. All rights reserved. 
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.devtools.build.singlejar;
+
+
+import com.google.devtools.build.lib.testutil.ClasspathSuite;
+
+import org.junit.runner.RunWith;
+
+/**
+ * A test-suite builder for this package.
+ */
+@RunWith(ClasspathSuite.class)
+public class SingleJarTests {
+  // Intentionally empty: ClasspathSuite discovers the package's tests from the classpath.
+}
diff --git a/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/SlowConcatenateStrategy.java b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/SlowConcatenateStrategy.java
new file mode 100644
index 0000000000..d1a50918d2
--- /dev/null
+++ b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/SlowConcatenateStrategy.java
@@ -0,0 +1,45 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.devtools.build.singlejar;
+
+
+import com.google.devtools.build.singlejar.ZipEntryFilter.CustomMergeStrategy;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import javax.annotation.concurrent.NotThreadSafe;
+
+/**
+ * A strategy that merges a set of files by concatenating them. It inserts no
+ * additional characters and copies bytes one by one. Used for testing.
+ */
+@NotThreadSafe
+final class SlowConcatenateStrategy implements CustomMergeStrategy {
+
+  @Override
+  public void merge(InputStream in, OutputStream out) throws IOException {
+    // Deliberately unbuffered byte-at-a-time copy, to exercise callers that
+    // wrap these streams with their own buffering.
+    int nextByte;
+    while ((nextByte = in.read()) != -1) {
+      out.write(nextByte);
+    }
+  }
+
+  @Override
+  public void finish(OutputStream out) {
+    // No need to do anything. All the data was already written.
+  }
+}
diff --git a/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/ZipCombinerTest.java b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/ZipCombinerTest.java
new file mode 100644
index 0000000000..e5345cb1f8
--- /dev/null
+++ b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/ZipCombinerTest.java
@@ -0,0 +1,936 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +package com.google.devtools.build.singlejar; + +import static com.google.common.truth.Truth.assertThat; +import static java.nio.charset.StandardCharsets.ISO_8859_1; +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import com.google.common.base.Preconditions; +import com.google.common.collect.ArrayListMultimap; +import com.google.common.collect.ListMultimap; +import com.google.devtools.build.singlejar.ZipCombiner.OutputMode; +import com.google.devtools.build.singlejar.ZipEntryFilter.CustomMergeStrategy; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.EOFException; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Calendar; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.jar.JarOutputStream; +import java.util.zip.ZipEntry; +import java.util.zip.ZipInputStream; +import java.util.zip.ZipOutputStream; + +/** + * Unit tests for {@link ZipCombiner}. 
+ */ +@RunWith(JUnit4.class) +public class ZipCombinerTest { + + private static final Date DOS_EPOCH = ZipCombiner.DOS_EPOCH; + + private InputStream sampleZip() { + ZipFactory factory = new ZipFactory(); + factory.addFile("hello.txt", "Hello World!"); + return factory.toInputStream(); + } + + private InputStream sampleZip2() { + ZipFactory factory = new ZipFactory(); + factory.addFile("hello2.txt", "Hello World 2!"); + return factory.toInputStream(); + } + + private InputStream sampleZipWithTwoEntries() { + ZipFactory factory = new ZipFactory(); + factory.addFile("hello.txt", "Hello World!"); + factory.addFile("hello2.txt", "Hello World 2!"); + return factory.toInputStream(); + } + + private InputStream sampleZipWithOneUncompressedEntry() { + ZipFactory factory = new ZipFactory(); + factory.addFile("hello.txt", "Hello World!", false); + return factory.toInputStream(); + } + + private InputStream sampleZipWithTwoUncompressedEntries() { + ZipFactory factory = new ZipFactory(); + factory.addFile("hello.txt", "Hello World!", false); + factory.addFile("hello2.txt", "Hello World 2!", false); + return factory.toInputStream(); + } + + private void assertEntry(ZipInputStream zipInput, String filename, long time, byte[] content) + throws IOException { + ZipEntry zipEntry = zipInput.getNextEntry(); + assertNotNull(zipEntry); + assertEquals(filename, zipEntry.getName()); + assertEquals(time, zipEntry.getTime()); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + byte[] buffer = new byte[1024]; + int bytesCopied; + while ((bytesCopied = zipInput.read(buffer)) != -1) { + out.write(buffer, 0, bytesCopied); + } + assertTrue(Arrays.equals(content, out.toByteArray())); + } + + private void assertEntry(ZipInputStream zipInput, String filename, byte[] content) + throws IOException { + assertEntry(zipInput, filename, ZipCombiner.DOS_EPOCH.getTime(), content); + } + + private void assertEntry(ZipInputStream zipInput, String filename, String content) + throws IOException { + 
assertEntry(zipInput, filename, content.getBytes(ISO_8859_1)); + } + + private void assertEntry(ZipInputStream zipInput, String filename, Date date, String content) + throws IOException { + assertEntry(zipInput, filename, date.getTime(), content.getBytes(ISO_8859_1)); + } + + @Test + public void testDateToDosTime() { + assertEquals(0x210000, ZipCombiner.dateToDosTime(ZipCombiner.DOS_EPOCH)); + Calendar calendar = new GregorianCalendar(); + for (int i = 1980; i <= 2107; i++) { + calendar.set(i, 0, 1, 0, 0, 0); + int result = ZipCombiner.dateToDosTime(calendar.getTime()); + assertEquals(i - 1980, result >>> 25); + assertEquals(1, (result >> 21) & 0xf); + assertEquals(1, (result >> 16) & 0x1f); + assertEquals(0, result & 0xffff); + } + } + + @Test + public void testDateToDosTimeFailsForBadValues() { + try { + Calendar calendar = new GregorianCalendar(); + calendar.set(1979, 0, 1, 0, 0, 0); + ZipCombiner.dateToDosTime(calendar.getTime()); + fail(); + } catch (IllegalArgumentException e) { + /* Expected exception. */ + } + try { + Calendar calendar = new GregorianCalendar(); + calendar.set(2108, 0, 1, 0, 0, 0); + ZipCombiner.dateToDosTime(calendar.getTime()); + fail(); + } catch (IllegalArgumentException e) { + /* Expected exception. 
*/ + } + } + + @Test + public void testCompressedDontCare() throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(out); + singleJar.addZip(sampleZip()); + singleJar.close(); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("hello.txt", "Hello World!", true); + expectedResult.assertSame(out.toByteArray()); + } + + @Test + public void testCompressedForceDeflate() throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(OutputMode.FORCE_DEFLATE, out); + singleJar.addZip(sampleZip()); + singleJar.close(); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("hello.txt", "Hello World!", true); + expectedResult.assertSame(out.toByteArray()); + } + + @Test + public void testCompressedForceStored() throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(OutputMode.FORCE_STORED, out); + singleJar.addZip(sampleZip()); + singleJar.close(); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("hello.txt", "Hello World!", false); + expectedResult.assertSame(out.toByteArray()); + } + + @Test + public void testUncompressedDontCare() throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(out); + singleJar.addZip(sampleZipWithOneUncompressedEntry()); + singleJar.close(); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("hello.txt", "Hello World!", false); + expectedResult.assertSame(out.toByteArray()); + } + + @Test + public void testUncompressedForceDeflate() throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(OutputMode.FORCE_DEFLATE, out); + singleJar.addZip(sampleZipWithOneUncompressedEntry()); + singleJar.close(); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("hello.txt", "Hello World!", 
true); + expectedResult.assertSame(out.toByteArray()); + } + + @Test + public void testUncompressedForceStored() throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(OutputMode.FORCE_STORED, out); + singleJar.addZip(sampleZipWithOneUncompressedEntry()); + singleJar.close(); + FakeZipFile expectedResult = new FakeZipFile() + .addEntry("hello.txt", "Hello World!", false); + expectedResult.assertSame(out.toByteArray()); + } + + @Test + public void testCopyTwoEntries() throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(out); + singleJar.addZip(sampleZipWithTwoEntries()); + singleJar.close(); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello.txt", "Hello World!"); + assertEntry(zipInput, "hello2.txt", "Hello World 2!"); + assertNull(zipInput.getNextEntry()); + } + + @Test + public void testCopyTwoUncompressedEntries() throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(out); + singleJar.addZip(sampleZipWithTwoUncompressedEntries()); + singleJar.close(); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello.txt", "Hello World!"); + assertEntry(zipInput, "hello2.txt", "Hello World 2!"); + assertNull(zipInput.getNextEntry()); + } + + @Test + public void testCombine() throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(out); + singleJar.addZip(sampleZip()); + singleJar.addZip(sampleZip2()); + singleJar.close(); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello.txt", "Hello World!"); + assertEntry(zipInput, "hello2.txt", "Hello World 2!"); + assertNull(zipInput.getNextEntry()); + } + + 
@Test + public void testDuplicateEntry() throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(out); + singleJar.addZip(sampleZip()); + singleJar.addZip(sampleZip()); + singleJar.close(); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello.txt", "Hello World!"); + assertNull(zipInput.getNextEntry()); + } + + // Returns an input stream that can only read one byte at a time. + private InputStream slowRead(final InputStream in) { + return new InputStream() { + @Override + public int read() throws IOException { + return in.read(); + } + @Override + public int read(byte b[], int off, int len) throws IOException { + Preconditions.checkArgument(b != null); + Preconditions.checkArgument((len >= 0) && (off >= 0)); + Preconditions.checkArgument(len <= b.length - off); + if (len == 0) { + return 0; + } + int value = read(); + if (value == -1) { + return -1; + } + b[off] = (byte) value; + return 1; + } + }; + } + + @Test + public void testDuplicateUncompressedEntryWithSlowRead() throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(out); + singleJar.addZip(slowRead(sampleZipWithOneUncompressedEntry())); + singleJar.addZip(slowRead(sampleZipWithOneUncompressedEntry())); + singleJar.close(); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello.txt", "Hello World!"); + assertNull(zipInput.getNextEntry()); + } + + @Test + public void testDuplicateEntryWithSlowRead() throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(out); + singleJar.addZip(slowRead(sampleZip())); + singleJar.addZip(slowRead(sampleZip())); + singleJar.close(); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + 
assertEntry(zipInput, "hello.txt", "Hello World!"); + assertNull(zipInput.getNextEntry()); + } + + @Test + public void testBadZipFileNoEntry() throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(out); + singleJar.addZip(new ByteArrayInputStream(new byte[] { 1, 2, 3, 4 })); + singleJar.close(); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertNull(zipInput.getNextEntry()); + } + + private InputStream asStream(String content) { + return new ByteArrayInputStream(content.getBytes(UTF_8)); + } + + @Test + public void testAddFile() throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(out); + singleJar.addFile("hello.txt", DOS_EPOCH, asStream("Hello World!")); + singleJar.close(); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello.txt", "Hello World!"); + assertNull(zipInput.getNextEntry()); + } + + @Test + public void testAddFileAndDuplicateZipEntry() throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(out); + singleJar.addFile("hello.txt", DOS_EPOCH, asStream("Hello World!")); + singleJar.addZip(sampleZip()); + singleJar.close(); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello.txt", "Hello World!"); + assertNull(zipInput.getNextEntry()); + } + + static final class MergeStrategyPlaceHolder implements CustomMergeStrategy { + + @Override + public void finish(OutputStream out) { + throw new UnsupportedOperationException(); + } + + @Override + public void merge(InputStream in, OutputStream out) { + throw new UnsupportedOperationException(); + } + } + + private static final CustomMergeStrategy COPY_PLACEHOLDER = new MergeStrategyPlaceHolder(); + private static final 
CustomMergeStrategy SKIP_PLACEHOLDER = new MergeStrategyPlaceHolder();
+
+  /**
+   * A mock implementation that either uses the specified behavior or calls
+   * through to copy.
+   */
+  class MockZipEntryFilter implements ZipEntryFilter {
+
+    private Date date = DOS_EPOCH;
+    // Names passed to accept(), in call order; asserted on by the tests.
+    private final List<String> calls = new ArrayList<>();
+    // File name to merge strategy map.
+    private final Map<String, CustomMergeStrategy> behavior =
+        new HashMap<>();
+    private final ListMultimap<String, String> renameMap = ArrayListMultimap.create();
+
+    @Override
+    public void accept(String filename, StrategyCallback callback) throws IOException {
+      calls.add(filename);
+      CustomMergeStrategy strategy = behavior.get(filename);
+      if (strategy == null) {
+        callback.copy(null);
+      } else if (strategy == COPY_PLACEHOLDER) {
+        List<String> names = renameMap.get(filename);
+        if (names != null && !names.isEmpty()) {
+          // Rename to the next name in the list of replacement names.
+          String newName = names.get(0);
+          callback.rename(newName, null);
+          // Unless this is the last replacement name, we pop the used name.
+          // The last replacement name applies to any additional entries.
+ if (names.size() > 1) { + names.remove(0); + } + } else { + callback.copy(null); + } + } else if (strategy == SKIP_PLACEHOLDER) { + callback.skip(); + } else { + callback.customMerge(date, strategy); + } + } + } + + @Test + public void testCopyCallsFilter() throws IOException { + MockZipEntryFilter mockFilter = new MockZipEntryFilter(); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(mockFilter, out); + singleJar.addZip(sampleZip()); + singleJar.close(); + assertEquals(Arrays.asList("hello.txt"), mockFilter.calls); + } + + @Test + public void testDuplicateEntryCallsFilterOnce() throws IOException { + MockZipEntryFilter mockFilter = new MockZipEntryFilter(); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(mockFilter, out); + singleJar.addZip(sampleZip()); + singleJar.addZip(sampleZip()); + singleJar.close(); + assertEquals(Arrays.asList("hello.txt"), mockFilter.calls); + } + + @Test + public void testMergeStrategy() throws IOException { + MockZipEntryFilter mockFilter = new MockZipEntryFilter(); + mockFilter.behavior.put("hello.txt", new ConcatenateStrategy()); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(mockFilter, out); + singleJar.addZip(sampleZip()); + singleJar.addZip(sampleZipWithTwoEntries()); + singleJar.close(); + assertEquals(Arrays.asList("hello.txt", "hello2.txt"), mockFilter.calls); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello2.txt", "Hello World 2!"); + assertEntry(zipInput, "hello.txt", "Hello World!\nHello World!"); + assertNull(zipInput.getNextEntry()); + } + + @Test + public void testMergeStrategyWithUncompressedFiles() throws IOException { + MockZipEntryFilter mockFilter = new MockZipEntryFilter(); + mockFilter.behavior.put("hello.txt", new ConcatenateStrategy()); + mockFilter.behavior.put("hello2.txt", 
SKIP_PLACEHOLDER); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(mockFilter, out); + singleJar.addZip(sampleZipWithTwoUncompressedEntries()); + singleJar.addZip(sampleZipWithTwoUncompressedEntries()); + singleJar.close(); + assertEquals(Arrays.asList("hello.txt", "hello2.txt"), mockFilter.calls); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello.txt", "Hello World!\nHello World!"); + assertNull(zipInput.getNextEntry()); + } + + @Test + public void testMergeStrategyWithUncompressedEntriesAndSlowRead() throws IOException { + MockZipEntryFilter mockFilter = new MockZipEntryFilter(); + mockFilter.behavior.put("hello.txt", new ConcatenateStrategy()); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(mockFilter, out); + singleJar.addZip(slowRead(sampleZipWithOneUncompressedEntry())); + singleJar.addZip(slowRead(sampleZipWithTwoUncompressedEntries())); + singleJar.close(); + assertEquals(Arrays.asList("hello.txt", "hello2.txt"), mockFilter.calls); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello2.txt", "Hello World 2!"); + assertEntry(zipInput, "hello.txt", "Hello World!\nHello World!"); + assertNull(zipInput.getNextEntry()); + } + + @Test + public void testMergeStrategyWithSlowCopy() throws IOException { + MockZipEntryFilter mockFilter = new MockZipEntryFilter(); + mockFilter.behavior.put("hello.txt", new SlowConcatenateStrategy()); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(mockFilter, out); + singleJar.addZip(sampleZip()); + singleJar.addZip(sampleZipWithTwoEntries()); + singleJar.close(); + assertEquals(Arrays.asList("hello.txt", "hello2.txt"), mockFilter.calls); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + 
assertEntry(zipInput, "hello2.txt", "Hello World 2!"); + assertEntry(zipInput, "hello.txt", "Hello World!Hello World!"); + assertNull(zipInput.getNextEntry()); + } + + @Test + public void testMergeStrategyWithUncompressedFilesAndSlowCopy() throws IOException { + MockZipEntryFilter mockFilter = new MockZipEntryFilter(); + mockFilter.behavior.put("hello.txt", new SlowConcatenateStrategy()); + mockFilter.behavior.put("hello2.txt", SKIP_PLACEHOLDER); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(mockFilter, out); + singleJar.addZip(sampleZipWithTwoUncompressedEntries()); + singleJar.addZip(sampleZipWithTwoUncompressedEntries()); + singleJar.close(); + assertEquals(Arrays.asList("hello.txt", "hello2.txt"), mockFilter.calls); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello.txt", "Hello World!Hello World!"); + assertNull(zipInput.getNextEntry()); + } + + private InputStream specialZipWithMinusOne() { + ZipFactory factory = new ZipFactory(); + factory.addFile("hello.txt", new byte[] {-1}); + return factory.toInputStream(); + } + + @Test + public void testMergeStrategyWithSlowCopyAndNegativeBytes() throws IOException { + MockZipEntryFilter mockFilter = new MockZipEntryFilter(); + mockFilter.behavior.put("hello.txt", new SlowConcatenateStrategy()); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(mockFilter, out); + singleJar.addZip(specialZipWithMinusOne()); + singleJar.close(); + assertEquals(Arrays.asList("hello.txt"), mockFilter.calls); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello.txt", new byte[] { -1 }); + assertNull(zipInput.getNextEntry()); + } + + @Test + public void testCopyDateHandling() throws IOException { + final Date date = new GregorianCalendar(2009, 8, 2, 0, 0, 0).getTime(); + ZipEntryFilter 
mockFilter = new ZipEntryFilter() { + @Override + public void accept(String filename, StrategyCallback callback) throws IOException { + assertEquals("hello.txt", filename); + callback.copy(date); + } + }; + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(mockFilter, out); + singleJar.addZip(sampleZip()); + singleJar.close(); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello.txt", date, "Hello World!"); + assertNull(zipInput.getNextEntry()); + } + + @Test + public void testMergeDateHandling() throws IOException { + MockZipEntryFilter mockFilter = new MockZipEntryFilter(); + mockFilter.behavior.put("hello.txt", new ConcatenateStrategy()); + mockFilter.date = new GregorianCalendar(2009, 8, 2, 0, 0, 0).getTime(); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(mockFilter, out); + singleJar.addZip(sampleZip()); + singleJar.addZip(sampleZipWithTwoEntries()); + singleJar.close(); + assertEquals(Arrays.asList("hello.txt", "hello2.txt"), mockFilter.calls); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello2.txt", DOS_EPOCH, "Hello World 2!"); + assertEntry(zipInput, "hello.txt", mockFilter.date, "Hello World!\nHello World!"); + assertNull(zipInput.getNextEntry()); + } + + @Test + public void testDuplicateCallThrowsException() throws IOException { + ZipEntryFilter badFilter = new ZipEntryFilter() { + @Override + public void accept(String filename, StrategyCallback callback) throws IOException { + // Duplicate callback call. + callback.skip(); + callback.copy(null); + } + }; + ByteArrayOutputStream out = new ByteArrayOutputStream(); + try (ZipCombiner singleJar = new ZipCombiner(badFilter, out)) { + singleJar.addZip(sampleZip()); + fail(); + } catch (IllegalStateException e) { + // Expected exception. 
+ } + } + + @Test + public void testNoCallThrowsException() throws IOException { + ZipEntryFilter badFilter = new ZipEntryFilter() { + @Override + public void accept(String filename, StrategyCallback callback) { + // No callback call. + } + }; + ByteArrayOutputStream out = new ByteArrayOutputStream(); + try (ZipCombiner singleJar = new ZipCombiner(badFilter, out)) { + singleJar.addZip(sampleZip()); + fail(); + } catch (IllegalStateException e) { + // Expected exception. + } + } + + // This test verifies that if an entry A is renamed as A (identy mapping), + // then subsequent entries named A are still subject to filtering. + // Note: this is different from a copy, where subsequent entries are skipped. + @Test + public void testRenameIdentityMapping() throws IOException { + MockZipEntryFilter mockFilter = new MockZipEntryFilter(); + mockFilter.behavior.put("hello.txt", COPY_PLACEHOLDER); + mockFilter.behavior.put("hello2.txt", COPY_PLACEHOLDER); + mockFilter.renameMap.put("hello.txt", "hello.txt"); // identity rename, not copy + mockFilter.renameMap.put("hello2.txt", "hello2.txt"); // identity rename, not copy + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(mockFilter, out); + singleJar.addZip(sampleZipWithTwoEntries()); + singleJar.addZip(sampleZipWithTwoEntries()); + singleJar.close(); + assertThat(mockFilter.calls).containsExactly("hello.txt", "hello2.txt", + "hello.txt", "hello2.txt").inOrder(); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello.txt", "Hello World!"); + assertEntry(zipInput, "hello2.txt", "Hello World 2!"); + assertNull(zipInput.getNextEntry()); + } + + // This test verifies that multiple entries with the same name can be + // renamed to unique names. 
+ @Test + public void testRenameNoConflictMapping() throws IOException { + MockZipEntryFilter mockFilter = new MockZipEntryFilter(); + mockFilter.behavior.put("hello.txt", COPY_PLACEHOLDER); + mockFilter.behavior.put("hello2.txt", COPY_PLACEHOLDER); + mockFilter.renameMap.putAll("hello.txt", Arrays.asList("hello1.txt", "hello2.txt")); + mockFilter.renameMap.putAll("hello2.txt", Arrays.asList("world1.txt", "world2.txt")); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(mockFilter, out); + singleJar.addZip(sampleZipWithTwoEntries()); + singleJar.addZip(sampleZipWithTwoEntries()); + singleJar.close(); + assertThat(mockFilter.calls).containsExactly("hello.txt", "hello2.txt", + "hello.txt", "hello2.txt").inOrder(); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello1.txt", "Hello World!"); + assertEntry(zipInput, "world1.txt", "Hello World 2!"); + assertEntry(zipInput, "hello2.txt", "Hello World!"); + assertEntry(zipInput, "world2.txt", "Hello World 2!"); + assertNull(zipInput.getNextEntry()); + } + + // This tests verifies that an attempt to rename an entry to a + // name already written, results in the entry being skipped, after + // calling the filter. 
+
+  @Test
+  public void testRenameSkipUsedName() throws IOException {
+    MockZipEntryFilter mockFilter = new MockZipEntryFilter();
+    mockFilter.behavior.put("hello.txt", COPY_PLACEHOLDER);
+    mockFilter.behavior.put("hello2.txt", COPY_PLACEHOLDER);
+    mockFilter.renameMap.putAll("hello.txt",
+        Arrays.asList("hello1.txt", "hello2.txt", "hello3.txt"));
+    mockFilter.renameMap.put("hello2.txt", "hello2.txt");
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    ZipCombiner singleJar = new ZipCombiner(mockFilter, out);
+    singleJar.addZip(sampleZipWithTwoEntries());
+    singleJar.addZip(sampleZipWithTwoEntries());
+    singleJar.addZip(sampleZipWithTwoEntries());
+    singleJar.close();
+    assertThat(mockFilter.calls).containsExactly("hello.txt", "hello2.txt",
+        "hello.txt", "hello2.txt", "hello.txt", "hello2.txt").inOrder();
+    ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray()));
+    assertEntry(zipInput, "hello1.txt", "Hello World!");
+    assertEntry(zipInput, "hello2.txt", "Hello World 2!");
+    assertEntry(zipInput, "hello3.txt", "Hello World!");
+    assertNull(zipInput.getNextEntry());
+  }
+
+  // This test verifies that if an entry has been copied, then
+  // further entries of the same name are skipped (filter not invoked),
+  // and entries renamed to the same name are skipped (after calling the filter).
+
+  @Test
+  public void testRenameAndCopy() throws IOException {
+    MockZipEntryFilter mockFilter = new MockZipEntryFilter();
+    mockFilter.behavior.put("hello.txt", COPY_PLACEHOLDER);
+    mockFilter.behavior.put("hello2.txt", COPY_PLACEHOLDER);
+    mockFilter.renameMap.putAll("hello.txt",
+        Arrays.asList("hello1.txt", "hello2.txt", "hello3.txt"));
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    ZipCombiner singleJar = new ZipCombiner(mockFilter, out);
+    singleJar.addZip(sampleZipWithTwoEntries());
+    singleJar.addZip(sampleZipWithTwoEntries());
+    singleJar.addZip(sampleZipWithTwoEntries());
+    singleJar.close();
+    assertThat(mockFilter.calls).containsExactly("hello.txt", "hello2.txt",
+        "hello.txt", "hello.txt").inOrder();
+    ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray()));
+    assertEntry(zipInput, "hello1.txt", "Hello World!");
+    assertEntry(zipInput, "hello2.txt", "Hello World 2!");
+    assertEntry(zipInput, "hello3.txt", "Hello World!");
+    assertNull(zipInput.getNextEntry());
+  }
+
+  // This test verifies that if an entry has been skipped, then
+  // further entries of the same name are skipped (filter not invoked),
+  // and entries renamed to the same name are skipped (after calling the filter).
+ @Test + public void testRenameAndSkip() throws IOException { + MockZipEntryFilter mockFilter = new MockZipEntryFilter(); + mockFilter.behavior.put("hello.txt", COPY_PLACEHOLDER); + mockFilter.behavior.put("hello2.txt", SKIP_PLACEHOLDER); + mockFilter.renameMap.putAll("hello.txt", + Arrays.asList("hello1.txt", "hello2.txt", "hello3.txt")); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(mockFilter, out); + singleJar.addZip(sampleZipWithTwoEntries()); + singleJar.addZip(sampleZipWithTwoEntries()); + singleJar.addZip(sampleZipWithTwoEntries()); + singleJar.close(); + assertThat(mockFilter.calls).containsExactly("hello.txt", "hello2.txt", + "hello.txt", "hello.txt").inOrder(); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello1.txt", "Hello World!"); + assertEntry(zipInput, "hello3.txt", "Hello World!"); + assertNull(zipInput.getNextEntry()); + } + + // This test verifies that renaming works when input and output + // disagree on compression method. This is the simple case, where + // content is read and rewritten, and no header repair is needed. 
+ @Test + public void testRenameWithUncompressedFiles () throws IOException { + MockZipEntryFilter mockFilter = new MockZipEntryFilter(); + mockFilter.behavior.put("hello.txt", COPY_PLACEHOLDER); + mockFilter.behavior.put("hello2.txt", COPY_PLACEHOLDER); + mockFilter.renameMap.putAll("hello.txt", + Arrays.asList("hello1.txt", "hello2.txt", "hello3.txt")); + mockFilter.renameMap.put("hello2.txt", "hello2.txt"); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipCombiner singleJar = new ZipCombiner(mockFilter, out); + singleJar.addZip(sampleZipWithTwoUncompressedEntries()); + singleJar.addZip(sampleZipWithTwoUncompressedEntries()); + singleJar.addZip(sampleZipWithTwoUncompressedEntries()); + singleJar.close(); + assertThat(mockFilter.calls).containsExactly("hello.txt", "hello2.txt", + "hello.txt", "hello2.txt", "hello.txt", "hello2.txt").inOrder(); + ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray())); + assertEntry(zipInput, "hello1.txt", "Hello World!"); + assertEntry(zipInput, "hello2.txt", "Hello World 2!"); + assertEntry(zipInput, "hello3.txt", "Hello World!"); + assertNull(zipInput.getNextEntry()); + } + + // The next two tests check that ZipCombiner can handle a ZIP with an data + // descriptor marker in the compressed data, i.e. that it does not scan for + // the data descriptor marker. It's unfortunately a bit tricky to create such + // a ZIP. + private static final int LOCAL_FILE_HEADER_MARKER = 0x04034b50; + private static final int DATA_DESCRIPTOR_MARKER = 0x08074b50; + private static final byte[] DATA_DESCRIPTOR_MARKER_AS_BYTES = new byte[] { + 0x50, 0x4b, 0x07, 0x08 + }; + + // Create a ZIP with an data descriptor marker in the DEFLATE content of a + // file. To do that, we build the ZIP byte by byte. 
+ private InputStream zipWithUnexpectedDataDescriptorMarker() { + ByteBuffer out = ByteBuffer.wrap(new byte[200]).order(ByteOrder.LITTLE_ENDIAN); + out.clear(); + // file header + out.putInt(LOCAL_FILE_HEADER_MARKER); // file header signature + out.putShort((short) 6); // version to extract + out.putShort((short) 8); // general purpose bit flag + out.putShort((short) ZipOutputStream.DEFLATED); // compression method + out.putShort((short) 0); // mtime (00:00:00) + out.putShort((short) 0x21); // mdate (1.1.1980) + out.putInt(0); // crc32 + out.putInt(0); // compressed size + out.putInt(0); // uncompressed size + out.putShort((short) 1); // file name length + out.putShort((short) 0); // extra field length + out.put((byte) 'a'); // file name + + // file contents + out.put((byte) 0x01); // deflated content block is last block and uncompressed + out.putShort((short) 4); // uncompressed block length + out.putShort((short) ~4); // negated uncompressed block length + out.putInt(DATA_DESCRIPTOR_MARKER); // 4 bytes uncompressed data + + // data descriptor + out.putInt(DATA_DESCRIPTOR_MARKER); // data descriptor with marker + out.putInt((int) ZipFactory.calculateCrc32(DATA_DESCRIPTOR_MARKER_AS_BYTES)); + out.putInt(9); + out.putInt(4); + // We omit the central directory here. It's currently not used by + // ZipCombiner or by java.util.zip.ZipInputStream, so that shouldn't be a + // problem. + return new ByteArrayInputStream(out.array()); + } + + // Check that the created ZIP is correct. + @Test + public void testZipWithUnexpectedDataDescriptorMarkerIsCorrect() throws IOException { + ZipInputStream zipInput = new ZipInputStream(zipWithUnexpectedDataDescriptorMarker()); + assertEntry(zipInput, "a", DATA_DESCRIPTOR_MARKER_AS_BYTES); + assertNull(zipInput.getNextEntry()); + } + + // Check that ZipCombiner handles the ZIP correctly. 
  @Test
  public void testZipWithUnexpectedDataDescriptorMarker() throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    ZipCombiner singleJar = new ZipCombiner(out);
    singleJar.addZip(zipWithUnexpectedDataDescriptorMarker());
    singleJar.close();
    // The combined output must still contain the entry intact.
    ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray()));
    assertEntry(zipInput, "a", DATA_DESCRIPTOR_MARKER_AS_BYTES);
    assertNull(zipInput.getNextEntry());
  }

  // Create a ZIP with a partial entry: the header promises 10 bytes of stored
  // data, but the stream ends after a single content byte.
  private InputStream zipWithPartialEntry() {
    ByteBuffer out = ByteBuffer.wrap(new byte[32]).order(ByteOrder.LITTLE_ENDIAN);
    out.clear();
    // file header
    out.putInt(LOCAL_FILE_HEADER_MARKER); // file header signature
    out.putShort((short) 6); // version to extract
    out.putShort((short) 0); // general purpose bit flag
    out.putShort((short) ZipOutputStream.STORED); // compression method
    out.putShort((short) 0); // mtime (00:00:00)
    out.putShort((short) 0x21); // mdate (1.1.1980)
    out.putInt(0); // crc32
    out.putInt(10); // compressed size
    out.putInt(10); // uncompressed size
    out.putShort((short) 1); // file name length
    out.putShort((short) 0); // extra field length
    out.put((byte) 'a'); // file name

    // file contents
    out.put((byte) 0x01);
    // Unexpected end of file.

    return new ByteArrayInputStream(out.array());
  }

  // ZipCombiner must fail loudly (EOFException), not silently truncate.
  @Test
  public void testBadZipFilePartialEntry() throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try (ZipCombiner singleJar = new ZipCombiner(out)) {
      singleJar.addZip(zipWithPartialEntry());
      fail();
    } catch (EOFException e) {
      // Expected exception.
    }
  }

  // Compares ZipCombiner's output byte-for-byte against java.util.jar's output
  // for an equivalent archive (modulo the UTF-8 general purpose flag bit).
  @Test
  public void testSimpleJarAgainstJavaUtil() throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    JarOutputStream jarOut = new JarOutputStream(out);
    ZipEntry entry;
    entry = new ZipEntry("META-INF/");
    entry.setTime(DOS_EPOCH.getTime());
    entry.setMethod(JarOutputStream.STORED);
    entry.setSize(0);
    entry.setCompressedSize(0);
    entry.setCrc(0);
    jarOut.putNextEntry(entry);
    entry = new ZipEntry("META-INF/MANIFEST.MF");
    entry.setTime(DOS_EPOCH.getTime());
    entry.setMethod(JarOutputStream.DEFLATED);
    jarOut.putNextEntry(entry);
    jarOut.write(new byte[] { 1, 2, 3, 4 });
    jarOut.close();
    byte[] javaFile = out.toByteArray();
    out.reset();

    ZipCombiner singleJar = new ZipCombiner(out);
    singleJar.addDirectory("META-INF/", DOS_EPOCH,
        new ExtraData[] { new ExtraData((short) 0xCAFE, new byte[0]) });
    singleJar.addFile("META-INF/MANIFEST.MF", DOS_EPOCH,
        new ByteArrayInputStream(new byte[] { 1, 2, 3, 4 }));
    singleJar.close();
    byte[] singlejarFile = out.toByteArray();

    new ZipTester(singlejarFile).validate();
    assertZipFilesEquivalent(singlejarFile, javaFile);
  }

  // Asserts the two archives are identical except, possibly, for the UTF-8
  // encoding bit (bit 11) in the general purpose flags of each header.
  void assertZipFilesEquivalent(byte[] x, byte[] y) {
    assertEquals(x.length, y.length);

    for (int i = 0; i < x.length; i++) {
      if (x[i] != y[i]) {
        // Allow general purpose bit 11 (UTF-8 encoding) used in jdk7 to differ
        assertEquals("at position " + i, 0x08, x[i] ^ y[i]);
        // Check that x[i] is the second byte of a general purpose bit flag.
        // Phil Katz, you will never be forgotten.
        assertTrue(
            // Local header
            x[i-7] == 'P' && x[i-6] == 'K' && x[i-5] == 3 && x[i-4] == 4 ||
            // Central directory header
            x[i-9] == 'P' && x[i-8] == 'K' && x[i-7] == 1 && x[i-6] == 2);
      }
    }
  }

  /**
   * Ensures that the code that grows the central directory and the code that patches it is not
   * obviously broken.
   */
  @Test
  public void testLotsOfFiles() throws IOException {
    int fileCount = 100;
    // Small block sizes force many central-directory growth/patch operations.
    for (int blockSize : new int[] { 1, 2, 3, 4, 10, 1000 }) {
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      ZipCombiner zipCombiner = new ZipCombiner(
          OutputMode.DONT_CARE, new CopyEntryFilter(), out, blockSize);
      for (int i = 0; i < fileCount; i++) {
        zipCombiner.addFile("hello" + i, DOS_EPOCH, asStream("Hello " + i + "!"));
      }
      zipCombiner.close();
      ZipInputStream zipInput = new ZipInputStream(new ByteArrayInputStream(out.toByteArray()));
      for (int i = 0; i < fileCount; i++) {
        assertEntry(zipInput, "hello" + i, "Hello " + i + "!");
      }
      assertNull(zipInput.getNextEntry());
      new ZipTester(out.toByteArray()).validate();
    }
  }
}
diff --git a/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/ZipFactory.java b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/ZipFactory.java
new file mode 100644
index 0000000000..a6474fa704
--- /dev/null
+++ b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/ZipFactory.java
@@ -0,0 +1,106 @@
// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
+ +package com.google.devtools.build.singlejar; + +import static java.nio.charset.StandardCharsets.ISO_8859_1; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.List; +import java.util.zip.CRC32; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; + +/** + * A helper class to create zip files for testing. + */ +public class ZipFactory { + + static class Entry { + private final String name; + private final byte[] content; + private final boolean compressed; + private Entry(String name, byte[] content, boolean compressed) { + this.name = name; + this.content = content; + this.compressed = compressed; + } + } + + private final List<Entry> entries = new ArrayList<>(); + + // Assumes that content was created locally. Does not perform a defensive copy! + private void addEntry(String name, byte[] content, boolean compressed) { + entries.add(new Entry(name, content, compressed)); + } + + public ZipFactory addFile(String name, String content) { + addEntry(name, content.getBytes(ISO_8859_1), true); + return this; + } + + public ZipFactory addFile(String name, byte[] content) { + addEntry(name, content.clone(), true); + return this; + } + + public ZipFactory addFile(String name, String content, boolean compressed) { + addEntry(name, content.getBytes(ISO_8859_1), compressed); + return this; + } + + public ZipFactory addFile(String name, byte[] content, boolean compressed) { + addEntry(name, content.clone(), compressed); + return this; + } + + public byte[] toByteArray() { + try { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ZipOutputStream zipper = new ZipOutputStream(out); + for (Entry entry : entries) { + ZipEntry zipEntry = new ZipEntry(entry.name); + if (entry.compressed) { + zipEntry.setMethod(ZipEntry.DEFLATED); + } else { + zipEntry.setMethod(ZipEntry.STORED); + zipEntry.setSize(entry.content.length); + 
zipEntry.setCrc(calculateCrc32(entry.content)); + } + zipEntry.setTime(ZipCombiner.DOS_EPOCH.getTime()); + zipper.putNextEntry(zipEntry); + zipper.write(entry.content); + zipper.closeEntry(); + } + zipper.close(); + return out.toByteArray(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public InputStream toInputStream() { + return new ByteArrayInputStream(toByteArray()); + } + + public static long calculateCrc32(byte[] content) { + CRC32 crc = new CRC32(); + crc.update(content); + return crc.getValue(); + } +} diff --git a/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/ZipTester.java b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/ZipTester.java new file mode 100644 index 0000000000..c1293d6f92 --- /dev/null +++ b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/ZipTester.java @@ -0,0 +1,412 @@ +// Copyright 2015 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.devtools.build.singlejar; + + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.List; +import java.util.zip.CRC32; +import java.util.zip.DataFormatException; +import java.util.zip.Inflater; + +/** + * A helper class to validate zip files and provide reasonable diagnostics (better than what zip + * does). 
/**
 * A helper class to validate zip files and provide reasonable diagnostics (better than what zip
 * does). We might want to make this into a fully-fledged binary some day.
 */
final class ZipTester {

  // The following constants are ZIP-specific.
  private static final int LOCAL_FILE_HEADER_MARKER = 0x04034b50;
  private static final int DATA_DESCRIPTOR_MARKER = 0x08074b50;
  private static final int CENTRAL_DIRECTORY_MARKER = 0x02014b50;
  private static final int END_OF_CENTRAL_DIRECTORY_MARKER = 0x06054b50;

  private static final int FILE_HEADER_BUFFER_SIZE = 26; // without marker
  private static final int DATA_DESCRIPTOR_BUFFER_SIZE = 12; // without marker

  private static final int DIRECTORY_ENTRY_BUFFER_SIZE = 42; // without marker
  private static final int END_OF_CENTRAL_DIRECTORY_BUFFER_SIZE = 18; // without marker

  // Set if the size, compressed size and CRC are set to zero, and present in
  // the data descriptor after the data.
  private static final int SIZE_MASKED_FLAG = 1 << 3;

  private static final int STORED_METHOD = 0;
  private static final int DEFLATE_METHOD = 8;

  private static final int VERSION_STORED = 10; // Version 1.0
  private static final int VERSION_DEFLATE = 20; // Version 2.0

  /** A local file entry as seen in the stream, kept to cross-check the central directory. */
  private static class Entry {
    private final long pos;
    private final String name;
    private final int flags;
    private final int method;
    private final int dosTime;
    Entry(long pos, String name, int flags, int method, int dosTime) {
      this.pos = pos;
      this.name = name;
      this.flags = flags;
      this.method = method;
      this.dosTime = dosTime;
    }
  }

  private final InputStream in;
  private final byte[] buffer = new byte[1024];
  private int bufferLength;
  private int bufferOffset;
  private long pos; // absolute position in the stream, for diagnostics

  private final List<Entry> entries = new ArrayList<>();

  public ZipTester(InputStream in) {
    this.in = in;
  }

  public ZipTester(byte[] data) {
    this(new ByteArrayInputStream(data));
  }

  private void warn(String msg) {
    System.err.println("WARNING: " + msg);
  }

  // Compacts the buffer (moving unread bytes to the front) and refills it from
  // the underlying stream; {@code action} describes what the caller was doing,
  // for the end-of-file diagnostic.
  private void readMoreData(String action) throws IOException {
    if ((bufferLength > 0) && (bufferOffset > 0)) {
      System.arraycopy(buffer, bufferOffset, buffer, 0, bufferLength);
    }
    if (bufferLength >= buffer.length) {
      // The buffer size is specifically chosen to avoid this situation.
      throw new AssertionError("Internal error: buffer overrun.");
    }
    bufferOffset = 0;
    int bytesRead = in.read(buffer, bufferLength, buffer.length - bufferLength);
    if (bytesRead <= 0) {
      throw new IOException("Unexpected end of file, while " + action);
    }
    bufferLength += bytesRead;
  }

  private int readByte(String action) throws IOException {
    if (bufferLength == 0) {
      readMoreData(action);
    }
    byte result = buffer[bufferOffset];
    bufferOffset++; bufferLength--;
    pos++;
    return result & 0xff;
  }

  // Reads a little-endian unsigned 32-bit value from the stream.
  private long getUnsignedInt(String action) throws IOException {
    int a = readByte(action);
    int b = readByte(action);
    int c = readByte(action);
    int d = readByte(action);
    return ((d << 24) | (c << 16) | (b << 8) | a) & 0xffffffffL;
  }

  private void readFully(byte[] buffer, String action) throws IOException {
    for (int i = 0; i < buffer.length; i++) {
      buffer[i] = (byte) readByte(action);
    }
  }

  private void skip(long length, String action) throws IOException {
    for (long i = 0; i < length; i++) {
      readByte(action);
    }
  }

  // Little-endian unsigned 16-bit read from an already-filled buffer.
  private int getUnsignedShort(byte[] source, int offset) {
    int a = source[offset + 0] & 0xff;
    int b = source[offset + 1] & 0xff;
    return (b << 8) | a;
  }

  // Little-endian unsigned 32-bit read from an already-filled buffer.
  private long getUnsignedInt(byte[] source, int offset) {
    int a = source[offset + 0] & 0xff;
    int b = source[offset + 1] & 0xff;
    int c = source[offset + 2] & 0xff;
    int d = source[offset + 3] & 0xff;
    return ((d << 24) | (c << 16) | (b << 8) | a) & 0xffffffffL;
  }

  /**
   * Inflates a raw (headerless) DEFLATE stream directly out of the outer
   * ZipTester buffer, keeping {@link #pos}/{@link #bufferOffset} in sync with
   * exactly the bytes the deflate stream consumed.
   */
  private class DeflateInputStream extends InputStream {

    private final byte[] singleByteBuffer = new byte[1];
    private int consumedBytes; // bytes of the outer buffer used by the last inflate call
    private final Inflater inflater = new Inflater(true); // true: no zlib header
    private long totalBytesRead; // total compressed bytes consumed so far

    private int inflateData(byte[] dest, int off, int len)
        throws IOException {
      consumedBytes = 0;
      int bytesProduced = 0;
      int bytesConsumed = 0;
      while ((bytesProduced == 0) && !inflater.finished()) {
        inflater.setInput(buffer, bufferOffset + bytesConsumed, bufferLength - bytesConsumed);
        int remainingBefore = inflater.getRemaining();
        try {
          bytesProduced = inflater.inflate(dest, off, len);
        } catch (DataFormatException e) {
          throw new IOException("Invalid deflate stream in ZIP file.", e);
        }
        bytesConsumed += remainingBefore - inflater.getRemaining();
        consumedBytes = bytesConsumed;
        if (bytesProduced == 0) {
          if (inflater.needsDictionary()) {
            // The DEFLATE algorithm as used in the ZIP file format does not
            // require an additional dictionary.
            throw new AssertionError("Inflater unexpectedly requires a dictionary.");
          } else if (inflater.needsInput()) {
            readMoreData("need more data for deflate");
          } else if (inflater.finished()) {
            return 0;
          } else {
            // According to the Inflater specification, this cannot happen.
            throw new AssertionError("Inflater unexpectedly produced no output.");
          }
        }
      }
      return bytesProduced;
    }

    @Override
    public int read(byte b[], int off, int len) throws IOException {
      if (inflater.finished()) {
        return -1;
      }
      int length = inflateData(b, off, len);
      // Account for the compressed bytes inflateData consumed from the outer buffer.
      totalBytesRead += consumedBytes;
      bufferLength -= consumedBytes;
      bufferOffset += consumedBytes;
      pos += consumedBytes;
      return length == 0 ? -1 : length;
    }

    @Override
    public int read() throws IOException {
      int bytesRead = read(singleByteBuffer, 0, 1);
      return (bytesRead == -1) ? -1 : (singleByteBuffer[0] & 0xff);
    }
  }

  // Reads and validates one local file entry (the marker has already been
  // consumed by the caller) and records it for the central directory check.
  private void readEntry() throws IOException {
    long entrypos = pos - 4;
    String entryDesc = "file entry at " + Long.toHexString(entrypos);
    byte[] entryBuffer = new byte[FILE_HEADER_BUFFER_SIZE];
    readFully(entryBuffer, "reading file header");
    int versionToExtract = getUnsignedShort(entryBuffer, 0);
    int flags = getUnsignedShort(entryBuffer, 2);
    int method = getUnsignedShort(entryBuffer, 4);
    int dosTime = (int) getUnsignedInt(entryBuffer, 6);
    int crc32 = (int) getUnsignedInt(entryBuffer, 10);
    long compressedSize = getUnsignedInt(entryBuffer, 14);
    long uncompressedSize = getUnsignedInt(entryBuffer, 18);
    int filenameLength = getUnsignedShort(entryBuffer, 22);
    int extraLength = getUnsignedShort(entryBuffer, 24);

    byte[] filename = new byte[filenameLength];
    readFully(filename, "reading file name");
    skip(extraLength, "skipping extra data");

    String name = new String(filename, "UTF-8");
    for (int i = 0; i < filename.length; i++) {
      if ((filename[i] < ' ') || (filename[i] > 127)) {
        warn(entryDesc + ": file name has unexpected non-ascii characters");
      }
    }
    entryDesc = "file entry '" + name + "' at " + Long.toHexString(entrypos);

    if ((method != STORED_METHOD) && (method != DEFLATE_METHOD)) {
      throw new IOException(entryDesc + ": unknown method " + method);
    }
    if ((flags != 0) && (flags != SIZE_MASKED_FLAG)) {
      throw new IOException(entryDesc + ": unknown flags " + flags);
    }
    if ((method == STORED_METHOD) && (versionToExtract != VERSION_STORED)) {
      warn(entryDesc + ": unexpected version to extract for stored entry " + versionToExtract);
    }
    // Deliberately no warning for deflated entries with an unexpected
    // version-to-extract; common zip writers disagree on the value and the
    // warning was too noisy.

    if (method == STORED_METHOD) {
      if (compressedSize != uncompressedSize) {
        throw new IOException(entryDesc + ": stored entries should have identical compressed and "
            + "uncompressed sizes");
      }
      // BUGFIX: was entryDesc + "skipping data" (missing separator), which
      // produced garbled diagnostics like "...at 1a2bskipping data".
      skip(compressedSize, entryDesc + ": skipping data");
    } else {
      // No OS resources are actually allocated.
      @SuppressWarnings("resource") DeflateInputStream deflater = new DeflateInputStream();
      long generatedBytes = 0;
      byte[] deflated = new byte[1024];
      int readBytes;
      CRC32 crc = new CRC32();
      while ((readBytes = deflater.read(deflated)) > 0) {
        crc.update(deflated, 0, readBytes);
        generatedBytes += readBytes;
      }
      int actualCrc32 = (int) crc.getValue();
      long consumedBytes = deflater.totalBytesRead;
      if (flags == SIZE_MASKED_FLAG) {
        // Note: we require the (optional per spec) PK\7\8 marker before the
        // data descriptor; that is what ZipCombiner emits.
        long id = getUnsignedInt("reading footer marker");
        if (id != DATA_DESCRIPTOR_MARKER) {
          throw new IOException(entryDesc + ": expected footer at " + Long.toHexString(pos - 4)
              + ", but found " + Long.toHexString(id));
        }
        byte[] footer = new byte[DATA_DESCRIPTOR_BUFFER_SIZE];
        readFully(footer, "reading footer");
        crc32 = (int) getUnsignedInt(footer, 0);
        compressedSize = getUnsignedInt(footer, 4);
        uncompressedSize = getUnsignedInt(footer, 8);
      }

      if (consumedBytes != compressedSize) {
        throw new IOException(entryDesc + ": amount of compressed data does not match value "
            + "specified in the zip (specified: " + compressedSize + ", actual: " + consumedBytes
            + ")");
      }
      if (generatedBytes != uncompressedSize) {
        throw new IOException(entryDesc + ": amount of uncompressed data does not match value "
            + "specified in the zip (specified: " + uncompressedSize + ", actual: "
            + generatedBytes + ")");
      }
      if (crc32 != actualCrc32) {
        throw new IOException(entryDesc + ": specified crc checksum does not match actual check "
            + "sum");
      }
    }
    entries.add(new Entry(entrypos, name, flags, method, dosTime));
  }

  // Cross-checks one central directory record against the recorded local entry.
  @SuppressWarnings("unused") // A couple of unused local variables.
  private void validateCentralDirectoryEntry(Entry entry) throws IOException {
    long entrypos = pos - 4;
    String entryDesc = "file directory entry '" + entry.name + "' at " + Long.toHexString(entrypos);

    byte[] entryBuffer = new byte[DIRECTORY_ENTRY_BUFFER_SIZE];
    readFully(entryBuffer, "reading central directory entry");
    int versionMadeBy = getUnsignedShort(entryBuffer, 0);
    int versionToExtract = getUnsignedShort(entryBuffer, 2);
    int flags = getUnsignedShort(entryBuffer, 4);
    int method = getUnsignedShort(entryBuffer, 6);
    int dosTime = (int) getUnsignedInt(entryBuffer, 8);
    int crc32 = (int) getUnsignedInt(entryBuffer, 12);
    long compressedSize = getUnsignedInt(entryBuffer, 16);
    long uncompressedSize = getUnsignedInt(entryBuffer, 20);
    int filenameLength = getUnsignedShort(entryBuffer, 24);
    int extraLength = getUnsignedShort(entryBuffer, 26);
    int commentLength = getUnsignedShort(entryBuffer, 28);
    int diskNumberStart = getUnsignedShort(entryBuffer, 30);
    int internalAttributes = getUnsignedShort(entryBuffer, 32);
    int externalAttributes = (int) getUnsignedInt(entryBuffer, 34);
    long offset = getUnsignedInt(entryBuffer, 38);

    byte[] filename = new byte[filenameLength];
    readFully(filename, "reading file name");
    skip(extraLength, "skipping extra data");
    String name = new String(filename, "UTF-8");

    if (!name.equals(entry.name)) {
      throw new IOException(entryDesc + ": file name in central directory does not match original "
          + "name");
    }
    if (offset != entry.pos) {
      throw new IOException(entryDesc);
    }
    if (flags != entry.flags) {
      throw new IOException(entryDesc);
    }
    if (method != entry.method) {
      throw new IOException(entryDesc);
    }
    if (dosTime != entry.dosTime) {
      throw new IOException(entryDesc);
    }
  }

  // Validates the central directory in recorded entry order. The marker of the
  // first record has already been consumed by validate()'s dispatch loop.
  private void validateCentralDirectory() throws IOException {
    boolean first = true;
    for (Entry entry : entries) {
      if (first) {
        first = false;
      } else {
        long id = getUnsignedInt("reading marker");
        if (id != CENTRAL_DIRECTORY_MARKER) {
          throw new IOException();
        }
      }
      validateCentralDirectoryEntry(entry);
    }
  }

  @SuppressWarnings("unused") // A couple of unused local variables.
  private void validateEndOfCentralDirectory() throws IOException {
    long id = getUnsignedInt("expecting end of central directory");
    // BUGFIX: validate the marker before interpreting the record's fields;
    // previously a corrupt marker produced misleading field-level errors.
    if (id != END_OF_CENTRAL_DIRECTORY_MARKER) {
      throw new IOException("Expected end of central directory marker");
    }
    byte[] entryBuffer = new byte[END_OF_CENTRAL_DIRECTORY_BUFFER_SIZE];
    readFully(entryBuffer, "reading end of central directory");
    int diskNumber = getUnsignedShort(entryBuffer, 0);
    int startDiskNumber = getUnsignedShort(entryBuffer, 2);
    int numEntries = getUnsignedShort(entryBuffer, 4);
    int numTotalEntries = getUnsignedShort(entryBuffer, 6);
    long centralDirectorySize = getUnsignedInt(entryBuffer, 8);
    long centralDirectoryOffset = getUnsignedInt(entryBuffer, 12);
    int commentLength = getUnsignedShort(entryBuffer, 16);
    if (diskNumber != 0) {
      throw new IOException(String.format("diskNumber=%d", diskNumber));
    }
    if (startDiskNumber != 0) {
      // BUGFIX: was formatting diskNumber here, reporting the wrong value.
      throw new IOException(String.format("startDiskNumber=%d", startDiskNumber));
    }
    if (numEntries != numTotalEntries) {
      throw new IOException(String.format("numEntries=%d numTotalEntries=%d",
          numEntries, numTotalEntries));
    }
    // The 16-bit counters wrap at 0x10000 for archives with many entries.
    if (numEntries != (entries.size() % 0x10000)) {
      throw new IOException("bad number of entries in central directory footer");
    }
    if (numTotalEntries != (entries.size() % 0x10000)) {
      throw new IOException("bad number of entries in central directory footer");
    }
    if (commentLength != 0) {
      throw new IOException("Zip file comment is unexpected");
    }
  }

  /**
   * Reads the whole stream, validating every local entry, the central
   * directory, and the end-of-central-directory record.
   *
   * @throws IOException if the archive is malformed
   */
  public void validate() throws IOException {
    while (true) {
      long id = getUnsignedInt("reading marker");
      if (id == LOCAL_FILE_HEADER_MARKER) {
        readEntry();
      } else if (id == CENTRAL_DIRECTORY_MARKER) {
        validateCentralDirectory();
        validateEndOfCentralDirectory();
        return;
      } else {
        throw new IOException("unexpected result for marker: "
            + Long.toHexString(id) + " at position " + Long.toHexString(pos - 4));
      }
    }
  }
}