author    Kristina Chodorow <kchodorow@google.com> 2015-04-27 14:00:04 +0000
committer Han-Wen Nienhuys <hanwen@google.com> 2015-04-27 18:53:29 +0000
commit    7086b9ed512694ab75d64e305ce40131e31506a8 (patch)
tree      7af7aa220eb271723fc44b0b59088ddaef07b0d6 /src/main/java/com
parent    e42797e3efc2a72afada5c5c88ec35ba594bbdb6 (diff)
CL for Java readability review
--
MOS_MIGRATED_REVID=92141245
Diffstat (limited to 'src/main/java/com')
-rw-r--r--  src/main/java/com/google/devtools/build/lib/bazel/repository/DecompressorFactory.java  |  45
-rw-r--r--  src/main/java/com/google/devtools/build/lib/bazel/repository/MavenJarFunction.java  |  6
-rw-r--r--  src/main/java/com/google/devtools/build/lib/bazel/repository/NewLocalRepositoryFunction.java  |  23
3 files changed, 46 insertions, 28 deletions
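
The recurring change across all three files is a readability cleanup: exception and log messages built by string concatenation are rewritten with String.format. Below is a minimal, self-contained sketch of the before/after pattern, reusing one of the messages from DecompressorFactory; the wrapper class and method names are hypothetical, added only for illustration.

    // FormatExample.java - illustrative only; not part of this CL.
    public class FormatExample {
      static class DecompressorException extends Exception {
        DecompressorException(String message) {
          super(message);
        }
      }

      // Old style: the message is assembled from many '+' operands.
      static DecompressorException before(String targetKind, String targetName, String archivePath) {
        return new DecompressorException(
            "No decompressor found for " + targetKind + " rule " + targetName
                + " (got " + archivePath + ")");
      }

      // New style: a single format string keeps the message readable in one piece.
      static DecompressorException after(String targetKind, String targetName, String archivePath) {
        return new DecompressorException(String.format(
            "No decompressor found for %s rule %s (got %s)", targetKind, targetName, archivePath));
      }
    }

Both variants produce the same message; the format-string version is what this CL standardizes on.
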
diff --git a/src/main/java/com/google/devtools/build/lib/bazel/repository/DecompressorFactory.java b/src/main/java/com/google/devtools/build/lib/bazel/repository/DecompressorFactory.java
index 947e2245da..d421b49ad9 100644
--- a/src/main/java/com/google/devtools/build/lib/bazel/repository/DecompressorFactory.java
+++ b/src/main/java/com/google/devtools/build/lib/bazel/repository/DecompressorFactory.java
@@ -52,8 +52,8 @@ public abstract class DecompressorFactory {
return new JarDecompressor(targetKind, targetName, archivePath, repositoryPath);
} else {
throw new DecompressorException(
- "Expected " + HttpJarRule.NAME + " " + targetName
- + " to create file with a .jar suffix (got " + archivePath + ")");
+ String.format("Expected %s %s to create file with a .jar suffix (got %s)",
+ HttpJarRule.NAME, targetName, archivePath));
}
}
@@ -63,14 +63,13 @@ public abstract class DecompressorFactory {
return new ZipDecompressor(archivePath);
} else {
throw new DecompressorException(
- "Expected " + HttpArchiveRule.NAME + " " + targetName
- + " to create file with a .zip or .jar suffix (got " + archivePath + ")");
+ String.format("Expected %s %s to create file with a .zip or .jar suffix (got %s)",
+ HttpArchiveRule.NAME, targetName, archivePath));
}
}
- throw new DecompressorException(
- "No decompressor found for " + targetKind + " rule " + targetName
- + " (got " + archivePath + ")");
+ throw new DecompressorException(String.format("No decompressor found for %s rule %s (got %s)",
+ targetKind, targetName, archivePath));
}
/**
@@ -88,11 +87,18 @@ public abstract class DecompressorFactory {
* files and directories under the {@link Decompressor#archiveFile}'s parent directory.
*
* @return the path to the repository directory. That is, the returned path will be a directory
- * containing a WORKSPACE file.
+ * containing a WORKSPACE file.
*/
public abstract Path decompress() throws DecompressorException;
}
+ /**
+ * Decompressor for jar files.
+ *
+ * <p>This is actually a bit of a misnomer, as .jars aren't decompressed. This does create a
+ * repository and a BUILD file for them, though, making the java_import target @&lt;jar&gt;//jar:jar
+ * available for users to depend on.</p>
+ */
static class JarDecompressor extends Decompressor {
private final String targetKind;
private final String targetName;
@@ -121,9 +127,9 @@ public abstract class DecompressorFactory {
FileSystemUtils.createDirectoryAndParents(repositoryDir);
// .external-repository/some-name/WORKSPACE.
Path workspaceFile = repositoryDir.getRelative("WORKSPACE");
- FileSystemUtils.writeContent(workspaceFile, Charset.forName("UTF-8"),
- "# DO NOT EDIT: automatically generated WORKSPACE file for " + targetKind
- + " rule " + targetName);
+ FileSystemUtils.writeContent(workspaceFile, Charset.forName("UTF-8"), String.format(
+ "# DO NOT EDIT: automatically generated WORKSPACE file for %s rule %s\n",
+ targetKind, targetName));
// .external-repository/some-name/jar.
Path jarDirectory = repositoryDir.getRelative("jar");
FileSystemUtils.createDirectoryAndParents(jarDirectory);
@@ -151,6 +157,9 @@ public abstract class DecompressorFactory {
}
}
+ /**
+ * Decompressor for zip files.
+ */
private static class ZipDecompressor extends Decompressor {
public ZipDecompressor(Path archiveFile) {
super(archiveFile);
@@ -184,8 +193,8 @@ public abstract class DecompressorFactory {
}
} catch (IOException | ArchiveException e) {
throw new DecompressorException(
- "Error extracting " + archiveFile + " to " + destinationDirectory + ": "
- + e.getMessage());
+ String.format("Error extracting %s to %s: %s",
+ archiveFile, destinationDirectory, e.getMessage()));
}
return destinationDirectory;
}
@@ -195,8 +204,8 @@ public abstract class DecompressorFactory {
throws IOException, DecompressorException {
PathFragment relativePath = new PathFragment(entry.getName());
if (relativePath.isAbsolute()) {
- throw new DecompressorException("Failed to extract " + relativePath
- + ", zipped paths cannot be absolute");
+ throw new DecompressorException(
+ String.format("Failed to extract %s, zipped paths cannot be absolute", relativePath));
}
Path outputPath = destinationDirectory.getRelative(relativePath);
FileSystemUtils.createDirectoryAndParents(outputPath.getParentDirectory());
@@ -206,8 +215,8 @@ public abstract class DecompressorFactory {
try (OutputStream out = new FileOutputStream(new File(outputPath.getPathString()))) {
IOUtils.copy(in, out);
} catch (IOException e) {
- throw new DecompressorException("Error writing " + outputPath + " from "
- + archiveFile);
+ throw new DecompressorException(
+ String.format("Error writing %s from %s", outputPath, archiveFile));
}
}
}
@@ -221,4 +230,4 @@ public abstract class DecompressorFactory {
super(message);
}
}
-} \ No newline at end of file
+}
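
The new JarDecompressor javadoc above explains that a .jar is not really decompressed: the class lays out a small generated repository (a WORKSPACE file, a jar/ directory, and a BUILD file) so that the java_import target @<jar>//jar:jar becomes available. A hedged sketch of that layout in plain java.nio follows; the BUILD text and helper names are assumptions for illustration, not the exact content Bazel writes.

    // JarRepositorySketch.java - hypothetical illustration of the layout described in the javadoc.
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class JarRepositorySketch {
      // Creates <repositoryDir>/WORKSPACE and <repositoryDir>/jar/BUILD, and links the jar into jar/.
      static void layOutRepository(Path repositoryDir, Path archiveFile, String targetKind,
          String targetName) throws IOException {
        Files.createDirectories(repositoryDir);
        Files.write(repositoryDir.resolve("WORKSPACE"),
            String.format("# DO NOT EDIT: automatically generated WORKSPACE file for %s rule %s%n",
                targetKind, targetName).getBytes(StandardCharsets.UTF_8));

        Path jarDirectory = repositoryDir.resolve("jar");
        Files.createDirectories(jarDirectory);
        Files.createSymbolicLink(jarDirectory.resolve(archiveFile.getFileName()), archiveFile);

        // Assumed BUILD content: a java_import exposing the jar as //jar:jar.
        Files.write(jarDirectory.resolve("BUILD"),
            String.format("java_import(%n    name = \"jar\",%n    jars = [\"%s\"],%n)%n",
                archiveFile.getFileName()).getBytes(StandardCharsets.UTF_8));
      }
    }
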
diff --git a/src/main/java/com/google/devtools/build/lib/bazel/repository/MavenJarFunction.java b/src/main/java/com/google/devtools/build/lib/bazel/repository/MavenJarFunction.java
index e7571cfdce..fd09e8a407 100644
--- a/src/main/java/com/google/devtools/build/lib/bazel/repository/MavenJarFunction.java
+++ b/src/main/java/com/google/devtools/build/lib/bazel/repository/MavenJarFunction.java
@@ -130,11 +130,17 @@ public class MavenJarFunction extends HttpArchiveFunction {
return SkyFunctionName.computed(Ascii.toUpperCase(MavenJarRule.NAME));
}
+ /**
+ * @see RepositoryFunction#getRule(RepositoryName, String, Environment)
+ */
@Override
public Class<? extends RuleDefinition> getRuleDefinition() {
return MavenJarRule.class;
}
+ /**
+ * This downloader creates a connection to one or more Maven repositories and downloads a jar.
+ */
static class MavenDownloader {
private static final String MAVEN_CENTRAL_URL = "http://central.maven.org/maven2/";
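
The MavenDownloader javadoc added here says the class connects to one or more Maven repositories and downloads a jar. As a rough, hypothetical illustration of how Maven coordinates conventionally map onto a repository URL (not the class's actual implementation, which is outside this hunk):

    // MavenUrlSketch.java - hypothetical; shows only the conventional Maven repository layout.
    import java.net.MalformedURLException;
    import java.net.URL;

    public class MavenUrlSketch {
      private static final String MAVEN_CENTRAL_URL = "http://central.maven.org/maven2/";

      // <repo>/<groupId with dots as slashes>/<artifactId>/<version>/<artifactId>-<version>.jar
      static URL artifactUrl(String groupId, String artifactId, String version)
          throws MalformedURLException {
        String path = groupId.replace('.', '/') + "/" + artifactId + "/" + version + "/"
            + artifactId + "-" + version + ".jar";
        return new URL(MAVEN_CENTRAL_URL + path);
      }

      public static void main(String[] args) throws MalformedURLException {
        // Prints http://central.maven.org/maven2/com/google/guava/guava/18.0/guava-18.0.jar
        System.out.println(artifactUrl("com.google.guava", "guava", "18.0"));
      }
    }
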
diff --git a/src/main/java/com/google/devtools/build/lib/bazel/repository/NewLocalRepositoryFunction.java b/src/main/java/com/google/devtools/build/lib/bazel/repository/NewLocalRepositoryFunction.java
index 807857e9f4..674dd55b84 100644
--- a/src/main/java/com/google/devtools/build/lib/bazel/repository/NewLocalRepositoryFunction.java
+++ b/src/main/java/com/google/devtools/build/lib/bazel/repository/NewLocalRepositoryFunction.java
@@ -123,7 +123,7 @@ public class NewLocalRepositoryFunction extends RepositoryFunction {
try {
Path workspaceFile = repositoryDirectory.getRelative("WORKSPACE");
FileSystemUtils.writeContent(workspaceFile, Charset.forName("UTF-8"),
- "# DO NOT EDIT: automatically generated WORKSPACE file for " + rule + "\n");
+ String.format("# DO NOT EDIT: automatically generated WORKSPACE file for %s\n", rule));
} catch (IOException e) {
throw new RepositoryFunctionException(e, Transience.TRANSIENT);
}
@@ -137,19 +137,19 @@ public class NewLocalRepositoryFunction extends RepositoryFunction {
* @param env the Skyframe environment.
* @return the file value of the symlink created.
* @throws RepositoryFunctionException if the BUILD file specified does not exist or cannot be
- * linked.
+ * linked.
*/
- public static FileValue createBuildFile(Rule rule, Path workspaceDirectory,
- Path repositoryDirectory, Environment env)
+ public static FileValue createBuildFile(
+ Rule rule, Path workspaceDirectory, Path repositoryDirectory, Environment env)
throws RepositoryFunctionException {
AggregatingAttributeMapper mapper = AggregatingAttributeMapper.of(rule);
PathFragment buildFile = new PathFragment(mapper.get("build_file", Type.STRING));
Path buildFileTarget = workspaceDirectory.getRelative(buildFile);
if (!buildFileTarget.exists()) {
throw new RepositoryFunctionException(
- new EvalException(rule.getLocation(), "In " + rule
- + " the 'build_file' attribute does not specify an existing file ("
- + buildFileTarget + " does not exist)"),
+ new EvalException(rule.getLocation(),
+ String.format("In %s the 'build_file' attribute does not specify an existing file "
+ + "(%s does not exist)", rule, buildFileTarget)),
Transience.PERSISTENT);
}
Path buildFilePath = repositoryDirectory.getRelative("BUILD");
@@ -164,8 +164,8 @@ public class NewLocalRepositoryFunction extends RepositoryFunction {
}
} catch (IOException e) {
throw new RepositoryFunctionException(
- new IOException("Error creating symbolic link from " + from + " to " + to + ": "
- + e.getMessage()), Transience.TRANSIENT);
+ new IOException(String.format("Error creating symbolic link from %s to %s: %s",
+ from, to, e.getMessage())), Transience.TRANSIENT);
}
SkyKey outputDirectoryKey = FileValue.key(RootedPath.toRootedPath(
@@ -175,11 +175,14 @@ public class NewLocalRepositoryFunction extends RepositoryFunction {
FileSymlinkCycleException.class, InconsistentFilesystemException.class);
} catch (IOException | FileSymlinkCycleException | InconsistentFilesystemException e) {
throw new RepositoryFunctionException(
- new IOException("Could not access " + from + ": " + e.getMessage()),
+ new IOException(String.format("Could not access %s: %s", from, e.getMessage())),
Transience.PERSISTENT);
}
}
+ /**
+ * @see RepositoryFunction#getRule(RepositoryName, String, Environment)
+ */
@Override
public SkyFunctionName getSkyFunctionName() {
return SkyFunctionName.computed(NewLocalRepositoryRule.NAME.toUpperCase());
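
The reshaped createBuildFile above follows a straightforward flow: resolve the rule's build_file attribute against the workspace directory, fail with a persistent error if that file does not exist, and otherwise symlink it into the repository directory as BUILD. A hedged sketch of the same flow in plain java.nio (hypothetical helper, not Bazel's Path/FileValue API):

    // BuildFileLinkSketch.java - hypothetical illustration of the createBuildFile flow.
    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class BuildFileLinkSketch {
      static Path linkBuildFile(Path workspaceDirectory, Path repositoryDirectory,
          String buildFileAttribute) throws IOException {
        Path buildFileTarget = workspaceDirectory.resolve(buildFileAttribute);
        if (!Files.exists(buildFileTarget)) {
          // Mirrors the CL's message: build_file must name an existing file.
          throw new IOException(String.format(
              "the 'build_file' attribute does not specify an existing file (%s does not exist)",
              buildFileTarget));
        }
        Path buildFilePath = repositoryDirectory.resolve("BUILD");
        try {
          Files.createSymbolicLink(buildFilePath, buildFileTarget);
        } catch (IOException e) {
          throw new IOException(String.format("Error creating symbolic link from %s to %s: %s",
              buildFilePath, buildFileTarget, e.getMessage()), e);
        }
        return buildFilePath;
      }
    }
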