// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.buildtool; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.base.Stopwatch; import com.google.common.collect.HashBasedTable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Ordering; import com.google.common.collect.Table; import com.google.devtools.build.lib.Constants; import com.google.devtools.build.lib.actions.Action; import com.google.devtools.build.lib.actions.ActionCacheChecker; import com.google.devtools.build.lib.actions.ActionContextConsumer; import com.google.devtools.build.lib.actions.ActionContextMarker; import com.google.devtools.build.lib.actions.ActionContextProvider; import com.google.devtools.build.lib.actions.ActionGraph; import com.google.devtools.build.lib.actions.ActionInputFileCache; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.BlazeExecutor; import com.google.devtools.build.lib.actions.BuildFailedException; import com.google.devtools.build.lib.actions.ExecException; import com.google.devtools.build.lib.actions.ExecutionStrategy; import com.google.devtools.build.lib.actions.Executor; 
import com.google.devtools.build.lib.actions.Executor.ActionContext; import com.google.devtools.build.lib.actions.ExecutorInitException; import com.google.devtools.build.lib.actions.LocalHostCapacity; import com.google.devtools.build.lib.actions.ResourceManager; import com.google.devtools.build.lib.actions.ResourceSet; import com.google.devtools.build.lib.actions.SimpleActionContextProvider; import com.google.devtools.build.lib.actions.SpawnActionContext; import com.google.devtools.build.lib.actions.TestExecException; import com.google.devtools.build.lib.actions.cache.ActionCache; import com.google.devtools.build.lib.analysis.BuildView; import com.google.devtools.build.lib.analysis.BuildView.AnalysisResult; import com.google.devtools.build.lib.analysis.ConfiguredTarget; import com.google.devtools.build.lib.analysis.FileProvider; import com.google.devtools.build.lib.analysis.InputFileConfiguredTarget; import com.google.devtools.build.lib.analysis.OutputFileConfiguredTarget; import com.google.devtools.build.lib.analysis.OutputGroupProvider; import com.google.devtools.build.lib.analysis.SymlinkTreeActionContext; import com.google.devtools.build.lib.analysis.TopLevelArtifactContext; import com.google.devtools.build.lib.analysis.TopLevelArtifactHelper; import com.google.devtools.build.lib.analysis.TransitiveInfoCollection; import com.google.devtools.build.lib.analysis.WorkspaceStatusAction; import com.google.devtools.build.lib.analysis.config.BuildConfiguration; import com.google.devtools.build.lib.analysis.config.BuildConfigurationCollection; import com.google.devtools.build.lib.buildtool.buildevent.ExecutionPhaseCompleteEvent; import com.google.devtools.build.lib.buildtool.buildevent.ExecutionStartingEvent; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.collect.CollectionUtils; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.events.Event; import 
com.google.devtools.build.lib.events.EventHandler; import com.google.devtools.build.lib.events.EventKind; import com.google.devtools.build.lib.events.Reporter; import com.google.devtools.build.lib.exec.CheckUpToDateFilter; import com.google.devtools.build.lib.exec.ExecutionOptions; import com.google.devtools.build.lib.exec.OutputService; import com.google.devtools.build.lib.exec.SingleBuildFileCache; import com.google.devtools.build.lib.exec.SymlinkTreeStrategy; import com.google.devtools.build.lib.packages.Rule; import com.google.devtools.build.lib.profiler.AutoProfiler; import com.google.devtools.build.lib.profiler.ProfilePhase; import com.google.devtools.build.lib.profiler.Profiler; import com.google.devtools.build.lib.profiler.ProfilerTask; import com.google.devtools.build.lib.rules.fileset.FilesetActionContext; import com.google.devtools.build.lib.rules.fileset.FilesetActionContextImpl; import com.google.devtools.build.lib.rules.test.TestActionContext; import com.google.devtools.build.lib.runtime.BlazeModule; import com.google.devtools.build.lib.runtime.BlazeRuntime; import com.google.devtools.build.lib.runtime.CommandEnvironment; import com.google.devtools.build.lib.skyframe.AspectValue; import com.google.devtools.build.lib.skyframe.Builder; import com.google.devtools.build.lib.skyframe.SkyframeExecutor; import com.google.devtools.build.lib.util.AbruptExitException; import com.google.devtools.build.lib.util.ExitCode; import com.google.devtools.build.lib.util.LoggingUtil; import com.google.devtools.build.lib.util.io.OutErr; import com.google.devtools.build.lib.vfs.FileSystem; import com.google.devtools.build.lib.vfs.FileSystemUtils; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import java.io.IOException; import java.io.OutputStream; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; 
import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; /** * This class manages the execution phase. The entry point is {@link #executeBuild}. * *

 * <p>This is only intended for use by {@link BuildTool}.
 *

This class contains an ActionCache, and refers to the BlazeRuntime's BuildView and
 * PackageCache.
 *
 * @see BuildTool
 * @see BuildView
 */
public class ExecutionTool {

  /**
   * Indexes the {@link ActionContext}s offered by all {@link ActionContextProvider}s, both by
   * context type (for the default strategy) and by (context type, name), so that strategy flag
   * values can be resolved to concrete strategy instances.
   */
  private static class StrategyConverter {
    // (context type, strategy name) -> strategy instance, for named strategies.
    private Table<Class<? extends ActionContext>, String, ActionContext> classMap =
        HashBasedTable.create();
    // context type -> default strategy instance.
    private Map<Class<? extends ActionContext>, ActionContext> defaultClassMap = new HashMap<>();

    /**
     * Aggregates all {@link ActionContext}s that are in {@code contextProviders}.
     */
    @SuppressWarnings("unchecked")
    private StrategyConverter(Iterable<ActionContextProvider> contextProviders) {
      for (ActionContextProvider provider : contextProviders) {
        for (ActionContext strategy : provider.getActionContexts()) {
          ExecutionStrategy annotation =
              strategy.getClass().getAnnotation(ExecutionStrategy.class);
          // Only annotated contexts are registered; un-annotated ones are ignored here.
          if (annotation != null) {
            defaultClassMap.put(annotation.contextType(), strategy);

            for (String name : annotation.name()) {
              classMap.put(annotation.contextType(), name, strategy);
            }
          }
        }
      }
    }

    /** Returns the strategy for {@code clazz}; the default one if {@code name} is empty. */
    @SuppressWarnings("unchecked")
    private <T extends ActionContext> T getStrategy(Class<T> clazz, String name) {
      return (T) (name.isEmpty() ? defaultClassMap.get(clazz) : classMap.get(clazz, name));
    }

    /** Returns a comma-separated, sorted list of valid strategy names for {@code context}. */
    private String getValidValues(Class<? extends ActionContext> context) {
      return Joiner.on(", ").join(Ordering.natural().sortedCopy(classMap.row(context).keySet()));
    }

    /** Returns the marker-annotation name for {@code context}, or its simple class name. */
    private String getUserFriendlyName(Class<? extends ActionContext> context) {
      ActionContextMarker marker = context.getAnnotation(ActionContextMarker.class);
      return marker != null
          ? marker.name()
          : context.getSimpleName();
    }
  }

  static final Logger LOG = Logger.getLogger(ExecutionTool.class.getName());

  private final CommandEnvironment env;
  private final BlazeRuntime runtime;
  private final BuildRequest request;
  private BlazeExecutor executor;
  private ActionInputFileCache fileCache;
  private final ImmutableList<ActionContextProvider> actionContextProviders;

  // Maps mnemonics of SpawnActions to the SpawnActionContext to use for them; mnemonic lookup
  // is case-insensitive.
  private Map<String, SpawnActionContext> spawnStrategyMap =
      new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
  private List<ActionContext> strategies = new ArrayList<>();

  ExecutionTool(CommandEnvironment env, BuildRequest request) throws ExecutorInitException {
    this.env = env;
    this.runtime = env.getRuntime();
    this.request = request;

    // Create tools before getting the strategies from the modules as some of them need tools to
    // determine whether the host actually supports certain strategies (e.g. sandboxing).
    createToolsSymlinks();

    this.actionContextProviders =
        getActionContextProvidersFromModules(
            runtime,
            new FilesetActionContextImpl.Provider(env.getReporter(), runtime.getWorkspaceName()),
            new SimpleActionContextProvider(
                new SymlinkTreeStrategy(runtime.getOutputService(), runtime.getBinTools())));

    StrategyConverter strategyConverter = new StrategyConverter(actionContextProviders);

    ImmutableList<ActionContextConsumer> actionContextConsumers =
        getActionContextConsumersFromModules(
            runtime,
            // TODO(philwo) - the ExecutionTool should not add arbitrary dependencies on its own,
            // instead these dependencies should be added to the ActionContextConsumer of the
            // module that actually depends on them.
            new ActionContextConsumer() {
              @Override
              public Map<String, String> getSpawnActionContexts() {
                return ImmutableMap.of();
              }

              @Override
              public Map<Class<? extends ActionContext>, String> getActionContexts() {
                return ImmutableMap.<Class<? extends ActionContext>, String>builder()
                    .put(FilesetActionContext.class, "")
                    .put(WorkspaceStatusAction.Context.class, "")
                    .put(SymlinkTreeActionContext.class, "")
                    .build();
              }
            });

    for (ActionContextConsumer consumer : actionContextConsumers) {
      // There are many different SpawnActions, and we want to control the action context they use
      // independently from each other, for example, to run genrules locally and Java compile
      // action in prod. Thus, for SpawnActions, we decide the action context to use not only
      // based on the context class, but also the mnemonic of the action.
      for (Map.Entry<String, String> entry : consumer.getSpawnActionContexts().entrySet()) {
        SpawnActionContext context =
            strategyConverter.getStrategy(SpawnActionContext.class, entry.getValue());
        if (context == null) {
          throw makeExceptionForInvalidStrategyValue(entry.getValue(), "spawn",
              strategyConverter.getValidValues(SpawnActionContext.class));
        }

        spawnStrategyMap.put(entry.getKey(), context);
      }

      for (Map.Entry<Class<? extends ActionContext>, String> entry :
          consumer.getActionContexts().entrySet()) {
        ActionContext context = strategyConverter.getStrategy(entry.getKey(), entry.getValue());
        if (context == null) {
          throw makeExceptionForInvalidStrategyValue(entry.getValue(),
              strategyConverter.getUserFriendlyName(entry.getKey()),
              strategyConverter.getValidValues(entry.getKey()));
        }

        strategies.add(context);
      }
    }

    // If tests are to be run during build, too, we have to explicitly load the test action
    // context.
    if (request.shouldRunTests()) {
      String testStrategyValue = request.getOptions(ExecutionOptions.class).testStrategy;
      ActionContext context =
          strategyConverter.getStrategy(TestActionContext.class, testStrategyValue);
      if (context == null) {
        throw makeExceptionForInvalidStrategyValue(testStrategyValue, "test",
            strategyConverter.getValidValues(TestActionContext.class));
      }

      strategies.add(context);
    }
  }

  /** Collects the {@link ActionContextConsumer}s of all modules, plus {@code extraConsumers}. */
  private static ImmutableList<ActionContextConsumer> getActionContextConsumersFromModules(
      BlazeRuntime runtime, ActionContextConsumer... extraConsumers) {
    ImmutableList.Builder<ActionContextConsumer> builder = ImmutableList.builder();
    for (BlazeModule module : runtime.getBlazeModules()) {
      builder.addAll(module.getActionContextConsumers());
    }
    builder.add(extraConsumers);
    return builder.build();
  }

  /** Collects the {@link ActionContextProvider}s of all modules, plus {@code extraProviders}. */
  private static ImmutableList<ActionContextProvider> getActionContextProvidersFromModules(
      BlazeRuntime runtime, ActionContextProvider... extraProviders) {
    ImmutableList.Builder<ActionContextProvider> builder = ImmutableList.builder();
    for (BlazeModule module : runtime.getBlazeModules()) {
      builder.addAll(module.getActionContextProviders());
    }
    builder.add(extraProviders);
    return builder.build();
  }

  private static ExecutorInitException makeExceptionForInvalidStrategyValue(String value,
      String strategy, String validValues) {
    return new ExecutorInitException(String.format(
        "'%s' is an invalid value for %s strategy. Valid values are: %s", value, strategy,
        validValues), ExitCode.COMMAND_LINE_ERROR);
  }

  /** Returns the executor, creating it lazily on first call. */
  Executor getExecutor() throws ExecutorInitException {
    if (executor == null) {
      executor = createExecutor();
    }
    return executor;
  }

  /**
   * Creates an executor for the current set of blaze runtime, execution options, and request.
   */
  private BlazeExecutor createExecutor() throws ExecutorInitException {
    return new BlazeExecutor(
        runtime.getDirectories().getExecRoot(),
        runtime.getDirectories().getOutputPath(),
        getReporter(),
        env.getEventBus(),
        runtime.getClock(),
        request,
        request.getOptions(ExecutionOptions.class).verboseFailures,
        request.getOptions(ExecutionOptions.class).showSubcommands,
        strategies,
        spawnStrategyMap,
        actionContextProviders);
  }

  /** Eagerly initializes the executor. */
  void init() throws ExecutorInitException {
    getExecutor();
  }

  /** Notifies all action context providers that the execution phase is ending. */
  void shutdown() {
    for (ActionContextProvider actionContextProvider : actionContextProviders) {
      actionContextProvider.executionPhaseEnding();
    }
  }

  /**
   * Performs the execution phase (phase 3) of the build, in which the Builder is applied to the
   * action graph to bring the targets up to date. (This function will return prior to
   * execution-proper if --nobuild was specified.)
   *
   * @param buildId UUID of the build id
   * @param analysisResult the analysis phase output
   * @param buildResult the mutable build result
   * @param packageRoots package roots collected from loading phase and
   *     BuildConfigurationCollection creation
   */
  void executeBuild(UUID buildId, AnalysisResult analysisResult,
      BuildResult buildResult,
      BuildConfigurationCollection configurations,
      ImmutableMap<PathFragment, Path> packageRoots)
      throws BuildFailedException, InterruptedException, TestExecException, AbruptExitException {
    Stopwatch timer = Stopwatch.createStarted();
    prepare(packageRoots, configurations);

    ActionGraph actionGraph = analysisResult.getActionGraph();

    // Get top-level artifacts.
    ImmutableSet<Artifact> additionalArtifacts = analysisResult.getAdditionalArtifactsToBuild();

    // Create symlinks only after we've verified that we're actually
    // supposed to build something.
    if (getWorkspace().getFileSystem().supportsSymbolicLinks()) {
      List<BuildConfiguration> targetConfigurations = configurations.getTargetConfigurations();
      // TODO(bazel-team): This is not optimal - we retain backwards compatibility in the case
      // where there's only a single configuration, but we don't create any symlinks in the
      // multi-config case. Can we do better? [multi-config]
      if (targetConfigurations.size() == 1) {
        OutputDirectoryLinksUtils.createOutputDirectoryLinks(
            runtime.getWorkspaceName(), getWorkspace(), getExecRoot(),
            runtime.getOutputPath(), getReporter(), targetConfigurations.get(0),
            request.getSymlinkPrefix());
      }
    }

    OutputService outputService = runtime.getOutputService();
    if (outputService != null) {
      outputService.startBuild(buildId);
    } else {
      startLocalOutputBuild(); // TODO(bazel-team): this could be just another OutputService
    }

    ActionCache actionCache = getActionCache();
    SkyframeExecutor skyframeExecutor = env.getSkyframeExecutor();
    Builder builder = createBuilder(request, executor, actionCache, skyframeExecutor);

    //
    // Execution proper. All statements below are logically nested in
    // begin/end pairs. No early returns or exceptions please!
    //

    Collection<ConfiguredTarget> configuredTargets = buildResult.getActualTargets();
    env.getEventBus().post(new ExecutionStartingEvent(configuredTargets));

    getReporter().handle(Event.progress("Building..."));

    // Conditionally record dependency-checker log:
    ExplanationHandler explanationHandler =
        installExplanationHandler(request.getBuildOptions().explanationPath,
            request.getOptionsDescription());

    Set<ConfiguredTarget> builtTargets = new HashSet<>();
    boolean interrupted = false;
    try {
      Collection<AspectValue> aspects = analysisResult.getAspects();

      Iterable<Artifact> allArtifactsForProviders = Iterables.concat(
          additionalArtifacts,
          TopLevelArtifactHelper.getAllArtifactsToBuild(
              analysisResult.getTargetsToBuild(), analysisResult.getTopLevelContext())
              .getAllArtifacts(),
          TopLevelArtifactHelper.getAllArtifactsToBuildFromAspects(
              aspects, analysisResult.getTopLevelContext()).getAllArtifacts(),
          //TODO(dslomov): Artifacts to test from aspects?
          TopLevelArtifactHelper.getAllArtifactsToTest(analysisResult.getTargetsToTest()));

      if (request.isRunningInEmacs()) {
        // The syntax of this message is tightly constrained by lisp/progmodes/compile.el in emacs
        request.getOutErr().printErrLn("blaze: Entering directory `" + getExecRoot() + "/'");
      }
      for (ActionContextProvider actionContextProvider : actionContextProviders) {
        actionContextProvider.executionPhaseStarting(
            fileCache, actionGraph, allArtifactsForProviders);
      }
      executor.executionPhaseStarting();
      skyframeExecutor.drainChangedFiles();

      if (request.getViewOptions().discardAnalysisCache) {
        // Free memory by removing cache entries that aren't going to be needed. Note that in
        // skyframe full, this destroys the action graph as well, so we can only do it after the
        // action graph is no longer needed.
        env.getView().clearAnalysisCache(analysisResult.getTargetsToBuild());
        actionGraph = null;
      }

      configureResourceManager(request);

      Profiler.instance().markPhase(ProfilePhase.EXECUTE);

      builder.buildArtifacts(
          env.getReporter(),
          additionalArtifacts,
          analysisResult.getParallelTests(),
          analysisResult.getExclusiveTests(),
          analysisResult.getTargetsToBuild(),
          analysisResult.getAspects(),
          executor,
          builtTargets,
          request.getBuildOptions().explanationPath != null,
          runtime.getLastExecutionTimeRange());

    } catch (InterruptedException e) {
      interrupted = true;
      throw e;
    } finally {
      runtime.recordLastExecutionTime();
      if (request.isRunningInEmacs()) {
        request.getOutErr().printErrLn("blaze: Leaving directory `" + getExecRoot() + "/'");
      }
      if (!interrupted) {
        getReporter().handle(Event.progress("Building complete."));
      }

      env.getEventBus().post(new ExecutionFinishedEvent(ImmutableMap.<String, Long>of(), 0L,
          skyframeExecutor.getOutputDirtyFilesAndClear(),
          skyframeExecutor.getModifiedFilesDuringPreviousBuildAndClear()));

      executor.executionPhaseEnding();
      for (ActionContextProvider actionContextProvider : actionContextProviders) {
        actionContextProvider.executionPhaseEnding();
      }

      Profiler.instance().markPhase(ProfilePhase.FINISH);

      if (!interrupted) {
        saveCaches(actionCache);
      }

      try (AutoProfiler p = AutoProfiler.profiled("Show results", ProfilerTask.INFO)) {
        determineSuccessfulTargets(buildResult, configuredTargets, builtTargets, timer);
        showBuildResult(request, buildResult, configuredTargets, analysisResult.getAspects());
        Preconditions.checkNotNull(buildResult.getSuccessfulTargets());
      }

      if (explanationHandler != null) {
        uninstallExplanationHandler(explanationHandler);
      }
      // Finalize output service last, so that if we do throw an exception, we know all the other
      // code has already run.
      if (runtime.getOutputService() != null) {
        boolean isBuildSuccessful =
            buildResult.getSuccessfulTargets().size() == configuredTargets.size();
        runtime.getOutputService().finalizeBuild(isBuildSuccessful);
      }
    }
  }

  /** Prepares per-build state: cleans the action log dir and plants the symlink forest. */
  private void prepare(ImmutableMap<PathFragment, Path> packageRoots,
      BuildConfigurationCollection configurations) throws ExecutorInitException {
    // Prepare for build.
    Profiler.instance().markPhase(ProfilePhase.PREPARE);

    // Create some tools symlinks / cleanup per-build state
    createActionLogDirectory();

    // Plant the symlink forest.
    plantSymlinkForest(packageRoots, configurations);
  }

  private void createToolsSymlinks() throws ExecutorInitException {
    try {
      runtime.getBinTools().setupBuildTools();
    } catch (ExecException e) {
      throw new ExecutorInitException("Tools symlink creation failed", e);
    }
  }

  /** Plants the symlink forest in the exec root, first removing stale non-prefixed entries. */
  private void plantSymlinkForest(ImmutableMap<PathFragment, Path> packageRoots,
      BuildConfigurationCollection configurations) throws ExecutorInitException {
    try {
      FileSystemUtils.deleteTreesBelowNotPrefixed(getExecRoot(),
          new String[] { ".", "_", Constants.PRODUCT_NAME + "-"});
      // Delete the build configuration's temporary directories
      for (BuildConfiguration configuration : configurations.getTargetConfigurations()) {
        configuration.prepareForExecutionPhase();
      }
      FileSystemUtils.plantLinkForest(packageRoots, getExecRoot());
    } catch (IOException e) {
      throw new ExecutorInitException("Source forest creation failed", e);
    }
  }

  /** Recreates the action console-output directory, wiping any leftovers from a prior build. */
  private void createActionLogDirectory() throws ExecutorInitException {
    Path directory = runtime.getDirectories().getActionConsoleOutputDirectory();
    try {
      if (directory.exists()) {
        FileSystemUtils.deleteTree(directory);
      }
      directory.createDirectory();
    } catch (IOException e) {
      throw new ExecutorInitException("Couldn't delete action output directory", e);
    }
  }

  /**
   * Prepare for a local output build.
   */
  private void startLocalOutputBuild() throws ExecutorInitException {
    try (AutoProfiler p =
        AutoProfiler.profiled("Starting local output build", ProfilerTask.INFO)) {
      Path outputPath = runtime.getOutputPath();
      Path localOutputPath = runtime.getDirectories().getLocalOutputPath();

      if (outputPath.isSymbolicLink()) {
        try {
          // Remove the existing symlink first.
          outputPath.delete();
          if (localOutputPath.exists()) {
            // Pre-existing local output directory. Move to outputPath.
            localOutputPath.renameTo(outputPath);
          }
        } catch (IOException e) {
          throw new ExecutorInitException("Couldn't handle local output directory symlinks", e);
        }
      }
    }
  }

  /**
   * If a path is supplied, creates and installs an ExplanationHandler. Returns
   * an instance on success. Reports an error and returns null otherwise.
   */
  private ExplanationHandler installExplanationHandler(PathFragment explanationPath,
      String allOptions) {
    if (explanationPath == null) {
      return null;
    }
    ExplanationHandler handler;
    try {
      handler = new ExplanationHandler(
          getWorkspace().getRelative(explanationPath).getOutputStream(),
          allOptions);
    } catch (IOException e) {
      getReporter().handle(Event.warn(String.format(
          "Cannot write explanation of rebuilds to file '%s': %s",
          explanationPath, e.getMessage())));
      return null;
    }
    getReporter().handle(
        Event.info("Writing explanation of rebuilds to '" + explanationPath + "'"));
    getReporter().addHandler(handler);
    return handler;
  }

  /**
   * Uninstalls the specified ExplanationHandler (if any) and closes the log
   * file.
   */
  private void uninstallExplanationHandler(ExplanationHandler handler) {
    if (handler != null) {
      getReporter().removeHandler(handler);
      handler.log.close();
    }
  }

  /**
   * An ErrorEventListener implementation that records DEPCHECKER events into a log
   * file, iff the --explain flag is specified during a build.
   */
  private static class ExplanationHandler implements EventHandler {
    private final PrintWriter log;

    private ExplanationHandler(OutputStream log, String optionsDescription) {
      this.log = new PrintWriter(log);
      this.log.println("Build options: " + optionsDescription);
    }

    @Override
    public void handle(Event event) {
      if (event.getKind() == EventKind.DEPCHECKER) {
        log.println(event.getMessage());
      }
    }
  }

  /**
   * Computes the result of the build. Sets the list of successful (up-to-date)
   * targets in the request object.
   *
   * @param configuredTargets The configured targets whose artifacts are to be
   *     built.
   * @param timer A timer that was started when the execution phase started.
   */
  private void determineSuccessfulTargets(BuildResult result,
      Collection<ConfiguredTarget> configuredTargets, Set<ConfiguredTarget> builtTargets,
      Stopwatch timer) {
    // Maintain the ordering by copying builtTargets into a LinkedHashSet in the same iteration
    // order as configuredTargets.
    Collection<ConfiguredTarget> successfulTargets = new LinkedHashSet<>();
    for (ConfiguredTarget target : configuredTargets) {
      if (builtTargets.contains(target)) {
        successfulTargets.add(target);
      }
    }
    env.getEventBus().post(
        new ExecutionPhaseCompleteEvent(timer.stop().elapsed(TimeUnit.MILLISECONDS)));
    result.setSuccessfulTargets(successfulTargets);
  }

  /** Decides whether an artifact is shown in the build result, and how it is rendered. */
  private interface ArtifactFormatter {
    boolean shouldPrint(Artifact artifact);

    String format(Artifact artifact);
  }

  /** Default formatter: pretty (symlink-prefixed) paths, hiding source and middleman artifacts. */
  private class BriefArtifactFormatter implements ArtifactFormatter {
    @Override
    public boolean shouldPrint(Artifact artifact) {
      return !artifact.isSourceArtifact() && !artifact.isMiddlemanArtifact();
    }

    @Override
    public String format(Artifact artifact) {
      return "  " + OutputDirectoryLinksUtils.getPrettyPath(artifact.getPath(),
          runtime.getWorkspaceName(), getWorkspace(), request.getSymlinkPrefix());
    }
  }

  /** Formatter for --experimental_show_artifacts-style detailed output (full paths). */
  private class DetailedArtifactFormatter implements ArtifactFormatter {
    @Override
    public boolean shouldPrint(Artifact artifact) {
      return !artifact.isSourceArtifact();
    }

    @Override
    public String format(Artifact artifact) {
      return ">>>" + artifact.getPath();
    }
  }

  /**
   * Shows the result of the build. Information includes the list of up-to-date
   * and failed targets and list of output artifacts for successful targets
   *
   * @param request The build request, which specifies various options.
   * @param configuredTargets The configured targets whose artifacts are to be
   *     built.
   * @param aspects the aspects that were built
   */
  private void showBuildResult(
      BuildRequest request,
      BuildResult result,
      Collection<ConfiguredTarget> configuredTargets,
      Collection<AspectValue> aspects) {
    // NOTE: be careful what you print! We don't want to create a consistency
    // problem where the summary message and the exit code disagree. The logic
    // here is already complex.

    // Filter the targets we care about into two buckets:
    Collection<ConfiguredTarget> succeeded = new ArrayList<>();
    Collection<ConfiguredTarget> failed = new ArrayList<>();
    for (ConfiguredTarget target : configuredTargets) {
      // TODO(bazel-team): this is quite ugly. Add a marker provider for this check.
      if (target instanceof InputFileConfiguredTarget) {
        // Suppress display of source files (because we do no work to build them).
        continue;
      }
      if (target.getTarget() instanceof Rule) {
        Rule rule = (Rule) target.getTarget();
        if (rule.getRuleClass().contains("$")) {
          // Suppress display of hidden rules
          continue;
        }
      }
      if (target instanceof OutputFileConfiguredTarget) {
        // Suppress display of generated files (because they appear underneath
        // their generating rule), EXCEPT those ones which are not part of the
        // filesToBuild of their generating rule (e.g. .par, _deploy.jar
        // files), OR when a user explicitly requests an output file but not
        // its rule.
        TransitiveInfoCollection generatingRule =
            env.getView().getGeneratingRule((OutputFileConfiguredTarget) target);
        if (CollectionUtils.containsAll(
            generatingRule.getProvider(FileProvider.class).getFilesToBuild(),
            target.getProvider(FileProvider.class).getFilesToBuild())
            && configuredTargets.contains(generatingRule)) {
          continue;
        }
      }

      Collection<ConfiguredTarget> successfulTargets = result.getSuccessfulTargets();
      (successfulTargets.contains(target) ? succeeded : failed).add(target);
    }

    // Suppress summary if --show_result value is exceeded:
    if (succeeded.size() + failed.size() + aspects.size()
        > request.getBuildOptions().maxResultTargets) {
      return;
    }

    OutErr outErr = request.getOutErr();

    ArtifactFormatter formatter = request.getBuildOptions().detailedResult
        ? new DetailedArtifactFormatter()
        : new BriefArtifactFormatter();
    TopLevelArtifactContext context = request.getTopLevelArtifactContext();
    for (ConfiguredTarget target : succeeded) {
      Label label = target.getLabel();
      // For up-to-date targets report generated artifacts, but only
      // if they have associated action and not middleman artifacts.
      boolean headerFlag = true;
      for (Artifact artifact :
          TopLevelArtifactHelper.getAllArtifactsToBuild(target, context)
              .getImportantArtifacts()) {
        if (formatter.shouldPrint(artifact)) {
          if (headerFlag) {
            outErr.printErr("Target " + label + " up-to-date:\n");
            headerFlag = false;
          }
          outErr.printErrLn(formatter.format(artifact));
        }
      }
      if (headerFlag) {
        outErr.printErr("Target " + label + " up-to-date (nothing to build)\n");
      }
    }

    for (AspectValue aspect : aspects) {
      Label label = aspect.getLabel();
      String aspectName = aspect.getAspect().getName();
      boolean headerFlag = true;
      NestedSet<Artifact> importantArtifacts =
          TopLevelArtifactHelper.getAllArtifactsToBuild(aspect, context).getImportantArtifacts();
      for (Artifact importantArtifact : importantArtifacts) {
        if (headerFlag) {
          outErr.printErr("Aspect " + aspectName + " of " + label + " up-to-date:\n");
          headerFlag = false;
        }
        if (formatter.shouldPrint(importantArtifact)) {
          outErr.printErrLn(formatter.format(importantArtifact));
        }
      }
      if (headerFlag) {
        outErr.printErr(
            "Aspect " + aspectName + " of " + label + " up-to-date (nothing to build)\n");
      }
    }

    for (ConfiguredTarget target : failed) {
      outErr.printErr("Target " + target.getLabel() + " failed to build\n");

      // For failed compilation, it is still useful to examine temp artifacts,
      // (ie, preprocessed and assembler files).
      OutputGroupProvider topLevelProvider =
          target.getProvider(OutputGroupProvider.class);
      if (topLevelProvider != null) {
        for (Artifact temp : topLevelProvider.getOutputGroup(OutputGroupProvider.TEMP_FILES)) {
          if (temp.getPath().exists()) {
            outErr.printErrLn("  See temp at "
                + OutputDirectoryLinksUtils.getPrettyPath(temp.getPath(),
                    runtime.getWorkspaceName(), getWorkspace(), request.getSymlinkPrefix()));
          }
        }
      }
    }
    if (!failed.isEmpty() && !request.getOptions(ExecutionOptions.class).verboseFailures) {
      outErr.printErr("Use --verbose_failures to see the command lines of failed build steps.\n");
    }
  }

  /** Returns the persistent action cache, translating I/O failures into a user-facing error. */
  private ActionCache getActionCache() throws LocalEnvironmentException {
    try {
      return env.getPersistentActionCache();
    } catch (IOException e) {
      // TODO(bazel-team): (2010) Ideally we should just remove all cache data and reinitialize
      // caches.
      LoggingUtil.logToRemote(Level.WARNING, "Failed to initialize action cache: "
          + e.getMessage(), e);
      throw new LocalEnvironmentException("couldn't create action cache: " + e.getMessage()
          + ". If error persists, use 'blaze clean'");
    }
  }

  /** Creates the Skyframe builder that will drive execution, wiring in the action cache. */
  private Builder createBuilder(BuildRequest request,
      Executor executor,
      ActionCache actionCache,
      SkyframeExecutor skyframeExecutor) {
    BuildRequest.BuildRequestOptions options = request.getBuildOptions();
    boolean verboseExplanations = options.verboseExplanations;
    boolean keepGoing = request.getViewOptions().keepGoing;

    Path actionOutputRoot = runtime.getDirectories().getActionConsoleOutputDirectory();
    Predicate<Action> executionFilter = CheckUpToDateFilter.fromOptions(
        request.getOptions(ExecutionOptions.class));

    // jobs should have been verified in BuildRequest#validateOptions().
    Preconditions.checkState(options.jobs >= -1);
    int actualJobs = options.jobs == 0 ? 1 : options.jobs;  // Treat 0 jobs as a single task.

    // Unfortunately, the exec root cache is not shared with caches in the remote execution
    // client.
    fileCache = createBuildSingleFileCache(executor.getExecRoot());
    skyframeExecutor.setActionOutputRoot(actionOutputRoot);
    return new SkyframeBuilder(skyframeExecutor,
        new ActionCacheChecker(actionCache, env.getView().getArtifactFactory(), executionFilter,
            verboseExplanations),
        keepGoing, actualJobs, options.checkOutputFiles, fileCache,
        request.getBuildOptions().progressReportInterval);
  }

  /** Configures the global resource manager from --local_resources et al. */
  private void configureResourceManager(BuildRequest request) {
    ResourceManager resourceMgr = ResourceManager.instance();
    ExecutionOptions options = request.getOptions(ExecutionOptions.class);
    ResourceSet resources;
    if (options.availableResources != null) {
      resources = options.availableResources;
      resourceMgr.setRamUtilizationPercentage(100);
    } else {
      resources = LocalHostCapacity.getLocalHostCapacity();
      resourceMgr.setRamUtilizationPercentage(options.ramUtilizationPercentage);
    }
    resourceMgr.setAvailableResources(ResourceSet.create(
        resources.getMemoryMb(),
        resources.getCpuUsage(),
        resources.getIoUsage(),
        request.getExecutionOptions().usingLocalTestJobs()
            ? request.getExecutionOptions().localTestJobs : Integer.MAX_VALUE
    ));
  }

  /**
   * Writes the cache files to disk, reporting any errors that occurred during
   * writing.
   */
  private void saveCaches(ActionCache actionCache) {
    long actionCacheSizeInBytes = 0;
    long actionCacheSaveTime;

    AutoProfiler p = AutoProfiler.profiledAndLogged("Saving action cache", ProfilerTask.INFO,
        LOG);
    try {
      actionCacheSizeInBytes = actionCache.save();
    } catch (IOException e) {
      getReporter().handle(Event.error("I/O error while writing action log: " + e.getMessage()));
    } finally {
      actionCacheSaveTime = p.completeAndGetElapsedTimeNanos();
    }

    env.getEventBus().post(new CachesSavedEvent(actionCacheSaveTime, actionCacheSizeInBytes));
  }

  /**
   * Returns the ActionInputFileCache to use for this build: the first one supplied by a module,
   * or a fresh SingleBuildFileCache if no module provides one. At most one module may do so.
   */
  private ActionInputFileCache createBuildSingleFileCache(Path execRoot) {
    String cwd = execRoot.getPathString();
    FileSystem fs = runtime.getDirectories().getFileSystem();

    ActionInputFileCache cache = null;
    for (BlazeModule module : runtime.getBlazeModules()) {
      ActionInputFileCache pluggable = module.createActionInputCache(cwd, fs);
      if (pluggable != null) {
        Preconditions.checkState(cache == null);
        cache = pluggable;
      }
    }

    if (cache == null) {
      cache = new SingleBuildFileCache(cwd, fs);
    }
    return cache;
  }

  private Reporter getReporter() {
    return env.getReporter();
  }

  private Path getWorkspace() {
    return runtime.getWorkspace();
  }

  private Path getExecRoot() {
    return runtime.getExecRoot();
  }
}