aboutsummaryrefslogtreecommitdiffhomepage
path: root/src/main/java/com/google/devtools/build/lib/remote/TreeNodeRepository.java
blob: 0850fec75a868892ee41502b1d1c93ef48bcb159 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.devtools.build.lib.remote;

import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Interner;
import com.google.common.collect.Iterables;
import com.google.common.collect.TreeTraverser;
import com.google.devtools.build.lib.actions.ActionInput;
import com.google.devtools.build.lib.actions.ActionInputFileCache;
import com.google.devtools.build.lib.concurrent.BlazeInterners;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe;
import com.google.devtools.build.lib.remote.RemoteProtocol.ContentDigest;
import com.google.devtools.build.lib.remote.RemoteProtocol.FileNode;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.protobuf.ByteString;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.SortedMap;
import java.util.TreeMap;
import javax.annotation.Nullable;

/**
 * A factory and repository for {@link TreeNode} objects. Provides directory structure traversals,
 * computing and caching Merkle hashes on all objects.
 */
@ThreadSafe
public final class TreeNodeRepository extends TreeTraverser<TreeNodeRepository.TreeNode> {
  /**
   * A single node in a hierarchical directory structure. Leaves are the Artifacts, although we only
   * use the ActionInput interface. We assume that the objects used for the ActionInputs are unique
   * (same data corresponds to a canonical object in memory).
   */
  @Immutable
  @ThreadSafe
  public static final class TreeNode {

    // Precomputed at construction: nodes are interned and used heavily as HashMap keys, so
    // hashCode() must be cheap. See the two constructors for how the value is derived.
    private final int hashCode;
    private final ImmutableList<ChildEntry> childEntries; // no need to make it a map thus far.
    @Nullable private final ActionInput actionInput; // Null iff this is a directory.

    /** A pair of path segment, TreeNode. */
    @Immutable
    public static final class ChildEntry {
      private final String segment;
      private final TreeNode child;

      public ChildEntry(String segment, TreeNode child) {
        this.segment = segment;
        this.child = child;
      }

      public TreeNode getChild() {
        return child;
      }

      public String getSegment() {
        return segment;
      }

      @Override
      @SuppressWarnings("ReferenceEquality")
      public boolean equals(Object o) {
        if (o == this) {
          return true;
        }
        if (!(o instanceof ChildEntry)) {
          return false;
        }
        ChildEntry other = (ChildEntry) o;
        // Pointer comparisons only, because both the Path segments and the TreeNodes are interned.
        // NOTE(review): correctness depends on every segment string reaching this constructor
        // being interned; a non-interned but equal segment would make equal entries compare
        // unequal — confirm against all construction sites.
        return other.segment == segment && other.child == child;
      }

      @Override
      public int hashCode() {
        return Objects.hash(segment, child);
      }
    }

    // Should only be called by the TreeNodeRepository.
    // Directory node: hash is derived from the ordered list of child entries.
    private TreeNode(Iterable<ChildEntry> childEntries) {
      this.actionInput = null;
      this.childEntries = ImmutableList.copyOf(childEntries);
      hashCode = Arrays.hashCode(this.childEntries.toArray());
    }

    // Should only be called by the TreeNodeRepository.
    // Leaf node wrapping a single input file.
    private TreeNode(ActionInput actionInput) {
      this.actionInput = actionInput;
      this.childEntries = ImmutableList.of();
      hashCode = actionInput.hashCode(); // This will ensure efficient interning of TreeNodes as
      // long as all ActionInputs either implement data-based hashCode or are interned themselves.
    }

    /** Returns the wrapped input for a leaf node, or null for a directory node. */
    public ActionInput getActionInput() {
      return actionInput;
    }

    /** Returns the (possibly empty) child entries; always empty for a leaf. */
    public ImmutableList<ChildEntry> getChildEntries() {
      return childEntries;
    }

    /** True iff this node wraps an ActionInput, i.e. it represents a file, not a directory. */
    public boolean isLeaf() {
      return actionInput != null;
    }

    @Override
    public int hashCode() {
      return hashCode;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (!(o instanceof TreeNode)) {
        return false;
      }
      TreeNode otherNode = (TreeNode) o;
      // Full comparison of ActionInputs. If pointers are different, will compare paths.
      return Objects.equals(otherNode.actionInput, actionInput)
          && childEntries.equals(otherNode.childEntries);
    }

    // Renders this subtree as a multi-line string, indented by one space per tree level.
    private String toDebugStringAtLevel(int level) {
      char[] prefix = new char[level];
      Arrays.fill(prefix, ' ');
      StringBuilder sb = new StringBuilder();

      if (isLeaf()) {
        sb.append('\n');
        sb.append(prefix);
        sb.append("leaf: ");
        sb.append(actionInput);
      } else {
        for (ChildEntry entry : childEntries) {
          sb.append('\n');
          sb.append(prefix);
          sb.append(entry.segment);
          sb.append(entry.child.toDebugStringAtLevel(level + 1));
        }
      }
      return sb.toString();
    }

    /** Human-readable dump of the whole subtree; intended for debugging only. */
    public String toDebugString() {
      return toDebugStringAtLevel(0);
    }
  }

  // Keep only one canonical instance of every TreeNode in the repository.
  private final Interner<TreeNode> interner = BlazeInterners.newWeakInterner();
  // Merkle hashes are computed and cached by the repository, therefore execRoot must
  // be part of the state.
  private final Path execRoot;
  private final ActionInputFileCache inputFileCache;
  // The three caches below are populated together in getOrComputeFileNode (which is
  // synchronized); readers elsewhere rely on computeMerkleDigests having run first.
  private final Map<TreeNode, ContentDigest> treeNodeDigestCache = new HashMap<>();
  private final Map<ContentDigest, TreeNode> digestTreeNodeCache = new HashMap<>();
  private final Map<TreeNode, FileNode> fileNodeCache = new HashMap<>();

  public TreeNodeRepository(Path execRoot, ActionInputFileCache inputFileCache) {
    this.execRoot = execRoot;
    this.inputFileCache = inputFileCache;
  }

  public ActionInputFileCache getInputFileCache() {
    return inputFileCache;
  }

  /** TreeTraverser callback: a node's children are the TreeNodes of its child entries. */
  @Override
  public Iterable<TreeNode> children(TreeNode node) {
    return Iterables.transform(
        node.getChildEntries(),
        new Function<TreeNode.ChildEntry, TreeNode>() {
          @Override
          public TreeNode apply(TreeNode.ChildEntry entry) {
            return entry.getChild();
          }
        });
  }

  /** Traverse the directory structure in order (pre-order tree traversal). */
  public Iterable<TreeNode> descendants(TreeNode node) {
    return preOrderTraversal(node);
  }

  /**
   * Traverse the directory structure in order (pre-order tree traversal), return only the leaves.
   */
  public Iterable<TreeNode> leaves(TreeNode node) {
    return Iterables.filter(
        descendants(node),
        new Predicate<TreeNode>() {
          @Override
          public boolean apply(TreeNode node) {
            return node.isLeaf();
          }
        });
  }

  /**
   * Builds an interned tree from the given inputs, keyed and sorted by exec path. Inputs with
   * duplicate exec paths collapse to a single map entry (last one wins) before tree construction.
   */
  public TreeNode buildFromActionInputs(Iterable<? extends ActionInput> inputs) {
    TreeMap<PathFragment, ActionInput> sortedMap = new TreeMap<>();
    for (ActionInput input : inputs) {
      sortedMap.put(PathFragment.create(input.getExecPathString()), input);
    }
    return buildFromActionInputs(sortedMap);
  }

  /**
   * This function is a temporary and highly inefficient hack! It builds the tree from a ready list
   * of input files. TODO(olaola): switch to creating and maintaining the TreeNodeRepository based
   * on the build graph structure.
   */
  public TreeNode buildFromActionInputs(SortedMap<PathFragment, ActionInput> sortedMap) {
    ImmutableList.Builder<ImmutableList<String>> segments = ImmutableList.builder();
    for (PathFragment path : sortedMap.keySet()) {
      segments.add(path.getSegments());
    }
    ImmutableList<ActionInput> inputs = ImmutableList.copyOf(sortedMap.values());
    return buildParentNode(inputs, segments.build(), 0, inputs.size(), 0);
  }

  // Recursively builds the node for the path prefix of length segmentIndex shared by the inputs
  // in the half-open range [inputsStart, inputsEnd). Because inputs are sorted by path, inputs
  // under the same directory occupy a contiguous range; each maximal run sharing the same
  // segment at segmentIndex becomes one child entry.
  @SuppressWarnings("ReferenceEquality") // Segments are interned.
  private TreeNode buildParentNode(
      ImmutableList<ActionInput> inputs,
      ImmutableList<ImmutableList<String>> segments,
      int inputsStart,
      int inputsEnd,
      int segmentIndex) {
    if (segmentIndex == segments.get(inputsStart).size()) {
      // Leaf node reached. Must be unique.
      Preconditions.checkArgument(
          inputsStart == inputsEnd - 1, "Encountered two inputs with the same path.");
      // TODO: check that the actionInput is a single file!
      return interner.intern(new TreeNode(inputs.get(inputsStart)));
    }
    ArrayList<TreeNode.ChildEntry> entries = new ArrayList<>();
    String segment = segments.get(inputsStart).get(segmentIndex);
    for (int inputIndex = inputsStart; inputIndex < inputsEnd; ++inputIndex) {
      // End of the run of inputs sharing the current segment (either the range is exhausted or
      // the next input starts a different segment): recurse to build that child's subtree.
      // NOTE(review): the != comparison assumes segment strings are interned, per the
      // @SuppressWarnings above — confirm with PathFragment.getSegments().
      if (inputIndex + 1 == inputsEnd
          || segment != segments.get(inputIndex + 1).get(segmentIndex)) {
        entries.add(
            new TreeNode.ChildEntry(
                segment,
                buildParentNode(inputs, segments, inputsStart, inputIndex + 1, segmentIndex + 1)));
        if (inputIndex + 1 < inputsEnd) {
          // Advance to the next run.
          inputsStart = inputIndex + 1;
          segment = segments.get(inputsStart).get(segmentIndex);
        }
      }
    }
    return interner.intern(new TreeNode(entries));
  }

  /**
   * Returns the FileNode proto for this tree node, computing it and caching it together with its
   * digest on first use. Children must have been processed before their parent (as done by
   * computeMerkleDigests), since a directory's FileNode references its children's cached digests.
   */
  private synchronized FileNode getOrComputeFileNode(TreeNode node) throws IOException {
    // Assumes all child digests have already been computed!
    FileNode fileNode = fileNodeCache.get(node);
    if (fileNode == null) {
      FileNode.Builder b = FileNode.newBuilder();
      if (node.isLeaf()) {
        // Leaf: record the file's content digest and executable bit.
        ActionInput input = node.getActionInput();
        b.getFileMetadataBuilder()
            .setDigest(ContentDigests.getDigestFromInputCache(input, inputFileCache))
            .setExecutable(execRoot.getRelative(input.getExecPathString()).isExecutable());
      } else {
        // Directory: record one child entry per segment, pointing at the child's Merkle digest.
        for (TreeNode.ChildEntry entry : node.getChildEntries()) {
          ContentDigest childDigest = treeNodeDigestCache.get(entry.getChild());
          Preconditions.checkState(childDigest != null);
          b.addChildBuilder().setPath(entry.getSegment()).setDigest(childDigest);
        }
      }
      fileNode = b.build();
      // Populate all three caches together so they stay consistent.
      fileNodeCache.put(node, fileNode);
      ContentDigest digest = ContentDigests.computeDigest(fileNode);
      treeNodeDigestCache.put(node, digest);
      digestTreeNodeCache.put(digest, node);
    }
    return fileNode;
  }

  // Recursively traverses the tree, expanding and computing Merkle digests for nodes for which
  // they have not yet been computed and cached.
  public void computeMerkleDigests(TreeNode root) throws IOException {
    synchronized (this) {
      if (fileNodeCache.get(root) != null) {
        // Strong assumption: the cache is valid, i.e. parent present implies children present.
        return;
      }
    }
    if (root.isLeaf()) {
      // Load the digest into the ActionInputFileCache.
      inputFileCache.getDigest(root.getActionInput());
    } else {
      // Post-order: children first, so getOrComputeFileNode finds their digests cached.
      for (TreeNode child : children(root)) {
        computeMerkleDigests(child);
      }
    }
    getOrComputeFileNode(root);
  }

  /**
   * Should only be used after computeMerkleDigests has been called on one of the node ancestors.
   * Returns the precomputed digest.
   */
  public ContentDigest getMerkleDigest(TreeNode node) {
    return treeNodeDigestCache.get(node);
  }

  /**
   * Returns the precomputed digests for both data and metadata. Should only be used after
   * computeMerkleDigests has been called on one of the node ancestors.
   */
  public ImmutableCollection<ContentDigest> getAllDigests(TreeNode root) throws IOException {
    ImmutableSet.Builder<ContentDigest> digests = ImmutableSet.builder();
    for (TreeNode node : descendants(root)) {
      // Every descendant's Merkle digest, plus the raw content digest for each leaf file.
      digests.add(Preconditions.checkNotNull(treeNodeDigestCache.get(node)));
      if (node.isLeaf()) {
        digests.add(ContentDigests.getDigestFromInputCache(node.getActionInput(), inputFileCache));
      }
    }
    return digests.build();
  }

  /**
   * Serializes all of the subtree to the file node list. TODO(olaola): add a version that only
   * copies a part of the tree that we are interested in. Should only be used after
   * computeMerkleDigests has been called on one of the node ancestors.
   */
  // Note: this is not, strictly speaking, thread safe. If someone is deleting cached Merkle hashes
  // while this is executing, it will trigger an exception. But I think this is WAI.
  public ImmutableList<FileNode> treeToFileNodes(TreeNode root) throws IOException {
    ImmutableList.Builder<FileNode> fileNodes = ImmutableList.builder();
    for (TreeNode node : descendants(root)) {
      fileNodes.add(Preconditions.checkNotNull(fileNodeCache.get(node)));
    }
    return fileNodes.build();
  }

  /**
   * Should only be used on digests created by a call to computeMerkleDigests. Looks up ActionInputs
   * or FileNodes by cached digests and adds them to the lists.
   */
  public void getDataFromDigests(
      Iterable<ContentDigest> digests, List<ActionInput> actionInputs, List<FileNode> nodes) {
    for (ContentDigest digest : digests) {
      TreeNode treeNode = digestTreeNodeCache.get(digest);
      if (treeNode != null) {
        nodes.add(Preconditions.checkNotNull(fileNodeCache.get(treeNode)));
      } else { // If not there, it must be an ActionInput.
        // The input cache is keyed by the hex string of the digest, encoded as UTF-8 bytes.
        ByteString hexDigest = ByteString.copyFromUtf8(ContentDigests.toHexString(digest));
        actionInputs.add(Preconditions.checkNotNull(inputFileCache.getInputFromDigest(hexDigest)));
      }
    }
  }
}