diff --git a/src/main/java/com/google/devtools/build/lib/remote/AbstractActionInputPrefetcher.java b/src/main/java/com/google/devtools/build/lib/remote/AbstractActionInputPrefetcher.java
index b11515ed40b0e7..c170f9b88bf167 100644
--- a/src/main/java/com/google/devtools/build/lib/remote/AbstractActionInputPrefetcher.java
+++ b/src/main/java/com/google/devtools/build/lib/remote/AbstractActionInputPrefetcher.java
@@ -13,6 +13,8 @@
 // limitations under the License.
 package com.google.devtools.build.lib.remote;

+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
 import static com.google.common.util.concurrent.Futures.addCallback;
 import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
@@ -22,6 +24,7 @@
 import static com.google.devtools.build.lib.remote.util.RxUtils.toTransferResult;
 import static com.google.devtools.build.lib.remote.util.Utils.getFromFuture;

+import com.google.auto.value.AutoValue;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
@@ -42,7 +45,6 @@
 import com.google.devtools.build.lib.events.Event;
 import com.google.devtools.build.lib.events.ExtendedEventHandler.Postable;
 import com.google.devtools.build.lib.events.Reporter;
-import com.google.devtools.build.lib.remote.common.BulkTransferException;
 import com.google.devtools.build.lib.remote.common.CacheNotFoundException;
 import com.google.devtools.build.lib.remote.util.AsyncTaskCache;
 import com.google.devtools.build.lib.remote.util.RxUtils.TransferResult;
@@ -56,14 +58,13 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
+import java.util.Stack;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.regex.Pattern;
 import javax.annotation.Nullable;
+import javax.annotation.concurrent.GuardedBy;

 /**
  * Abstract implementation of {@link ActionInputPrefetcher} which implements the orchestration of
@@ -83,17 +84,30 @@ public abstract class AbstractActionInputPrefetcher implements ActionInputPrefet

   private final Set<ActionInput> missingActionInputs = Sets.newConcurrentHashSet();

-  private static class Context {
-    private final Set<Path> nonWritableDirs = Sets.newConcurrentHashSet();
+  // Tracks directories temporarily made writable by prefetcher calls.
+  // Concurrent calls may write to the same directories, so it's not safe to put back their
+  // permissions until there are no ongoing calls, as tracked by numOngoingCalls below.
+  private final Set<Path> temporarilyWritableDirectories = Sets.newConcurrentHashSet();

-    public void addNonWritableDir(Path dir) {
-      nonWritableDirs.add(dir);
-    }
+  // Tracks the number of currently ongoing prefetcher calls.
+  @GuardedBy("this")
+  private int numOngoingCalls = 0;

-    public void finalizeContext() throws IOException {
-      for (Path path : nonWritableDirs) {
-        path.setWritable(false);
-      }
+  /**
+   * A symlink in the output tree.
+ */ + + @AutoValue + static abstract class Symlink { + + abstract PathFragment getLinkExecPath(); + + abstract PathFragment getTargetExecPath(); + + static Symlink of(PathFragment linkExecPath, PathFragment targetExecPath) { + checkArgument(!linkExecPath.equals(targetExecPath)); + return new AutoValue_AbstractActionInputPrefetcher_Symlink(linkExecPath, + targetExecPath); } } @@ -192,8 +206,8 @@ protected ListenableFuture prefetchFiles( Iterable inputs, MetadataProvider metadataProvider, Priority priority) { - Map> trees = new HashMap<>(); List files = new ArrayList<>(); + for (ActionInput input : inputs) { // Source artifacts don't need to be fetched. if (input instanceof Artifact && ((Artifact) input).isSourceArtifact()) { @@ -205,197 +219,44 @@ protected ListenableFuture prefetchFiles( continue; } - if (input instanceof TreeFileArtifact) { - TreeFileArtifact treeFile = (TreeFileArtifact) input; - SpecialArtifact treeArtifact = treeFile.getParent(); - trees.computeIfAbsent(treeArtifact, unusedKey -> new ArrayList<>()).add(treeFile); - continue; - } - files.add(input); } - Context context = new Context(); - - Flowable treeDownloads = - Flowable.fromIterable(trees.entrySet()) - .flatMapSingle( - entry -> - toTransferResult( - prefetchInputTreeOrSymlink( - context, - metadataProvider, - entry.getKey(), - entry.getValue(), - priority))); - - Flowable fileDownloads = + Flowable transfers = Flowable.fromIterable(files) .flatMapSingle( input -> toTransferResult( - prefetchInputFileOrSymlink(context, metadataProvider, input, priority))); + prefetchFile(metadataProvider, input, priority))); - Flowable transfers = Flowable.merge(treeDownloads, fileDownloads); - Completable prefetch = - Completable.using( - () -> context, ctx -> mergeBulkTransfer(transfers), Context::finalizeContext) - .onErrorResumeNext(this::onErrorResumeNext); - return toListenableFuture(prefetch); - } - - private Completable prefetchInputTreeOrSymlink( - Context context, - MetadataProvider provider, - SpecialArtifact tree, - List treeFiles, - Priority priority) - throws IOException { + Completable prefetch = Completable.using(this::startPrefetching, + ctx -> mergeBulkTransfer(transfers), + this::stopPrefetching).onErrorResumeNext(this::onErrorResumeNext); - PathFragment execPath = tree.getExecPath(); - - FileArtifactValue treeMetadata = provider.getMetadata(tree); - // TODO(tjgq): Only download individual files that were requested within the tree. - // This isn't straightforward because multiple tree artifacts may share the same output tree - // when a ctx.actions.symlink is involved. - if (treeMetadata == null || !canDownloadAnyTreeFiles(treeFiles, treeMetadata)) { - return Completable.complete(); - } - - PathFragment prefetchExecPath = treeMetadata.getMaterializationExecPath().orElse(execPath); - - Completable prefetch = - prefetchInputTree( - context, provider, prefetchExecPath, tree, treeFiles, treeMetadata, priority); - - // If prefetching to a different path, plant a symlink into it. 
- if (!prefetchExecPath.equals(execPath)) { - Completable prefetchAndSymlink = - prefetch.doOnComplete(() -> createSymlink(execPath, prefetchExecPath)); - return downloadCache.executeIfNot(execRoot.getRelative(execPath), prefetchAndSymlink); - } - - return prefetch; + return toListenableFuture(prefetch); } - private boolean canDownloadAnyTreeFiles( - Iterable treeFiles, FileArtifactValue metadata) { - for (TreeFileArtifact treeFile : treeFiles) { - if (canDownloadFile(treeFile.getPath(), metadata)) { - return true; - } - } - return false; + private synchronized AbstractActionInputPrefetcher startPrefetching() { + numOngoingCalls++; + return this; } - private boolean shouldDownloadAnyTreeFiles( - Iterable treeFiles, FileArtifactValue metadata) { - for (TreeFileArtifact treeFile : treeFiles) { - if (shouldDownloadFile(treeFile.getPath(), metadata)) { - return true; + private synchronized void stopPrefetching(AbstractActionInputPrefetcher unused) + throws IOException { + numOngoingCalls--; + if (numOngoingCalls == 0) { + // Set output permissions on directories (files are handled in finalizeDownload), matching the + // behavior of SkyframeActionExecutor#checkOutputs when artifacts are produced by a locally + // executed action. + for (Path dir : temporarilyWritableDirectories) { + dir.chmod(outputPermissions.getPermissionsMode()); } + temporarilyWritableDirectories.clear(); } - return false; - } - - private Completable prefetchInputTree( - Context context, - MetadataProvider provider, - PathFragment execPath, - SpecialArtifact tree, - List treeFiles, - FileArtifactValue treeMetadata, - Priority priority) { - Path treeRoot = execRoot.getRelative(execPath); - HashMap treeFileTmpPathMap = new HashMap<>(); - - Flowable transfers = - Flowable.fromIterable(treeFiles) - .flatMapSingle( - treeFile -> { - FileArtifactValue metadata = provider.getMetadata(treeFile); - - Path tempPath = tempPathGenerator.generateTempPath(); - treeFileTmpPathMap.put(treeFile, tempPath); - - return toTransferResult( - toCompletable( - () -> - doDownloadFile(tempPath, treeFile.getExecPath(), metadata, priority), - directExecutor())); - }); - - AtomicBoolean completed = new AtomicBoolean(); - Completable download = - mergeBulkTransfer(transfers) - .doOnComplete( - () -> { - HashSet dirs = new HashSet<>(); - - // Even though the root directory for a tree artifact is created prior to action - // execution, we might be prefetching to a different directory that doesn't yet - // exist (when FileArtifactValue#getMaterializationExecPath() is present). - // In any case, we need to make it writable to move files into it. - createWritableDirectory(treeRoot); - dirs.add(treeRoot); - - for (Map.Entry entry : treeFileTmpPathMap.entrySet()) { - TreeFileArtifact treeFile = entry.getKey(); - Path tempPath = entry.getValue(); - - Path path = treeRoot.getRelative(treeFile.getParentRelativePath()); - Path dir = treeRoot; - for (String segment : treeFile.getParentRelativePath().segments()) { - dir = dir.getRelative(segment); - if (dir.equals(path)) { - break; - } - if (dirs.add(dir)) { - createWritableDirectory(dir); - } - } - checkState(dir.equals(path)); - finalizeDownload(context, tempPath, path); - } - - for (Path dir : dirs) { - // Change permission of all directories of a tree artifact (files are - // changed inside {@code finalizeDownload}) in order to match the behaviour when - // the tree artifact is generated locally. 
In that case, permission of all files - // and directories inside a tree artifact is changed within {@code - // checkOutputs()}. - dir.chmod(outputPermissions.getPermissionsMode()); - } - - completed.set(true); - }) - .doOnError( - error -> { - if (BulkTransferException.anyCausedByCacheNotFoundException(error)) { - missingActionInputs.add(tree); - } - }) - .doFinally( - () -> { - if (!completed.get()) { - for (Map.Entry entry : treeFileTmpPathMap.entrySet()) { - deletePartialDownload(entry.getValue()); - } - } - }); - return downloadCache.executeIfNot( - treeRoot, - Completable.defer( - () -> { - if (shouldDownloadAnyTreeFiles(treeFiles, treeMetadata)) { - return download; - } - return Completable.complete(); - })); } - private Completable prefetchInputFileOrSymlink( - Context context, MetadataProvider metadataProvider, ActionInput input, Priority priority) + private Completable prefetchFile( + MetadataProvider metadataProvider, ActionInput input, Priority priority) throws IOException { if (input instanceof VirtualActionInput) { prefetchVirtualActionInput((VirtualActionInput) input); @@ -409,20 +270,68 @@ private Completable prefetchInputFileOrSymlink( return Completable.complete(); } - PathFragment prefetchExecPath = metadata.getMaterializationExecPath().orElse(execPath); + @Nullable Symlink symlink = maybeGetSymlink(input, metadata, metadataProvider); + + if (symlink != null) { + checkState(execPath.startsWith(symlink.getLinkExecPath())); + execPath = symlink.getTargetExecPath() + .getRelative(execPath.relativeTo(symlink.getLinkExecPath())); + } + + @Nullable PathFragment treeRootExecPath = getTreeRoot(execPath, input); - Completable prefetch = - downloadFileNoCheckRx( - context, execRoot.getRelative(prefetchExecPath), input, metadata, priority); + Completable result = + downloadFileNoCheckRx(execRoot.getRelative(execPath), + treeRootExecPath != null ? execRoot.getRelative(treeRootExecPath) : null, input, + metadata, + priority); - // If prefetching to a different path, plant a symlink into it. - if (!prefetchExecPath.equals(execPath)) { - Completable prefetchAndSymlink = - prefetch.doOnComplete(() -> createSymlink(execPath, prefetchExecPath)); - return downloadCache.executeIfNot(execRoot.getRelative(execPath), prefetchAndSymlink); + if (symlink != null) { + result = result.andThen(plantSymlink(symlink)); } - return prefetch; + return result; + } + + /** + * For an input belonging to a tree artifact, returns the tree artifact root. Otherwise, returns + * null. + */ + @Nullable + private PathFragment getTreeRoot(PathFragment execPath, ActionInput input) { + if (input instanceof TreeFileArtifact) { + // Derive root from execPath because we may prefetch to a different location. + int numChildComponents = ((TreeFileArtifact) input).getParentRelativePath().segmentCount(); + return execPath.subFragment(0, + execPath.segmentCount() - numChildComponents); + } + return null; + } + + /** + * Returns the symlink to be planted in the output tree for artifacts that are prefetched into a + * different location. + * + *

Some artifacts (notably, those created by {@code ctx.actions.symlink}) are materialized in + * the output tree as a symlink to another artifact, as indicated by the + * {@link FileArtifactValue#getMaterializationExecPath()} field in their (or their parent tree + * artifact's) metadata.

+ */ + @Nullable + private Symlink maybeGetSymlink(ActionInput input, + FileArtifactValue metadata, MetadataProvider metadataProvider) throws IOException { + if (input instanceof TreeFileArtifact) { + // Check whether the entire tree artifact should be prefetched into a separate location. + SpecialArtifact treeArtifact = ((TreeFileArtifact) input).getParent(); + FileArtifactValue treeMetadata = checkNotNull(metadataProvider.getMetadata(treeArtifact)); + return maybeGetSymlink(treeArtifact, treeMetadata, metadataProvider); + } + PathFragment execPath = input.getExecPath(); + PathFragment materializationExecPath = metadata.getMaterializationExecPath().orElse(execPath); + if (!materializationExecPath.equals(execPath)) { + return Symlink.of(execPath, materializationExecPath); + } + return null; } /** @@ -432,20 +341,21 @@ private Completable prefetchInputFileOrSymlink( * download finished. */ private Completable downloadFileRx( - Context context, Path path, + @Nullable Path treeRoot, @Nullable ActionInput actionInput, FileArtifactValue metadata, Priority priority) { if (!canDownloadFile(path, metadata)) { return Completable.complete(); } - return downloadFileNoCheckRx(context, path, actionInput, metadata, priority); + return downloadFileNoCheckRx(path, treeRoot, actionInput, metadata, + priority); } private Completable downloadFileNoCheckRx( - Context context, Path path, + @Nullable Path treeRoot, @Nullable ActionInput actionInput, FileArtifactValue metadata, Priority priority) { @@ -465,13 +375,13 @@ private Completable downloadFileNoCheckRx( tempPathGenerator::generateTempPath, tempPath -> toCompletable( - () -> - doDownloadFile( - tempPath, finalPath.relativeTo(execRoot), metadata, priority), - directExecutor()) + () -> + doDownloadFile( + tempPath, finalPath.relativeTo(execRoot), metadata, priority), + directExecutor()) .doOnComplete( () -> { - finalizeDownload(context, tempPath, finalPath); + finalizeDownload(treeRoot, tempPath, finalPath); completed.set(true); }), tempPath -> { @@ -517,33 +427,51 @@ protected ListenableFuture downloadFileAsync( @Nullable ActionInput actionInput, FileArtifactValue metadata, Priority priority) { - Context context = new Context(); return toListenableFuture( - Completable.using( - () -> context, - ctx -> - downloadFileRx( - context, - execRoot.getFileSystem().getPath(path), - actionInput, - metadata, - priority), - Context::finalizeContext)); + Completable.using(this::startPrefetching, + ctx -> downloadFileRx( + execRoot.getFileSystem().getPath(path), + /* treeRoot= */ null, + actionInput, + metadata, + priority), this::stopPrefetching)); } - private void finalizeDownload(Context context, Path tmpPath, Path path) throws IOException { - Path parentDir = path.getParentDirectory(); - // In case the parent directory of the destination is not writable, temporarily change it to - // writable. b/254844173. - if (parentDir != null && !parentDir.isWritable()) { - context.addNonWritableDir(parentDir); - parentDir.setWritable(true); + private void finalizeDownload(@Nullable Path treeRoot, Path tmpPath, Path finalPath) + throws IOException { + Path parentDir = checkNotNull(finalPath.getParentDirectory()); + + if (treeRoot != null) { + checkState(parentDir.startsWith(treeRoot)); + + // Create intermediate tree artifact directories. + // Be careful to minimize filesystem calls when prefetching multiple files into the same tree. 
+ Stack dirs = new Stack<>(); + for (Path dir = parentDir; ; dir = dir.getParentDirectory()) { + dirs.push(dir); + if (dir.exists() || dir.equals(treeRoot)) { + break; + } + } + while (!dirs.empty()) { + Path dir = dirs.pop(); + dir.createWritableDirectory(); // create or make writable + temporarilyWritableDirectories.add(dir); + } + } else { + // If the parent directory is not writable, temporarily make it so. + // This is needed when fetching a non-tree artifact nested inside a tree artifact, or a tree + // artifact inside a fileset (see b/254844173 for the latter). + if (!parentDir.isWritable()) { + parentDir.setWritable(true); + temporarilyWritableDirectories.add(parentDir); + } } - // The permission of output file is changed after action execution. We manually change - // the permission here for the downloaded file to keep this behaviour consistent. + // Set output permissions, matching the behavior of SkyframeActionExecutor#checkOutputs when the + // artifact is produced by a locally executed action. tmpPath.chmod(outputPermissions.getPermissionsMode()); - FileSystemUtils.moveFile(tmpPath, path); + FileSystemUtils.moveFile(tmpPath, finalPath); } private void deletePartialDownload(Path path) { @@ -555,18 +483,18 @@ private void deletePartialDownload(Path path) { } } - private void createWritableDirectory(Path dir) throws IOException { - dir.createDirectory(); - dir.setWritable(true); - } - - private void createSymlink(PathFragment linkPath, PathFragment targetPath) throws IOException { - Path link = execRoot.getRelative(linkPath); - Path target = execRoot.getRelative(targetPath); - // Delete the link path if it already exists. - // This will happen for output directories, which get created before the action runs. - link.delete(); - link.createSymbolicLink(target); + private Completable plantSymlink(Symlink symlink) { + return downloadCache.executeIfNot( + execRoot.getRelative(symlink.getLinkExecPath()), + Completable.defer(() -> { + Path link = execRoot.getRelative(symlink.getLinkExecPath()); + Path target = execRoot.getRelative(symlink.getTargetExecPath()); + // Delete the link path if it already exists. + // This will happen for output directories, which get created before the action runs. + link.delete(); + link.createSymbolicLink(target); + return Completable.complete(); + })); } public ImmutableSet downloadedFiles() { diff --git a/src/main/java/com/google/devtools/build/lib/remote/BUILD b/src/main/java/com/google/devtools/build/lib/remote/BUILD index 46173651621c7e..7d8f38ae75801b 100644 --- a/src/main/java/com/google/devtools/build/lib/remote/BUILD +++ b/src/main/java/com/google/devtools/build/lib/remote/BUILD @@ -193,6 +193,7 @@ java_library( "//src/main/java/com/google/devtools/build/lib/remote/util", "//src/main/java/com/google/devtools/build/lib/vfs", "//src/main/java/com/google/devtools/build/lib/vfs:pathfragment", + "//third_party:auto_value", "//third_party:flogger", "//third_party:guava", "//third_party:jsr305", diff --git a/src/test/java/com/google/devtools/build/lib/remote/ActionInputPrefetcherTestBase.java b/src/test/java/com/google/devtools/build/lib/remote/ActionInputPrefetcherTestBase.java index af0aa4adcfd058..e27038010bcd13 100644 --- a/src/test/java/com/google/devtools/build/lib/remote/ActionInputPrefetcherTestBase.java +++ b/src/test/java/com/google/devtools/build/lib/remote/ActionInputPrefetcherTestBase.java @@ -13,8 +13,10 @@ // limitations under the License. 
package com.google.devtools.build.lib.remote; +import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Throwables.throwIfInstanceOf; import static com.google.common.truth.Truth.assertThat; +import static com.google.common.truth.Truth.assertWithMessage; import static com.google.devtools.build.lib.actions.util.ActionsTestUtil.createTreeArtifactWithGeneratingAction; import static java.nio.charset.StandardCharsets.UTF_8; import static org.junit.Assert.assertThrows; @@ -48,10 +50,12 @@ import com.google.devtools.build.lib.skyframe.TreeArtifactValue; import com.google.devtools.build.lib.util.Pair; import com.google.devtools.build.lib.vfs.DigestHashFunction; +import com.google.devtools.build.lib.vfs.Dirent; import com.google.devtools.build.lib.vfs.FileSystem; import com.google.devtools.build.lib.vfs.FileSystemUtils; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; +import com.google.devtools.build.lib.vfs.Symlinks; import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem; import java.io.IOException; import java.util.HashMap; @@ -119,43 +123,61 @@ protected Artifact createRemoteArtifact( protected Pair> createRemoteTreeArtifact( String pathFragment, - Map contentMap, + Map localContentMap, + Map remoteContentMap, @Nullable PathFragment materializationExecPath, Map metadata, Map cas) throws IOException { SpecialArtifact parent = createTreeArtifactWithGeneratingAction(artifactRoot, pathFragment); + parent.getPath().createDirectoryAndParents(); parent.getPath().chmod(0555); + TreeArtifactValue.Builder treeBuilder = TreeArtifactValue.newBuilder(parent); - for (Map.Entry entry : contentMap.entrySet()) { - byte[] contentsBytes = entry.getValue().getBytes(UTF_8); - HashCode hashCode = HASH_FUNCTION.getHashFunction().hashBytes(contentsBytes); + for (Map.Entry entry : localContentMap.entrySet()) { + TreeFileArtifact child = + TreeFileArtifact.createTreeOutput(parent, PathFragment.create(entry.getKey())); + byte[] contents = entry.getValue().getBytes(UTF_8); + HashCode hashCode = HASH_FUNCTION.getHashFunction().hashBytes(contents); + FileArtifactValue childValue = FileArtifactValue.createForNormalFile( + hashCode.asBytes(), /* proxy= */ null, contents.length); + treeBuilder.putChild(child, childValue); + metadata.put(child, childValue); + cas.put(hashCode, contents); + } + for (Map.Entry entry : remoteContentMap.entrySet()) { TreeFileArtifact child = TreeFileArtifact.createTreeOutput(parent, PathFragment.create(entry.getKey())); + byte[] contents = entry.getValue().getBytes(UTF_8); + HashCode hashCode = HASH_FUNCTION.getHashFunction().hashBytes(contents); RemoteFileArtifactValue childValue = RemoteFileArtifactValue.create( - hashCode.asBytes(), contentsBytes.length, /* locationIndex= */ 1); + hashCode.asBytes(), contents.length, /* locationIndex= */ 1); treeBuilder.putChild(child, childValue); metadata.put(child, childValue); - cas.put(hashCode, contentsBytes); + cas.put(hashCode, contents); } if (materializationExecPath != null) { treeBuilder.setMaterializationExecPath(materializationExecPath); } TreeArtifactValue treeValue = treeBuilder.build(); + metadata.put(parent, treeValue.getMetadata()); + return Pair.of(parent, treeValue.getChildren().asList()); } protected Pair> createRemoteTreeArtifact( String pathFragment, - Map contentMap, + Map localContentMap, + Map remoteContentMap, Map metadata, Map cas) throws IOException { return createRemoteTreeArtifact( - pathFragment, contentMap, /* 
materializationExecPath= */ null, metadata, cas); + pathFragment, localContentMap, remoteContentMap, /* materializationExecPath= */ null, + metadata, cas); } protected abstract AbstractActionInputPrefetcher createPrefetcher(Map cas); @@ -215,7 +237,7 @@ public void prefetchFiles_downloadRemoteFiles() throws Exception { } @Test - public void prefetchFiles_downloadRemoteFiles_withmaterializationExecPath() throws Exception { + public void prefetchFiles_downloadRemoteFiles_withMaterializationExecPath() throws Exception { Map metadata = new HashMap<>(); Map cas = new HashMap<>(); PathFragment targetExecPath = artifactRoot.getExecPath().getChild("target"); @@ -238,64 +260,98 @@ public void prefetchFiles_downloadRemoteFiles_withmaterializationExecPath() thro public void prefetchFiles_downloadRemoteTrees() throws Exception { Map metadata = new HashMap<>(); Map cas = new HashMap<>(); - Pair> tree = + Pair> treeAndChildren = createRemoteTreeArtifact( "dir", + /* localContents= */ ImmutableMap.of(), + /* remoteContents= */ ImmutableMap.of("file1", "content1", "nested_dir/file2", "content2"), metadata, cas); - SpecialArtifact parent = tree.getFirst(); - Artifact firstChild = tree.getSecond().get(0); - Artifact secondChild = tree.getSecond().get(1); + SpecialArtifact tree = treeAndChildren.getFirst(); + ImmutableList children = treeAndChildren.getSecond(); + Artifact firstChild = children.get(0); + Artifact secondChild = children.get(1); MetadataProvider metadataProvider = new StaticMetadataProvider(metadata); AbstractActionInputPrefetcher prefetcher = createPrefetcher(cas); - wait(prefetcher.prefetchFiles(tree.getSecond(), metadataProvider)); + wait(prefetcher.prefetchFiles(children, metadataProvider)); - assertReadableNonWritableAndExecutable(parent.getPath()); assertThat(FileSystemUtils.readContent(firstChild.getPath(), UTF_8)).isEqualTo("content1"); - assertReadableNonWritableAndExecutable(firstChild.getPath()); assertThat(FileSystemUtils.readContent(secondChild.getPath(), UTF_8)).isEqualTo("content2"); - assertReadableNonWritableAndExecutable(secondChild.getPath()); - assertThat(prefetcher.downloadedFiles()).containsExactly(parent.getPath()); + + assertTreeReadableNonWritableAndExecutable(tree.getPath()); + + assertThat(prefetcher.downloadedFiles()).containsExactly(firstChild.getPath(), + secondChild.getPath()); assertThat(prefetcher.downloadsInProgress()).isEmpty(); - assertReadableNonWritableAndExecutable(parent.getPath().getRelative("nested_dir")); } @Test - public void prefetchFiles_downloadRemoteTrees_withmaterializationExecPath() throws Exception { + public void prefetchFiles_downloadRemoteTrees_partial() throws Exception { + Map metadata = new HashMap<>(); + Map cas = new HashMap<>(); + Pair> treeAndChildren = + createRemoteTreeArtifact( + "dir", + /* localContents= */ ImmutableMap.of("file1", "content1"), + /* remoteContents= */ ImmutableMap.of("file2", "content2"), + metadata, + cas); + SpecialArtifact tree = treeAndChildren.getFirst(); + ImmutableList children = treeAndChildren.getSecond(); + Artifact firstChild = children.get(0); + Artifact secondChild = children.get(1); + + MetadataProvider metadataProvider = new StaticMetadataProvider(metadata); + AbstractActionInputPrefetcher prefetcher = createPrefetcher(cas); + + wait(prefetcher.prefetchFiles(ImmutableList.of(firstChild, secondChild), metadataProvider)); + + assertThat(firstChild.getPath().exists()).isFalse(); + assertThat(FileSystemUtils.readContent(secondChild.getPath(), UTF_8)).isEqualTo("content2"); + 
+    assertTreeReadableNonWritableAndExecutable(tree.getPath());
+    assertThat(prefetcher.downloadedFiles()).containsExactly(secondChild.getPath());
+  }
+
+  @Test
+  public void prefetchFiles_downloadRemoteTrees_withMaterializationExecPath() throws Exception {
     Map<ActionInput, FileArtifactValue> metadata = new HashMap<>();
     Map<HashCode, byte[]> cas = new HashMap<>();
     PathFragment targetExecPath = artifactRoot.getExecPath().getChild("target");
-    Pair<SpecialArtifact, ImmutableList<TreeFileArtifact>> tree =
+    Pair<SpecialArtifact, ImmutableList<TreeFileArtifact>> treeAndChildren =
         createRemoteTreeArtifact(
             "dir",
+            /* localContents= */ ImmutableMap.of(),
+            /* remoteContents= */ ImmutableMap.of("file1", "content1", "nested_dir/file2", "content2"),
             targetExecPath,
             metadata,
             cas);
-    SpecialArtifact parent = tree.getFirst();
-    Artifact firstChild = tree.getSecond().get(0);
-    Artifact secondChild = tree.getSecond().get(1);
+    SpecialArtifact tree = treeAndChildren.getFirst();
+    ImmutableList<TreeFileArtifact> children = treeAndChildren.getSecond();
+    Artifact firstChild = children.get(0);
+    Artifact secondChild = children.get(1);

     MetadataProvider metadataProvider = new StaticMetadataProvider(metadata);
     AbstractActionInputPrefetcher prefetcher = createPrefetcher(cas);

-    wait(prefetcher.prefetchFiles(tree.getSecond(), metadataProvider));
+    wait(prefetcher.prefetchFiles(children, metadataProvider));

-    assertThat(parent.getPath().isSymbolicLink()).isTrue();
-    assertThat(parent.getPath().readSymbolicLink())
+    assertThat(tree.getPath().isSymbolicLink()).isTrue();
+    assertThat(tree.getPath().readSymbolicLink())
         .isEqualTo(execRoot.getRelative(targetExecPath).asFragment());
-    assertReadableNonWritableAndExecutable(parent.getPath());
     assertThat(FileSystemUtils.readContent(firstChild.getPath(), UTF_8)).isEqualTo("content1");
-    assertReadableNonWritableAndExecutable(firstChild.getPath());
     assertThat(FileSystemUtils.readContent(secondChild.getPath(), UTF_8)).isEqualTo("content2");
-    assertReadableNonWritableAndExecutable(secondChild.getPath());
-    assertThat(prefetcher.downloadedFiles())
-        .containsExactly(parent.getPath(), execRoot.getRelative(targetExecPath));
+
+    assertTreeReadableNonWritableAndExecutable(execRoot.getRelative(targetExecPath));
+
+    assertThat(prefetcher.downloadedFiles()).containsExactly(
+        tree.getPath(),
+        execRoot.getRelative(targetExecPath.getRelative(firstChild.getParentRelativePath())),
+        execRoot.getRelative(targetExecPath.getRelative(secondChild.getParentRelativePath())));
     assertThat(prefetcher.downloadsInProgress()).isEmpty();
-    assertReadableNonWritableAndExecutable(parent.getPath().getRelative("nested_dir"));
   }

   @Test
@@ -315,7 +371,7 @@ public void prefetchFiles_missingFiles_fails() throws Exception {

   @Test
   public void prefetchFiles_ignoreNonRemoteFiles() throws Exception {
-    // Test that files that are not remote are not downloaded
+    // Test that non-remote files are not downloaded.
     Path p = execRoot.getRelative(artifactRoot.getExecPath()).getRelative("file1");
     FileSystemUtils.writeContent(p, UTF_8, "hello world");

@@ -330,6 +386,35 @@ public void prefetchFiles_ignoreNonRemoteFiles() throws Exception {
     assertThat(prefetcher.downloadsInProgress()).isEmpty();
   }

+  @Test
+  public void prefetchFiles_ignoreNonRemoteFiles_tree() throws Exception {
+    // Test that non-remote tree files are not downloaded, but other files in the tree are.
+
+    Map<ActionInput, FileArtifactValue> metadata = new HashMap<>();
+    Map<HashCode, byte[]> cas = new HashMap<>();
+    Pair<SpecialArtifact, ImmutableList<TreeFileArtifact>> treeAndChildren =
+        createRemoteTreeArtifact(
+            "dir",
+            ImmutableMap.of("file1", "content1"),
+            ImmutableMap.of("file2", "content2"),
+            metadata,
+            cas);
+    SpecialArtifact tree = treeAndChildren.getFirst();
+    ImmutableList<TreeFileArtifact> children = treeAndChildren.getSecond();
+    Artifact firstChild = children.get(0);
+    Artifact secondChild = children.get(1);
+
+    MetadataProvider metadataProvider = new StaticMetadataProvider(metadata);
+    AbstractActionInputPrefetcher prefetcher = createPrefetcher(cas);
+
+    wait(prefetcher.prefetchFiles(children, metadataProvider));
+
+    assertThat(firstChild.getPath().exists()).isFalse();
+    assertThat(FileSystemUtils.readContent(secondChild.getPath(), UTF_8)).isEqualTo("content2");
+    assertTreeReadableNonWritableAndExecutable(tree.getPath());
+    assertThat(prefetcher.downloadedFiles()).containsExactly(secondChild.getPath());
+  }
+
   @Test
   public void prefetchFiles_multipleThreads_downloadIsCancelled() throws Exception {
     // Test shared downloads are cancelled if all threads/callers are interrupted
@@ -544,8 +629,18 @@ protected static void mockDownload(
   }

   private void assertReadableNonWritableAndExecutable(Path path) throws IOException {
-    assertThat(path.isReadable()).isTrue();
-    assertThat(path.isWritable()).isFalse();
-    assertThat(path.isExecutable()).isTrue();
+    assertWithMessage(path + " should be readable").that(path.isReadable()).isTrue();
+    assertWithMessage(path + " should not be writable").that(path.isWritable()).isFalse();
+    assertWithMessage(path + " should be executable").that(path.isExecutable()).isTrue();
   }
+
+  private void assertTreeReadableNonWritableAndExecutable(Path path) throws IOException {
+    checkState(path.isDirectory());
+    assertReadableNonWritableAndExecutable(path);
+    for (Dirent dirent : path.readdir(Symlinks.NOFOLLOW)) {
+      if (dirent.getType().equals(Dirent.Type.DIRECTORY)) {
+        assertTreeReadableNonWritableAndExecutable(path.getChild(dirent.getName()));
+      }
+    }
+  }
 }
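The temporarilyWritableDirectories set and the numOngoingCalls counter introduced in the prefetcher implement a small reference-counting scheme: every prefetcher call bumps the counter on entry, and only the call that drops it back to zero restores directory permissions, since concurrent calls may still be moving files into the same directories. A minimal standalone sketch of that pattern, using java.nio.file instead of Bazel's vfs and a hypothetical WritableDirTracker class (the class name and POSIX permission strings are illustrative assumptions, not part of the change):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

final class WritableDirTracker {
  // Directories made writable while at least one prefetch call is in flight.
  private final Set<Path> temporarilyWritable = ConcurrentHashMap.newKeySet();

  // Number of prefetch calls currently in flight; guarded by "this".
  private int ongoingCalls = 0;

  synchronized void enter() {
    ongoingCalls++;
  }

  void makeWritable(Path dir) throws IOException {
    Files.setPosixFilePermissions(dir, PosixFilePermissions.fromString("rwxr-xr-x"));
    temporarilyWritable.add(dir);
  }

  synchronized void exit() throws IOException {
    ongoingCalls--;
    if (ongoingCalls == 0) {
      // Only restore permissions once no concurrent call can still be writing into these
      // directories.
      for (Path dir : temporarilyWritable) {
        Files.setPosixFilePermissions(dir, PosixFilePermissions.fromString("r-xr-xr-x"));
      }
      temporarilyWritable.clear();
    }
  }
}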
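In prefetchFiles() and downloadFileAsync(), the start/stop bookkeeping is tied to the merged transfers with RxJava's Completable.using(), which guarantees the cleanup step runs whether the downloads complete, fail, or are cancelled. A minimal sketch of that operator, assuming RxJava 3 (io.reactivex.rxjava3) is on the classpath; the AtomicInteger merely stands in for the prefetcher's own counter and the printlns stand in for the real work:

import io.reactivex.rxjava3.core.Completable;
import java.util.concurrent.atomic.AtomicInteger;

final class UsingPatternSketch {
  private static final AtomicInteger ongoing = new AtomicInteger();

  public static void main(String[] args) {
    // Stand-in for the merged per-file download Completables.
    Completable transfers = Completable.fromAction(() -> System.out.println("downloading..."));

    Completable prefetch =
        Completable.using(
            () -> ongoing.incrementAndGet(), // plays the role of startPrefetching()
            unused -> transfers,             // the bulk transfer to run while the resource is held
            unused -> {                      // plays the role of stopPrefetching(); runs on
                                             // completion, error, or cancellation
              if (ongoing.decrementAndGet() == 0) {
                System.out.println("last call finished; restore directory permissions here");
              }
            });

    prefetch.blockingAwait();
  }
}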
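getTreeRoot() recovers the tree artifact root from a tree file's (possibly rewritten) exec path by dropping as many trailing segments as the file's parent-relative path contains. The same arithmetic, sketched with plain slash-separated strings instead of Bazel's PathFragment:

final class TreeRootSketch {
  // Strip the parent-relative segments off the end of the exec path to get the tree root.
  static String treeRoot(String execPath, String parentRelativePath) {
    int childSegments = parentRelativePath.split("/").length;
    String[] segments = execPath.split("/");
    return String.join("/", java.util.Arrays.copyOf(segments, segments.length - childSegments));
  }

  public static void main(String[] args) {
    // A tree file "bin/dir/nested/file2" whose parent-relative path is "nested/file2"
    // belongs to the tree artifact rooted at "bin/dir".
    System.out.println(treeRoot("bin/dir/nested/file2", "nested/file2")); // prints bin/dir
  }
}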
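maybeGetSymlink() compares an input's declared exec path with the materialization exec path carried in its metadata (or, for a file inside a tree artifact, in the parent tree's metadata); when the two differ, the download is redirected under the target path and the symlink is planted once the download completes. A simplified sketch with Optional&lt;String&gt; standing in for FileArtifactValue#getMaterializationExecPath() and strings standing in for PathFragment (the helper names are illustrative):

import java.util.Optional;

final class MaterializationSketch {
  /** Returns a {linkExecPath, targetExecPath} pair, or null when no symlink needs planting. */
  static String[] maybeSymlink(String execPath, Optional<String> materializationExecPath) {
    String target = materializationExecPath.orElse(execPath);
    return target.equals(execPath) ? null : new String[] {execPath, target};
  }

  public static void main(String[] args) {
    String execPath = "bin/link/file";
    // ctx.actions.symlink-style output: the metadata says the content actually lives under
    // "bin/target", while the declared location is "bin/link".
    String[] symlink = maybeSymlink("bin/link", Optional.of("bin/target"));
    if (symlink != null) {
      // Rewrite the download destination under the symlink target; the symlink itself is planted
      // after the download finishes: bin/link -> bin/target, file fetched to bin/target/file.
      String downloadPath = symlink[1] + "/" + execPath.substring(symlink[0].length() + 1);
      System.out.println(downloadPath + " (then symlink " + symlink[0] + " -> " + symlink[1] + ")");
    }
  }
}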
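For a file inside a tree artifact, finalizeDownload() first walks up from the destination's parent until it reaches an existing directory or the tree root, then creates (or makes writable) each missing directory top-down, remembering it so the permissions can be restored in bulk when the last ongoing call finishes. A sketch of that walk using java.nio.file rather than Bazel's vfs Path; the POSIX permission strings and the assumption that the tree root's own parent already exists are illustrative:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Set;

final class TreeDirsSketch {
  static void ensureWritableDirs(Path treeRoot, Path parentDir, Set<Path> temporarilyWritable)
      throws IOException {
    if (!parentDir.startsWith(treeRoot)) {
      throw new IllegalArgumentException(parentDir + " is not under " + treeRoot);
    }
    // Walk up until the first directory that already exists, or the tree root itself.
    Deque<Path> dirs = new ArrayDeque<>();
    for (Path dir = parentDir; ; dir = dir.getParent()) {
      dirs.push(dir);
      if (Files.exists(dir) || dir.equals(treeRoot)) {
        break;
      }
    }
    // Create top-down so each directory's parent exists, and keep every touched directory
    // writable until the enclosing prefetch calls are done.
    while (!dirs.isEmpty()) {
      Path dir = dirs.pop();
      if (!Files.exists(dir)) {
        Files.createDirectory(dir);
      }
      Files.setPosixFilePermissions(dir, PosixFilePermissions.fromString("rwxr-xr-x"));
      temporarilyWritable.add(dir);
    }
  }
}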