in src/rust/engine/process_execution/src/remote.rs [625:733]
/// Converts the output files and output directories reported in an
/// `ExecuteResponse` into a single merged `Digest` rooted at the process's
/// working directory.
///
/// Output directories are reported by the server as tree digests rooted at the
/// directory's own path, so each one is re-wrapped in parent `Directory` protos
/// (one per path component) before merging. Output files are turned into a
/// synthesized `Directory` via `Snapshot::digest_from_path_stats`, re-using the
/// digests the server already reported rather than re-reading file contents.
fn extract_output_files(
&self,
execute_response: &bazel_protos::remote_execution::ExecuteResponse,
) -> BoxFuture<Digest, ExecutionError> {
// Get Digests of output Directories.
// Then we'll make a Directory for the output files, and merge them.
// Pre-size for every reported directory plus the synthesized file Directory
// appended at the end.
let mut directory_digests =
Vec::with_capacity(execute_response.get_result().get_output_directories().len() + 1);
// TODO: Maybe take rather than clone
let output_directories = execute_response
.get_result()
.get_output_directories()
.to_owned();
for dir in output_directories {
// NOTE(review): the tree digest is linked below as though it were a
// `Directory` digest — confirm the server also stores the root Directory
// proto under this digest, otherwise the wrapping nodes will dangle.
let digest_result: Result<Digest, String> = dir.get_tree_digest().into();
let mut digest = future::done(digest_result).to_boxed();
// Wrap the digest in one Directory per path component, deepest-first
// (hence rsplit), so the final digest is rooted at the working directory.
// Each iteration chains another and_then onto the accumulated future.
for component in dir.get_path().rsplit('/') {
let component = component.to_owned();
let store = self.store.clone();
digest = digest
.and_then(move |digest| {
// Synthesize a Directory with a single child node named after this
// path component, pointing at the digest accumulated so far, and
// record it (second arg presumably marks it as a "root"/initial
// directory — verify against Store::record_directory).
let mut directory = bazel_protos::remote_execution::Directory::new();
directory.mut_directories().push({
let mut node = bazel_protos::remote_execution::DirectoryNode::new();
node.set_name(component);
node.set_digest((&digest).into());
node
});
store.record_directory(&directory, true)
})
.to_boxed();
}
directory_digests.push(digest.map_err(|err| {
ExecutionError::Fatal(format!("Error saving remote output directory: {}", err))
}));
}
// Make a directory for the files
// While building the PathStats, also record each file's server-reported
// digest keyed by path, so the Directory can be stored without reading any
// file contents. A bad digest short-circuits the whole collect via `?`.
let mut path_map = HashMap::new();
let path_stats_result: Result<Vec<PathStat>, String> = execute_response
.get_result()
.get_output_files()
.iter()
.map(|output_file| {
let output_file_path_buf = PathBuf::from(output_file.get_path());
let digest: Result<Digest, String> = output_file.get_digest().into();
path_map.insert(output_file_path_buf.clone(), digest?);
Ok(PathStat::file(
output_file_path_buf.clone(),
File {
path: output_file_path_buf,
is_executable: output_file.get_is_executable(),
},
))
})
.collect();
let path_stats = try_future!(path_stats_result.map_err(ExecutionError::Fatal));
// A StoreFileByDigest implementation that never touches file contents: it
// answers purely from the path -> Digest map captured above, and fails for
// any path the server did not report a digest for.
#[derive(Clone)]
struct StoreOneOffRemoteDigest {
map_of_paths_to_digests: HashMap<PathBuf, Digest>,
}
impl StoreOneOffRemoteDigest {
fn new(map: HashMap<PathBuf, Digest>) -> StoreOneOffRemoteDigest {
StoreOneOffRemoteDigest {
map_of_paths_to_digests: map,
}
}
}
impl fs::StoreFileByDigest<String> for StoreOneOffRemoteDigest {
fn store_by_digest(&self, file: File) -> BoxFuture<Digest, String> {
match self.map_of_paths_to_digests.get(&file.path) {
Some(digest) => future::ok(*digest),
None => future::err(format!(
"Didn't know digest for path in remote execution response: {:?}",
file.path
)),
}
.to_boxed()
}
}
let store = self.store.clone();
// Build a Directory digest from the output-file PathStats (digests served
// from the map above), wait for all the wrapped output-directory digests,
// then merge everything into the single result Digest.
fs::Snapshot::digest_from_path_stats(
self.store.clone(),
&StoreOneOffRemoteDigest::new(path_map),
&path_stats,
)
.map_err(move |error| {
ExecutionError::Fatal(format!(
"Error when storing the output file directory info in the remote CAS: {:?}",
error
))
})
.join(future::join_all(directory_digests))
.and_then(|(files_digest, mut directory_digests)| {
directory_digests.push(files_digest);
fs::Snapshot::merge_directories(store, directory_digests).map_err(|err| {
ExecutionError::Fatal(format!(
"Error when merging output files and directories: {}",
err
))
})
})
.to_boxed()
}