Fix fingerprint handling in pipelining mode
This commit fixes an issue when pipelining mode is used in handling
recompilations. Previously a sequence of compilations could look like:

* Crate A starts to build
* Crate A produces metadata
* Crate B, which depends on A, starts
* Crate B finishes
* Crate A finishes

In this case the mtime of B's output is earlier than that of A's final
artifact, which fooled Cargo into thinking that B needed to be recompiled.
However, B doesn't actually need to be recompiled, because it only depends
on the metadata of A, not the final artifacts.

This unfortunately resulted in some duplication in a few places, but not
much more than already exists between fingerprinting and compilation.
alexcrichton committed May 8, 2019
1 parent c2152f0 commit 6b28a0c
Showing 10 changed files with 134 additions and 140 deletions.
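
The core of the fix can be pictured with a small sketch before diving into the
diff (illustrative only; the names and types below are simplified stand-ins,
not Cargo's actual internals): each up-to-date unit records an mtime per
output file, and a dependency edge that only needs metadata is compared
against the dependency's `.rmeta` mtime instead of its newest artifact.

use std::collections::HashMap;
use std::path::PathBuf;
use std::time::SystemTime;

// Illustrative sketch, not Cargo's actual code: decide whether one dependency
// makes a unit stale, given per-output mtimes on both sides.
fn dep_makes_us_stale(
    our_outputs: &HashMap<PathBuf, SystemTime>,
    dep_outputs: &HashMap<PathBuf, SystemTime>,
    dep_only_requires_rmeta: bool,
) -> bool {
    // The newest of our own outputs is what the dependency is compared against.
    let our_newest = match our_outputs.values().max() {
        Some(t) => t,
        None => return false, // no outputs at all, nothing to invalidate
    };

    // A pipelined dependency only needs its `.rmeta` to be present, and that
    // file appears before the dependency's final artifacts do.
    let dep_mtime = if dep_only_requires_rmeta {
        dep_outputs
            .iter()
            .find(|(path, _)| path.extension().and_then(|e| e.to_str()) == Some("rmeta"))
            .map(|(_, mtime)| mtime)
    } else {
        dep_outputs.values().max()
    };

    match dep_mtime {
        Some(dep_mtime) => dep_mtime > our_newest,
        None => false,
    }
}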
2 changes: 1 addition & 1 deletion src/cargo/core/compiler/build_context/target_info.rs
@@ -25,7 +25,7 @@ pub enum FileFlavor {
/// Not a special file type.
Normal,
/// Something you can link against (e.g., a library).
Linkable { rmeta: PathBuf },
Linkable { rmeta: bool },
/// Piece of external debug information (e.g., `.dSYM`/`.pdb` file).
DebugInfo,
}
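
With `rmeta` now a boolean flag instead of a path, the `.rmeta` file becomes
just another tracked output, and downstream code can tell it apart from the
real linkable artifact by matching on the flavor. A rough sketch of that kind
of match, using simplified stand-in types rather than the actual Cargo
definitions:

use std::path::{Path, PathBuf};

// Illustrative stand-ins for Cargo's types, simplified for the example.
enum FileFlavor {
    Normal,
    Linkable { rmeta: bool },
    DebugInfo,
}

struct OutputFile {
    path: PathBuf,
    flavor: FileFlavor,
}

// Pick which output to hand to a dependant: the `.rmeta` when the edge only
// needs metadata (pipelining), otherwise the real linkable artifact.
fn pick_output(outputs: &[OutputFile], only_requires_rmeta: bool) -> Option<&Path> {
    outputs.iter().find_map(|o| match o.flavor {
        FileFlavor::Linkable { rmeta } if rmeta == only_requires_rmeta => Some(o.path.as_path()),
        _ => None,
    })
}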
16 changes: 12 additions & 4 deletions src/cargo/core/compiler/context/compilation_files.rs
@@ -305,10 +305,10 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
// for both libraries and binaries.
let path = out_dir.join(format!("lib{}.rmeta", file_stem));
ret.push(OutputFile {
path: path.clone(),
path,
hardlink: None,
export_path: None,
flavor: FileFlavor::Linkable { rmeta: path },
flavor: FileFlavor::Linkable { rmeta: false },
});
} else {
let mut add = |crate_type: &str, flavor: FileFlavor| -> CargoResult<()> {
@@ -372,13 +372,21 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
add(
kind.crate_type(),
if kind.linkable() {
let rmeta = out_dir.join(format!("lib{}.rmeta", file_stem));
FileFlavor::Linkable { rmeta }
FileFlavor::Linkable { rmeta: false }
} else {
FileFlavor::Normal
},
)?;
}
let path = out_dir.join(format!("lib{}.rmeta", file_stem));
if !unit.target.requires_upstream_objects() {
ret.push(OutputFile {
path,
hardlink: None,
export_path: None,
flavor: FileFlavor::Linkable { rmeta: true },
});
}
}
}
}
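
For a library that can be pipelined, the unit now advertises both the
linkable artifact and the metadata file, with the new flag telling them
apart. Roughly the shape of outputs one would expect (paths and hashes are
invented for illustration, not taken from this commit):

/// Illustrative only: the outputs a pipelined `lib` unit is expected to list
/// after this change, as (path, rmeta-flag) pairs.
fn expected_outputs() -> Vec<(&'static str, bool)> {
    vec![
        // the artifact that dependants ultimately link against
        ("target/debug/deps/libfoo-abc123.rlib", false),
        // the early metadata file that pipelined dependants wait for
        ("target/debug/deps/libfoo-abc123.rmeta", true),
    ]
}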
94 changes: 59 additions & 35 deletions src/cargo/core/compiler/fingerprint.rs
@@ -187,6 +187,7 @@
//! See the `A-rebuild-detection` flag on the issue tracker for more:
//! <https://github.com/rust-lang/cargo/issues?q=is%3Aissue+is%3Aopen+label%3AA-rebuild-detection>
use std::collections::HashMap;
use std::env;
use std::fs;
use std::hash::{self, Hasher};
@@ -322,6 +323,7 @@ struct DepFingerprint {
pkg_id: u64,
name: String,
public: bool,
only_requires_rmeta: bool,
fingerprint: Arc<Fingerprint>,
}

@@ -395,17 +397,15 @@ enum FsStatus {
/// unit needs to subsequently be recompiled.
Stale,

/// This unit is up-to-date, it does not need to be recompiled. If there are
/// any outputs then the `FileTime` listed here is the minimum of all their
/// mtimes. This is then later used to see if a unit is newer than one of
/// its dependants, causing the dependant to be recompiled.
UpToDate(Option<FileTime>),
/// This unit is up-to-date. All outputs and their corresponding mtime are
/// listed in the payload here for other dependencies to compare against.
UpToDate { mtimes: HashMap<PathBuf, FileTime> },
}

impl FsStatus {
fn up_to_date(&self) -> bool {
match self {
FsStatus::UpToDate(_) => true,
FsStatus::UpToDate { .. } => true,
FsStatus::Stale => false,
}
}
@@ -442,6 +442,7 @@ impl<'de> Deserialize<'de> for DepFingerprint {
pkg_id,
name,
public,
only_requires_rmeta: false,
fingerprint: Arc::new(Fingerprint {
memoized_hash: Mutex::new(Some(hash)),
..Fingerprint::new()
@@ -753,51 +754,71 @@ impl Fingerprint {
) -> CargoResult<()> {
assert!(!self.fs_status.up_to_date());

let mut mtimes = HashMap::new();

// Get the `mtime` of all outputs. Optionally update their mtime
// afterwards based on the `mtime_on_use` flag. Afterwards we want the
// minimum mtime as it's the one we'll be comparing to inputs and
// dependencies.
let status = self
.outputs
.iter()
.map(|f| {
let mtime = paths::mtime(f).ok();
if mtime_on_use {
let t = FileTime::from_system_time(SystemTime::now());
drop(filetime::set_file_times(f, t, t));
for output in self.outputs.iter() {
let mtime = match paths::mtime(output) {
Ok(mtime) => mtime,

// This path failed to report its `mtime`. It probably doesn't
// exist, so leave ourselves as stale and bail out.
Err(e) => {
log::debug!("failed to get mtime of {:?}: {}", output, e);
return Ok(());
}
mtime
})
.min();
};
if mtime_on_use {
let t = FileTime::from_system_time(SystemTime::now());
filetime::set_file_times(output, t, t)?;
}
assert!(mtimes.insert(output.clone(), mtime).is_none());
}

let max_mtime = match mtimes.values().max() {
Some(mtime) => mtime,

let mtime = match status {
// We had no output files. This means we're an overridden build
// script and we're just always up to date because we aren't
// watching the filesystem.
None => {
self.fs_status = FsStatus::UpToDate(None);
self.fs_status = FsStatus::UpToDate { mtimes };
return Ok(());
}

// At least one path failed to report its `mtime`. It probably
// doesn't exists, so leave ourselves as stale and bail out.
Some(None) => return Ok(()),

// All files successfully reported an `mtime`, and we've got the
// minimum one, so let's keep going with that.
Some(Some(mtime)) => mtime,
};

for dep in self.deps.iter() {
let dep_mtime = match dep.fingerprint.fs_status {
let dep_mtimes = match &dep.fingerprint.fs_status {
FsStatus::UpToDate { mtimes } => mtimes,
// If our dependency is stale, so are we, so bail out.
FsStatus::Stale => return Ok(()),
};

// If our dependencies is up to date and has no filesystem
// interactions, then we can move on to the next dependency.
FsStatus::UpToDate(None) => continue,

FsStatus::UpToDate(Some(mtime)) => mtime,
// If our dependency edge only requires the rmeta file to be present
// then we only need to look at that one output file, otherwise we
// need to consider all output files to see if we're out of date.
let dep_mtime = if dep.only_requires_rmeta {
dep_mtimes
.iter()
.filter_map(|(path, mtime)| {
if path.extension().and_then(|s| s.to_str()) == Some("rmeta") {
Some(mtime)
} else {
None
}
})
.next()
.expect("failed to find rmeta")
} else {
match dep_mtimes.values().max() {
Some(mtime) => mtime,
// If our dependency is up to date and has no filesystem
// interactions, then we can move on to the next dependency.
None => continue,
}
};

// If the dependency is newer than our own output then it was
@@ -807,7 +828,8 @@
// Note that this comparison should probably be `>=`, not `>`, but
// for a discussion of why it's `>` see the discussion about #5918
// below in `find_stale`.
if dep_mtime > mtime {
if dep_mtime > max_mtime {
log::info!("dependency on `{}` is newer than we are", dep.name);
return Ok(());
}
}
Expand All @@ -824,7 +846,7 @@ impl Fingerprint {
}

// Everything was up to date! Record such.
self.fs_status = FsStatus::UpToDate(Some(mtime));
self.fs_status = FsStatus::UpToDate { mtimes };

Ok(())
}
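
Tying this back to the scenario in the commit message, a worked example
(timestamps invented, and the helpers here are illustrative rather than the
real fingerprint code) shows why comparing against the dependency's `.rmeta`
keeps B fresh while the old comparison did not:

use std::collections::HashMap;
use std::path::PathBuf;

fn main() {
    // Invented timestamps (seconds) matching the commit message's scenario:
    // A's metadata lands at t=1, B's outputs at t=2, A's final rlib at t=3.
    let a_outputs: HashMap<PathBuf, u64> = [
        (PathBuf::from("liba.rmeta"), 1),
        (PathBuf::from("liba.rlib"), 3),
    ]
    .into_iter()
    .collect();
    let b_newest: u64 = 2;

    // Roughly the old comparison: the `.rmeta` wasn't a tracked output, so B
    // was compared against the final rlib's mtime (3) and looked stale.
    let old_dep_mtime = *a_outputs.get(&PathBuf::from("liba.rlib")).unwrap();
    assert!(old_dep_mtime > b_newest);

    // The new comparison: B's edge to A only requires the rmeta, so only the
    // `.rmeta` mtime (1) matters and B stays fresh.
    let new_dep_mtime = a_outputs
        .iter()
        .find(|(p, _)| p.extension().and_then(|e| e.to_str()) == Some("rmeta"))
        .map(|(_, t)| *t)
        .unwrap();
    assert!(new_dep_mtime <= b_newest);
}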
@@ -856,6 +878,7 @@ impl hash::Hash for Fingerprint {
name,
public,
fingerprint,
only_requires_rmeta: _,
} in deps
{
pkg_id.hash(h);
@@ -929,6 +952,7 @@ impl DepFingerprint {
name,
public,
fingerprint,
only_requires_rmeta: cx.only_requires_rmeta(parent, dep),
})
}
}
53 changes: 34 additions & 19 deletions src/cargo/core/compiler/mod.rs
@@ -39,10 +39,10 @@ use crate::core::profiles::{Lto, PanicStrategy, Profile};
use crate::core::Feature;
use crate::core::{PackageId, Target};
use crate::util::errors::{CargoResult, CargoResultExt, Internal, ProcessError};
use crate::util::machine_message::Message;
use crate::util::paths;
use crate::util::{self, machine_message, process, ProcessBuilder};
use crate::util::{internal, join_paths, profile};
use crate::util::machine_message::Message;

/// Indicates whether an object is for the host architecture or the target architecture.
///
@@ -498,7 +498,8 @@ fn link_targets<'a, 'cfg>(
filenames: destinations,
executable,
fresh,
}.to_json_string();
}
.to_json_string();
state.stdout(&msg);
}
Ok(())
@@ -1016,20 +1017,14 @@ fn build_deps_args<'a, 'cfg>(
need_unstable_opts: &mut bool,
) -> CargoResult<()> {
let bcx = cx.bcx;
for output in cx.outputs(dep)?.iter() {
let rmeta = match &output.flavor {
FileFlavor::Linkable { rmeta } => rmeta,
_ => continue,
};
let mut v = OsString::new();
let name = bcx.extern_crate_name(current, dep)?;
v.push(name);
v.push("=");
if cx.only_requires_rmeta(current, dep) {
v.push(&rmeta);
} else {
v.push(&output.path);
}

let mut value = OsString::new();
value.push(bcx.extern_crate_name(current, dep)?);
value.push("=");

let mut pass = |file| {
let mut value = value.clone();
value.push(file);

if current
.pkg
@@ -1045,7 +1040,26 @@
cmd.arg("--extern");
}

cmd.arg(&v);
cmd.arg(&value);
};

let outputs = cx.outputs(dep)?;
let mut outputs = outputs.iter().filter_map(|output| match output.flavor {
FileFlavor::Linkable { rmeta } => Some((output, rmeta)),
_ => None,
});

if cx.only_requires_rmeta(current, dep) {
let (output, _rmeta) = outputs
.find(|(_output, rmeta)| *rmeta)
.expect("failed to find rlib dep for pipelined dep");
pass(&output.path);
} else {
for (output, rmeta) in outputs {
if !rmeta {
pass(&output.path);
}
}
}
Ok(())
}
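
The net effect on the rustc invocation: a pipelined dependant is handed the
dependency's `.rmeta` via `--extern`, while a non-pipelined dependant gets
the real linkable artifact. A minimal sketch of that choice in isolation (the
helper below is illustrative, not Cargo's actual code):

use std::ffi::OsString;
use std::path::Path;

// Illustrative helper: build the value passed after `--extern` for one
// dependency, choosing the `.rmeta` for a metadata-only (pipelined) edge.
fn extern_value(name: &str, linkable: &Path, rmeta: &Path, only_requires_rmeta: bool) -> OsString {
    let mut value = OsString::from(name);
    value.push("=");
    value.push(if only_requires_rmeta { rmeta } else { linkable });
    value
}

// e.g. choosing the rmeta yields "a=liba-xyz.rmeta", which Cargo then passes
// to rustc as `--extern a=liba-xyz.rmeta`; otherwise it passes the rlib path.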
@@ -1146,7 +1160,7 @@ fn on_stderr_line(
log::debug!("looks like metadata finished early!");
state.rmeta_produced();
}
return Ok(())
return Ok(());
}
}

Expand All @@ -1157,7 +1171,8 @@ fn on_stderr_line(
package_id,
target,
message: compiler_message,
}.to_json_string();
}
.to_json_string();

// Switch json lines from rustc/rustdoc that appear on stderr to stdout
// instead. We want the stdout of Cargo to always be machine parseable as
5 changes: 1 addition & 4 deletions src/cargo/ops/cargo_clean.rs
@@ -2,7 +2,7 @@ use std::collections::HashMap;
use std::fs;
use std::path::Path;

use crate::core::compiler::{UnitInterner, FileFlavor};
use crate::core::compiler::UnitInterner;
use crate::core::compiler::{BuildConfig, BuildContext, CompileMode, Context, Kind};
use crate::core::profiles::UnitFor;
use crate::core::Workspace;
@@ -119,9 +119,6 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
if let Some(ref dst) = output.hardlink {
rm_rf(dst, config)?;
}
if let FileFlavor::Linkable { rmeta } = &output.flavor {
rm_rf(rmeta, config)?;
}
}
}
