uv-resolver: filter out sibling dependencies in a fork #4415

Merged: 3 commits, Jun 20, 2024
28 changes: 28 additions & 0 deletions crates/uv-resolver/src/pubgrub/package.rs
@@ -201,6 +201,34 @@ impl PubGrubPackage {
}))
}
}

/// Returns the name of this PubGrub package, if it has one.
pub(crate) fn name(&self) -> Option<&PackageName> {
match &**self {
// A root can never be a dependency of another package, and a `Python` pubgrub
// package is never returned by `get_dependencies`. So these cases never occur.
PubGrubPackageInner::Root(None) | PubGrubPackageInner::Python(_) => None,
PubGrubPackageInner::Root(Some(name))
| PubGrubPackageInner::Package { name, .. }
| PubGrubPackageInner::Extra { name, .. }
| PubGrubPackageInner::Dev { name, .. }
| PubGrubPackageInner::Marker { name, .. } => Some(name),
}
}

/// Returns the marker expression associated with this PubGrub package, if
/// it has one.
pub(crate) fn marker(&self) -> Option<&MarkerTree> {
match &**self {
// A root can never be a dependency of another package, and a `Python` pubgrub
// package is never returned by `get_dependencies`. So these cases never occur.
PubGrubPackageInner::Root(_) | PubGrubPackageInner::Python(_) => None,
PubGrubPackageInner::Package { marker, .. }
| PubGrubPackageInner::Extra { marker, .. }
| PubGrubPackageInner::Dev { marker, .. } => marker.as_ref(),
PubGrubPackageInner::Marker { marker, .. } => Some(marker),
}
}
}

#[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Hash, Ord)]
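
The two accessors above centralize a match that each call site previously had to spell out. A rough standalone sketch of the pattern, using simplified stand-in types rather than the real `PubGrubPackageInner` (which also carries `Extra` and `Dev` variants, URLs, and version data), so this is illustrative only:

// Illustrative only: stand-in types, not the actual uv-resolver definitions.
#[allow(dead_code)]
enum Package {
    Root(Option<String>),
    Python,
    Package { name: String, marker: Option<String> },
    Marker { name: String, marker: String },
}

impl Package {
    /// Name of this package, if it has one (mirrors `PubGrubPackage::name`).
    fn name(&self) -> Option<&str> {
        match self {
            Package::Root(None) | Package::Python => None,
            Package::Root(Some(name))
            | Package::Package { name, .. }
            | Package::Marker { name, .. } => Some(name.as_str()),
        }
    }

    /// Marker expression, if any (mirrors `PubGrubPackage::marker`).
    fn marker(&self) -> Option<&str> {
        match self {
            Package::Root(_) | Package::Python => None,
            Package::Package { marker, .. } => marker.as_deref(),
            Package::Marker { marker, .. } => Some(marker.as_str()),
        }
    }
}

fn main() {
    let dep = Package::Package {
        name: "anyio".to_string(),
        marker: Some("python_version < '3.12'".to_string()),
    };
    // Call sites no longer repeat the match to get at the name and marker.
    println!("{:?} {:?}", dep.name(), dep.marker());
}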
117 changes: 97 additions & 20 deletions crates/uv-resolver/src/resolver/mod.rs
@@ -1921,15 +1921,12 @@ impl Dependencies {

let mut by_name: FxHashMap<&PackageName, PossibleForks> = FxHashMap::default();
for (index, (ref pkg, _)) in deps.iter().enumerate() {
let (name, marker) = match &**pkg {
// A root can never be a dependency of another package, and a `Python` pubgrub
// package is never returned by `get_dependencies`. So these cases never occur.
PubGrubPackageInner::Root(_) | PubGrubPackageInner::Python(_) => unreachable!(),
PubGrubPackageInner::Package { name, marker, .. }
| PubGrubPackageInner::Extra { name, marker, .. }
| PubGrubPackageInner::Dev { name, marker, .. } => (name, marker.as_ref()),
PubGrubPackageInner::Marker { name, marker, .. } => (name, Some(marker)),
};
// A root can never be a dependency of another package,
// and a `Python` pubgrub package is never returned by
// `get_dependencies`. So a pubgrub package always has a
// name in this context.
let name = pkg.name().expect("dependency always has a name");
let marker = pkg.marker();
let Some(marker) = marker else {
// When no marker is found, it implies there is a dependency on
// this package that is unconditional with respect to marker
@@ -2009,32 +2006,28 @@
}];
let mut diverging_packages = Vec::new();
for (name, possible_forks) in by_name {
let fork_groups = match possible_forks {
let fork_groups = match possible_forks.finish() {
// 'finish()' guarantees that 'PossiblyForking' implies
// 'DefinitelyForking'.
PossibleForks::PossiblyForking(fork_groups) => fork_groups,
PossibleForks::NoForkPossible(indices) => {
// No fork is provoked by this package, so just add
// everything in this group to each of the forks.
for index in indices {
for fork in &mut forks {
fork.dependencies.push(deps[index].clone());
fork.add_nonfork_package(deps[index].clone());
}
}
continue;
}
};
assert!(fork_groups.forks.len() >= 2, "expected definitive fork");
let mut new_forks: Vec<Fork> = vec![];
for group in fork_groups.forks {
let mut new_forks_for_group = forks.clone();
for (index, markers) in group.packages {
for (index, _) in group.packages {
for fork in &mut new_forks_for_group {
fork.dependencies.push(deps[index].clone());
// Each marker expression in a single fork is,
// by construction, overlapping with at least
// one other marker expression in this fork.
// However, the symmetric differences may be
// non-empty. Therefore, the markers need to be
// combined on the corresponding fork
fork.markers.and(markers.clone());
fork.add_forked_package(deps[index].clone());
}
}
new_forks.extend(new_forks_for_group);
@@ -2089,6 +2082,11 @@ struct Fork {
/// The list of dependencies for this fork, guaranteed to be conflict
/// free. (i.e., There are no two packages with the same name with
/// non-overlapping marker expressions.)
///
/// Note that callers shouldn't mutate this sequence directly. Instead,
/// they should use `add_forked_package` or `add_nonfork_package`. Namely,
/// it should be impossible for a package with a marker expression that is
/// disjoint from the marker expression on this fork to be added.
dependencies: Vec<(PubGrubPackage, Range<Version>)>,
/// The markers that provoked this fork.
///
@@ -2100,6 +2098,68 @@ struct Fork {
markers: MarkerTree,
}

impl Fork {
/// Add the given dependency to this fork with the assumption that it
/// provoked this fork into existence.
///
/// In particular, the markers given should correspond to the markers
/// associated with that dependency, and they are combined (via
/// conjunction) with the markers on this fork.
///
/// Finally, and critically, any dependencies that are already in this
/// fork that are disjoint with the markers given are removed. This is
/// because a fork provoked by the given marker should not have packages
/// whose markers are disjoint with it. While it might seem harmless, this
/// can cause the resolver to explore otherwise impossible resolutions,
/// and also run into conflicts (and thus a failed resolution) that don't
/// actually exist.
fn add_forked_package(&mut self, (pkg, range): (PubGrubPackage, Range<Version>)) {
// OK because a package without a marker is unconditional and
// thus can never provoke a fork.
let marker = pkg
.marker()
.cloned()
.expect("forked package always has a marker");
self.remove_disjoint_packages(&marker);
self.dependencies.push((pkg, range));
// Each marker expression in a single fork is,
// by construction, overlapping with at least
// one other marker expression in this fork.
// However, the symmetric differences may be
// non-empty. Therefore, the markers need to be
// combined on the corresponding fork.
self.markers.and(marker);
}

/// Add the given dependency to this fork.
///
/// This works by assuming the given package did *not* provoke a fork.
///
/// It is only added if the markers on the given package are not disjoint
/// with this fork's markers.
fn add_nonfork_package(&mut self, (pkg, range): (PubGrubPackage, Range<Version>)) {
use crate::marker::is_disjoint;

if pkg
.marker()
.map_or(true, |marker| !is_disjoint(marker, &self.markers))
{
self.dependencies.push((pkg, range));
}
}

/// Removes any dependencies in this fork whose markers are disjoint with
/// the given markers.
fn remove_disjoint_packages(&mut self, fork_marker: &MarkerTree) {
use crate::marker::is_disjoint;

self.dependencies.retain(|(pkg, _)| {
pkg.marker()
.map_or(true, |pkg_marker| !is_disjoint(pkg_marker, fork_marker))
});
}
}

/// Intermediate state that represents a *possible* grouping of forks
/// for one package name.
///
@@ -2138,6 +2198,23 @@ impl<'a> PossibleForks<'a> {
fork_groups.forks.len() > 1
}

/// Consumes this possible set of forks and converts a "possibly forking"
/// variant to a "no fork possible" variant if there are no actual forks.
///
/// This should be called when all dependencies for one package have been
/// considered. It will normalize this value such that `PossiblyForking`
/// means `DefinitelyForking`.
fn finish(mut self) -> PossibleForks<'a> {
let PossibleForks::PossiblyForking(ref fork_groups) = self else {
return self;
};
if fork_groups.forks.len() == 1 {
self.make_no_forks_possible();
return self;
}
self
}

/// Pushes an unconditional index to a package.
///
/// If this previously contained possible forks, those are combined into
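
The behavioral core of the change is in `add_forked_package`: a fork now narrows its markers and evicts sibling dependencies whose markers are disjoint with the dependency that provoked the fork, rather than accumulating everything pushed into it. A rough, self-contained sketch of that retain-on-overlap idea, with a toy marker type (a set of environment names) standing in for `MarkerTree` and `crate::marker::is_disjoint`; the package names are illustrative only:

// Illustrative only: a toy model of the Fork filtering above, not the real
// uv-resolver types. Markers are modeled as sets of environment names;
// "disjoint" means the sets share no environment.
use std::collections::BTreeSet;

type Marker = BTreeSet<&'static str>;

fn is_disjoint(a: &Marker, b: &Marker) -> bool {
    a.intersection(b).next().is_none()
}

struct Fork {
    // The real code stores `(PubGrubPackage, Range<Version>)` pairs.
    dependencies: Vec<(&'static str, Option<Marker>)>,
    markers: Marker,
}

impl Fork {
    // A dependency that provoked the fork: narrow the fork's markers and
    // evict siblings that can no longer apply within it.
    fn add_forked_package(&mut self, pkg: (&'static str, Option<Marker>)) {
        let marker = pkg.1.clone().expect("forked package always has a marker");
        self.remove_disjoint_packages(&marker);
        self.dependencies.push(pkg);
        // Conjunction (`self.markers.and(marker)`) in the real code;
        // set intersection in this toy model.
        self.markers = self.markers.intersection(&marker).copied().collect();
    }

    // A dependency that did not provoke the fork: keep it only if its marker
    // still overlaps this fork's markers.
    fn add_nonfork_package(&mut self, pkg: (&'static str, Option<Marker>)) {
        if pkg.1.as_ref().map_or(true, |m| !is_disjoint(m, &self.markers)) {
            self.dependencies.push(pkg);
        }
    }

    fn remove_disjoint_packages(&mut self, fork_marker: &Marker) {
        self.dependencies
            .retain(|(_, m)| m.as_ref().map_or(true, |m| !is_disjoint(m, fork_marker)));
    }
}

fn main() {
    let mut fork = Fork {
        dependencies: vec![("colorama", Some(["windows"].into()))],
        markers: ["linux", "windows"].into(),
    };
    // Forking on a Linux-only dependency evicts the Windows-only sibling,
    // and a later Windows-only dependency is not added to this fork either.
    fork.add_forked_package(("uvloop", Some(["linux"].into())));
    fork.add_nonfork_package(("pywin32", Some(["windows"].into())));
    let names: Vec<_> = fork.dependencies.iter().map(|d| d.0).collect();
    assert_eq!(names, vec!["uvloop"]);
    println!("{names:?}");
}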
2 changes: 1 addition & 1 deletion crates/uv/tests/common/mod.rs
@@ -27,7 +27,7 @@ pub static EXCLUDE_NEWER: &str = "2024-03-25T00:00:00Z";
/// Using a find links url allows using `--index-url` instead of `--extra-index-url` in tests
/// to prevent dependency confusion attacks against our test suite.
pub const BUILD_VENDOR_LINKS_URL: &str =
"https://raw.githubusercontent.com/astral-sh/packse/0.3.27/vendor/links.html";
"https://raw.githubusercontent.com/astral-sh/packse/0.3.29/vendor/links.html";

#[doc(hidden)] // Macro and test context only, don't use directly.
pub const INSTA_FILTERS: &[(&str, &str)] = &[