fix(resolver): Remove unused public-deps error handling
To implement rust-lang/rfcs#3516, we need to decouple the resolver's
behavior from the unstable flag.  Since the code path is now dead, I
went ahead and removed it.
epage committed Nov 22, 2023
1 parent 463f307 commit eea1a7c
Showing 5 changed files with 10 additions and 553 deletions.
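For context, rust-lang/rfcs#3516 covers public and private dependencies. Under Cargo's unstable `public-dependency` feature, a manifest marks a dependency as public roughly as follows; this is a minimal sketch for orientation only, not taken from this commit, and the exact syntax may evolve with the unstable feature:

cargo-features = ["public-dependency"]

[dependencies]
# `a` is re-exported from this crate's public API, so it is declared public
a = { version = "0.1", public = true }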
71 changes: 0 additions & 71 deletions crates/resolver-tests/src/lib.rs
@@ -201,7 +201,6 @@ pub fn resolve_with_config_raw(
&mut registry,
&version_prefs,
Some(config),
true,
);

// The largest test in our suite takes less than 30 sec.
@@ -295,13 +294,6 @@ impl SatResolve {
);

// no two semver compatible versions of the same package
let by_activations_keys = sat_at_most_one_by_key(
&mut cnf,
var_for_is_packages_used
.iter()
.map(|(p, &v)| (p.as_activations_key(), v)),
);

let mut by_name: HashMap<&'static str, Vec<PackageId>> = HashMap::new();

for p in registry.iter() {
@@ -363,69 +355,6 @@ impl SatResolve {
}
}

let topological_order = graph.sort();

// we already ensure there is only one version for each `activations_key` so we can think of
// `publicly_exports` as being in terms of a set of `activations_key`s
let mut publicly_exports: HashMap<_, HashMap<_, varisat::Var>> = HashMap::new();

for &key in by_activations_keys.keys() {
// everything publicly depends on itself
let var = publicly_exports
.entry(key)
.or_default()
.entry(key)
.or_insert_with(|| cnf.new_var());
cnf.add_clause(&[var.positive()]);
}

// if a `dep` is public then `p` `publicly_exports` all the things that the selected version `publicly_exports`
for &p in topological_order.iter() {
if let Some(deps) = version_selected_for.get(&p) {
let mut p_exports = publicly_exports.remove(&p.as_activations_key()).unwrap();
for (_, versions) in deps.iter().filter(|(d, _)| d.is_public()) {
for (ver, sel) in versions {
for (&export_pid, &export_var) in publicly_exports[ver].iter() {
let our_var =
p_exports.entry(export_pid).or_insert_with(|| cnf.new_var());
cnf.add_clause(&[
sel.negative(),
export_var.negative(),
our_var.positive(),
]);
}
}
}
publicly_exports.insert(p.as_activations_key(), p_exports);
}
}

// we already ensure there is only one version for each `activations_key` so we can think of
// `can_see` as being in terms of a set of `activations_key`s
// and if `p` `publicly_exports` `export` then it `can_see` `export`
let mut can_see: HashMap<_, HashMap<_, varisat::Var>> = HashMap::new();

// if `p` has a `dep` that selected `ver` then it `can_see` all the things that the selected version `publicly_exports`
for (&p, deps) in version_selected_for.iter() {
let p_can_see = can_see.entry(p).or_default();
for (_, versions) in deps.iter() {
for (&ver, sel) in versions {
for (&export_pid, &export_var) in publicly_exports[&ver].iter() {
let our_var = p_can_see.entry(export_pid).or_insert_with(|| cnf.new_var());
cnf.add_clause(&[
sel.negative(),
export_var.negative(),
our_var.positive(),
]);
}
}
}
}

// a package `can_see` only one version by each name
for (_, see) in can_see.iter() {
sat_at_most_one_by_key(&mut cnf, see.iter().map(|((name, _, _), &v)| (name, v)));
}
let mut solver = varisat::Solver::new();
solver.add_formula(&cnf);

225 changes: 2 additions & 223 deletions crates/resolver-tests/tests/resolve.rs
@@ -6,8 +6,8 @@ use cargo::util::Config;
use cargo_util::is_ci;

use resolver_tests::{
assert_contains, assert_same, dep, dep_kind, dep_loc, dep_req, dep_req_kind, loc_names, names,
pkg, pkg_id, pkg_loc, registry, registry_strategy, remove_dep, resolve, resolve_and_validated,
assert_contains, assert_same, dep, dep_kind, dep_loc, dep_req, loc_names, names, pkg, pkg_id,
pkg_loc, registry, registry_strategy, remove_dep, resolve, resolve_and_validated,
resolve_with_config, PrettyPrintRegistry, SatResolve, ToDep, ToPkgId,
};

@@ -287,192 +287,6 @@ proptest! {
}
}

#[test]
#[should_panic(expected = "pub dep")] // The error handling is not yet implemented.
fn pub_fail() {
let input = vec![
pkg!(("a", "0.0.4")),
pkg!(("a", "0.0.5")),
pkg!(("e", "0.0.6") => [dep_req_kind("a", "<= 0.0.4", DepKind::Normal, true),]),
pkg!(("kB", "0.0.3") => [dep_req("a", ">= 0.0.5"),dep("e"),]),
];
let reg = registry(input);
assert!(resolve_and_validated(vec![dep("kB")], &reg, None).is_err());
}

#[test]
fn basic_public_dependency() {
let reg = registry(vec![
pkg!(("A", "0.1.0")),
pkg!(("A", "0.2.0")),
pkg!("B" => [dep_req_kind("A", "0.1", DepKind::Normal, true)]),
pkg!("C" => [dep("A"), dep("B")]),
]);

let res = resolve_and_validated(vec![dep("C")], &reg, None).unwrap();
assert_same(
&res,
&names(&[
("root", "1.0.0"),
("C", "1.0.0"),
("B", "1.0.0"),
("A", "0.1.0"),
]),
);
}

#[test]
fn public_dependency_filling_in() {
// The resolver has an optimization where if a candidate to resolve a dependency
// has already been activated then we skip looking at the candidate's dependencies.
// However, we have to be careful as the new path may make pub dependencies invalid.

// Triggering this case requires dependencies to be resolved in a specific order.
// Fuzzing found this unintuitive case that triggers this unfortunate order of operations:
// 1. `d`'s dep on `c` is resolved
// 2. `d`'s dep on `a` is resolved with `0.1.1`
// 3. `c`'s dep on `b` is resolved with `0.0.2`
// 4. `b`'s dep on `a` is resolved with `0.0.6`, no pub dep conflict as `b` is private to `c`
// 5. `d`'s dep on `b` is resolved with `0.0.2` triggering the optimization.
// Do we notice that `d` has a pub dep conflict on `a`? Let's try it and see.
let reg = registry(vec![
pkg!(("a", "0.0.6")),
pkg!(("a", "0.1.1")),
pkg!(("b", "0.0.0") => [dep("bad")]),
pkg!(("b", "0.0.1") => [dep("bad")]),
pkg!(("b", "0.0.2") => [dep_req_kind("a", "=0.0.6", DepKind::Normal, true)]),
pkg!("c" => [dep_req("b", ">=0.0.1")]),
pkg!("d" => [dep("c"), dep("a"), dep("b")]),
]);

let res = resolve_and_validated(vec![dep("d")], &reg, None).unwrap();
assert_same(
&res,
&names(&[
("root", "1.0.0"),
("d", "1.0.0"),
("c", "1.0.0"),
("b", "0.0.2"),
("a", "0.0.6"),
]),
);
}

#[test]
fn public_dependency_filling_in_and_update() {
// The resolver has an optimization where if a candidate to resolve a dependency
// has already been activated then we skip looking at the candidate's dependencies.
// However, we have to be careful as the new path may make pub dependencies invalid.

// Triggering this case requires dependencies to be resolved in a specific order.
// Fuzzing found this unintuitive case that triggers this unfortunate order of operations:
// 1. `D`'s dep on `B` is resolved
// 2. `D`'s dep on `C` is resolved
// 3. `B`'s dep on `A` is resolved with `0.0.0`
// 4. `C`'s dep on `B` triggering the optimization.
// So did we add `A 0.0.0` to the deps `C` can see?
// Or are we going to resolve `C`'s dep on `A` with `0.0.2`?
// Let's try it and see.
let reg = registry(vec![
pkg!(("A", "0.0.0")),
pkg!(("A", "0.0.2")),
pkg!("B" => [dep_req_kind("A", "=0.0.0", DepKind::Normal, true),]),
pkg!("C" => [dep("A"),dep("B")]),
pkg!("D" => [dep("B"),dep("C")]),
]);
let res = resolve_and_validated(vec![dep("D")], &reg, None).unwrap();
assert_same(
&res,
&names(&[
("root", "1.0.0"),
("D", "1.0.0"),
("C", "1.0.0"),
("B", "1.0.0"),
("A", "0.0.0"),
]),
);
}

#[test]
fn public_dependency_skipping() {
// When backtracking due to a failed dependency, if Cargo is
// trying to be clever and skip irrelevant dependencies, care must be
// taken that the effects of pub dep are accounted for.
let input = vec![
pkg!(("a", "0.2.0")),
pkg!(("a", "2.0.0")),
pkg!(("b", "0.0.0") => [dep("bad")]),
pkg!(("b", "0.2.1") => [dep_req_kind("a", "0.2.0", DepKind::Normal, true)]),
pkg!("c" => [dep("a"),dep("b")]),
];
let reg = registry(input);

resolve_and_validated(vec![dep("c")], &reg, None).unwrap();
}

#[test]
fn public_dependency_skipping_in_backtracking() {
// When backtracking due to a failed dependency, if Cargo is
// trying to be clever and skip irrelevant dependencies, care must be
// taken that the effects of pub dep are accounted for.
let input = vec![
pkg!(("A", "0.0.0") => [dep("bad")]),
pkg!(("A", "0.0.1") => [dep("bad")]),
pkg!(("A", "0.0.2") => [dep("bad")]),
pkg!(("A", "0.0.3") => [dep("bad")]),
pkg!(("A", "0.0.4")),
pkg!(("A", "0.0.5")),
pkg!("B" => [dep_req_kind("A", ">= 0.0.3", DepKind::Normal, true)]),
pkg!("C" => [dep_req("A", "<= 0.0.4"), dep("B")]),
];
let reg = registry(input);

resolve_and_validated(vec![dep("C")], &reg, None).unwrap();
}

#[test]
fn public_sat_topological_order() {
let input = vec![
pkg!(("a", "0.0.1")),
pkg!(("a", "0.0.0")),
pkg!(("b", "0.0.1") => [dep_req_kind("a", "= 0.0.1", DepKind::Normal, true),]),
pkg!(("b", "0.0.0") => [dep("bad"),]),
pkg!("A" => [dep_req("a", "= 0.0.0"),dep_req_kind("b", "*", DepKind::Normal, true)]),
];

let reg = registry(input);
assert!(resolve_and_validated(vec![dep("A")], &reg, None).is_err());
}

#[test]
fn public_sat_unused_makes_things_pub() {
let input = vec![
pkg!(("a", "0.0.1")),
pkg!(("a", "0.0.0")),
pkg!(("b", "8.0.1") => [dep_req_kind("a", "= 0.0.1", DepKind::Normal, true),]),
pkg!(("b", "8.0.0") => [dep_req("a", "= 0.0.1"),]),
pkg!("c" => [dep_req("b", "= 8.0.0"),dep_req("a", "= 0.0.0"),]),
];
let reg = registry(input);

resolve_and_validated(vec![dep("c")], &reg, None).unwrap();
}

#[test]
fn public_sat_unused_makes_things_pub_2() {
let input = vec![
pkg!(("c", "0.0.2")),
pkg!(("c", "0.0.1")),
pkg!(("a-sys", "0.0.2")),
pkg!(("a-sys", "0.0.1") => [dep_req_kind("c", "= 0.0.1", DepKind::Normal, true),]),
pkg!("P" => [dep_req_kind("a-sys", "*", DepKind::Normal, true),dep_req("c", "= 0.0.1"),]),
pkg!("A" => [dep("P"),dep_req("c", "= 0.0.2"),]),
];
let reg = registry(input);

resolve_and_validated(vec![dep("A")], &reg, None).unwrap();
}

#[test]
#[should_panic(expected = "assertion failed: !name.is_empty()")]
fn test_dependency_with_empty_name() {
@@ -1115,41 +929,6 @@ fn resolving_with_constrained_sibling_backtrack_activation() {
);
}

#[test]
fn resolving_with_public_constrained_sibling() {
// It makes sense to resolve most-constrained deps first, but
// with that logic the backtrack traps here come between the two
// attempted resolutions of 'constrained'. When backtracking,
// cargo should skip past them and resume resolution once the
// number of activations for 'constrained' changes.
let mut reglist = vec![
pkg!(("foo", "1.0.0") => [dep_req("bar", "=1.0.0"),
dep_req("backtrack_trap1", "1.0"),
dep_req("backtrack_trap2", "1.0"),
dep_req("constrained", "<=60")]),
pkg!(("bar", "1.0.0") => [dep_req_kind("constrained", ">=60", DepKind::Normal, true)]),
];
// Bump these to make the test harder, but you'll also need to
// change the version constraints on `constrained` above. To correctly
// exercise Cargo, the relationship between the values is:
// NUM_CONSTRAINED - vsn < NUM_TRAPS < vsn
// to make sure the traps are resolved between `constrained`.
const NUM_TRAPS: usize = 45; // min 1
const NUM_CONSTRAINED: usize = 100; // min 1
for i in 0..NUM_TRAPS {
let vsn = format!("1.0.{}", i);
reglist.push(pkg!(("backtrack_trap1", vsn.clone())));
reglist.push(pkg!(("backtrack_trap2", vsn.clone())));
}
for i in 0..NUM_CONSTRAINED {
let vsn = format!("{}.0.0", i);
reglist.push(pkg!(("constrained", vsn.clone())));
}
let reg = registry(reglist);

let _ = resolve_and_validated(vec![dep_req("foo", "1")], &reg, None);
}

#[test]
fn resolving_with_constrained_sibling_transitive_dep_effects() {
// When backtracking due to a failed dependency, if Cargo is
