Plumb source-aware dependency edges through forks

This commit is contained in:
Charlie Marsh
2026-03-12 19:38:07 -04:00
parent 8d7cee6601
commit 28b5217ab1
10 changed files with 2562 additions and 95 deletions
@@ -188,6 +188,7 @@ impl BuildRequires {
.unwrap_or(&empty);
// Lower the requirements.
let project_name = metadata.name.clone();
let requires_dist = metadata.requires_dist.into_iter();
let requires_dist = requires_dist
.flat_map(|requirement| {
@@ -201,7 +202,7 @@ impl BuildRequires {
LoweredRequirement::from_requirement(
requirement,
None,
project_name.as_ref(),
workspace.install_path(),
project_sources,
project_indexes,
+116 -29
View File
@@ -7,18 +7,19 @@ use pubgrub::Ranges;
use uv_distribution_types::{IndexMetadata, Requirement, RequirementSource};
use uv_normalize::{ExtraName, GroupName, PackageName};
use uv_pep440::{Version, VersionSpecifiers};
use uv_pep508::RequirementOrigin;
use uv_pep508::{MarkerTree, RequirementOrigin};
use uv_pypi_types::{
ConflictItemRef, Conflicts, ParsedArchiveUrl, ParsedDirectoryUrl, ParsedGitUrl, ParsedPathUrl,
ParsedUrl, VerbatimParsedUrl,
ConflictItem, ConflictItemRef, Conflicts, ParsedArchiveUrl, ParsedDirectoryUrl, ParsedGitUrl,
ParsedPathUrl, ParsedUrl, VerbatimParsedUrl,
};
use crate::pubgrub::{PubGrubPackage, PubGrubPackageInner};
use crate::universal_marker::UniversalMarker;
/// The source constraint carried by a single dependency edge.
///
/// Most dependency edges are source-agnostic and use [`DependencySource::Unspecified`]. Direct
/// URLs and group-scoped explicit indexes use a concrete source so fork construction can keep
/// URLs and source-scoped explicit indexes use a concrete source so fork construction can keep
/// that source information attached to the edge that introduced it.
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub(crate) enum DependencySource {
@@ -28,7 +29,10 @@ pub(crate) enum DependencySource {
/// The dependency was introduced by a direct URL-like requirement.
Url(Box<VerbatimParsedUrl>),
/// The dependency was introduced by a requirement pinned to an explicit index.
ExplicitIndex(IndexMetadata),
ExplicitIndex {
index: IndexMetadata,
conflict: Option<ConflictItem>,
},
}
impl DependencySource {
@@ -38,23 +42,43 @@ impl DependencySource {
/// explicit index. Direct URL-like requirements always preserve their verbatim URL.
pub(crate) fn from_requirement(requirement: &Requirement) -> Self {
match &requirement.source {
RequirementSource::Registry { index, .. }
RequirementSource::Registry { .. }
if matches!(
requirement.origin.as_ref(),
Some(RequirementOrigin::Group(_, Some(_), _))
) =>
{
index
.clone()
.map(Self::ExplicitIndex)
.unwrap_or(Self::Unspecified)
Self::from_source(&requirement.source)
}
RequirementSource::Registry { .. } => Self::Unspecified,
RequirementSource::Url { .. }
| RequirementSource::Git { .. }
| RequirementSource::Path { .. }
| RequirementSource::Directory { .. } => requirement
.source
| RequirementSource::Directory { .. } => Self::from_source(&requirement.source),
}
}
/// Derive the edge-local source constraint directly from a requirement source.
///
/// This preserves every explicit source carried by `source`, including direct URLs and named
/// indexes. Use [`DependencySource::from_requirement`] for the normal dependency-lowering path,
/// where plain registry requirements remain source-agnostic unless their origin needs an
/// edge-local index.
pub(crate) fn from_source(source: &RequirementSource) -> Self {
match source {
RequirementSource::Registry {
index, conflict, ..
} => index
.clone()
.map(|index| Self::ExplicitIndex {
index,
conflict: conflict.clone(),
})
.unwrap_or(Self::Unspecified),
RequirementSource::Url { .. }
| RequirementSource::Git { .. }
| RequirementSource::Path { .. }
| RequirementSource::Directory { .. } => source
.to_verbatim_parsed_url()
.map(Box::new)
.map(Self::Url)
@@ -66,17 +90,28 @@ impl DependencySource {
pub(crate) fn verbatim_url(&self) -> Option<&VerbatimParsedUrl> {
match self {
Self::Url(url) => Some(url.as_ref()),
Self::Unspecified | Self::ExplicitIndex(_) => None,
Self::Unspecified | Self::ExplicitIndex { .. } => None,
}
}
/// Return the explicit index attached to this source, if any.
pub(crate) fn explicit_index(&self) -> Option<&IndexMetadata> {
match self {
Self::ExplicitIndex(index) => Some(index),
Self::ExplicitIndex { index, .. } => Some(index),
Self::Unspecified | Self::Url(_) => None,
}
}
/// Return the conflict item attached to this source, if any.
///
/// Only `ExplicitIndex` sources can carry a conflict item; `Unspecified` and `Url`
/// sources never do.
fn conflicting_item(&self) -> Option<ConflictItemRef<'_>> {
match self {
Self::ExplicitIndex {
conflict: Some(conflict),
..
} => Some(conflict.as_ref()),
Self::Unspecified | Self::Url(_) | Self::ExplicitIndex { conflict: None, .. } => None,
}
}
}
#[derive(Clone, Debug, Eq, PartialEq)]
@@ -166,8 +201,15 @@ impl PubGrubDependency {
// Add the package, plus any extra variants.
iter.map(move |(extra, group)| {
let pubgrub_requirement =
PubGrubRequirement::from_requirement(&requirement, extra, group);
let marker = Self::group_scoped_source_marker(&requirement, group_name, parent_name);
let preserve_marker = marker.is_some();
let pubgrub_requirement = PubGrubRequirement::from_requirement_with_marker(
&requirement,
extra,
group,
marker.unwrap_or(requirement.marker),
preserve_marker,
);
let PubGrubRequirement {
package,
version,
@@ -236,7 +278,31 @@ impl PubGrubDependency {
/// If this package can't possibly be classified as conflicting, then this
/// returns `None`.
pub(crate) fn conflicting_item(&self) -> Option<ConflictItemRef<'_>> {
self.package.conflicting_item()
self.source
.conflicting_item()
.or_else(|| self.package.conflicting_item())
}
/// Returns the group-scoped marker for a sourceful dependency-group edge.
///
/// Returns `None` for plain registry requirements (no explicit index) or when either the
/// group or the parent package name is missing. Otherwise, encodes the
/// `(parent, group)` pair as a conflict item and folds it into the requirement's marker.
fn group_scoped_source_marker(
requirement: &Requirement,
group_name: Option<&GroupName>,
parent_name: Option<&PackageName>,
) -> Option<MarkerTree> {
// A registry requirement without an explicit index carries no per-group source state.
if matches!(
requirement.source,
RequirementSource::Registry { index: None, .. }
) {
return None;
}
let group = group_name?;
let parent_name = parent_name?;
let conflict = ConflictItem::from((parent_name.clone(), group.clone()));
Some(
UniversalMarker::from_marker_and_conflict_item(requirement.marker, &conflict)
.combined(),
)
}
}
@@ -249,24 +315,25 @@ pub(crate) struct PubGrubRequirement {
}
impl PubGrubRequirement {
fn package_for_requirement(
requirement: &Requirement,
extra: Option<ExtraName>,
group: Option<GroupName>,
) -> PubGrubPackage {
PubGrubPackage::from_package(requirement.name.clone(), extra, group, requirement.marker)
}
/// Convert a [`Requirement`] to a PubGrub-compatible package and range, while returning the URL
/// on the [`Requirement`], if any.
pub(crate) fn from_requirement(
fn from_requirement_with_marker(
requirement: &Requirement,
extra: Option<ExtraName>,
group: Option<GroupName>,
marker: MarkerTree,
preserve_marker: bool,
) -> Self {
let (verbatim_url, parsed_url) = match &requirement.source {
RequirementSource::Registry { specifier, .. } => {
return Self::from_registry_requirement(specifier, extra, group, requirement);
return Self::from_registry_requirement(
specifier,
extra,
group,
requirement,
marker,
preserve_marker,
);
}
RequirementSource::Url {
subdirectory,
@@ -318,8 +385,10 @@ impl PubGrubRequirement {
}
};
let package = Self::package(requirement, extra, group, marker, preserve_marker);
Self {
package: Self::package_for_requirement(requirement, extra, group),
package,
version: Ranges::full(),
source: DependencySource::Url(Box::new(VerbatimParsedUrl {
parsed_url,
@@ -333,11 +402,29 @@ impl PubGrubRequirement {
extra: Option<ExtraName>,
group: Option<GroupName>,
requirement: &Requirement,
marker: MarkerTree,
preserve_marker: bool,
) -> Self {
let package = Self::package(requirement, extra, group, marker, preserve_marker);
Self {
package: Self::package_for_requirement(requirement, extra, group),
package,
source: DependencySource::from_requirement(requirement),
version: Ranges::from(specifier.clone()),
}
}
/// Builds the [`PubGrubPackage`] for a requirement.
///
/// When `preserve_marker` is set and the requirement carries neither an extra nor a
/// group, the marker expression is preserved as-is via
/// [`PubGrubPackage::from_base_preserving_marker`]; otherwise the normal
/// [`PubGrubPackage::from_package`] construction is used.
fn package(
requirement: &Requirement,
extra: Option<ExtraName>,
group: Option<GroupName>,
marker: MarkerTree,
preserve_marker: bool,
) -> PubGrubPackage {
if preserve_marker && extra.is_none() && group.is_none() {
PubGrubPackage::from_base_preserving_marker(requirement.name.clone(), marker)
} else {
PubGrubPackage::from_package(requirement.name.clone(), extra, group, marker)
}
}
}
+17
View File
@@ -133,6 +133,23 @@ impl PubGrubPackage {
}
}
/// Create a base [`PubGrubPackage`] from a package name and marker,
/// preserving the marker expression as-is.
pub(crate) fn from_base_preserving_marker(name: PackageName, marker: MarkerTree) -> Self {
// Unlike `from_package`, preserve extra terms here because complementary
// source requirements encode group/extra conflict markers as extras.
if !marker.is_true() {
// A non-trivial marker becomes a marker proxy node.
Self(Arc::new(PubGrubPackageInner::Marker { name, marker }))
} else {
// A trivially-true marker collapses to the plain base package.
Self(Arc::new(PubGrubPackageInner::Package {
name,
extra: None,
group: None,
marker,
}))
}
}
/// If this package is a proxy package, return the base package it depends on.
///
/// While dependency groups may be attached to a package, we don't consider them here as
@@ -0,0 +1,601 @@
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::slice;
use pubgrub::Ranges;
use uv_distribution_types::{Requirement, RequirementSource};
use uv_normalize::{ExtraName, GroupName, PackageName};
use uv_pep440::Version;
use uv_pep508::{ExtraOperator, MarkerExpression, MarkerOperator, MarkerTree, MarkerValueExtra};
use uv_types::InstalledPackagesProvider;
use crate::pubgrub::{DependencySource, PubGrubDependency, PubGrubPackage};
use crate::python_requirement::PythonRequirement;
use crate::resolver::environment::ResolverEnvironment;
use crate::resolver::fork_map::ForkScope;
use super::ResolverState;
/// A requirement that should be represented as a complementary source-aware base dependency.
///
/// This captures both the source that should be attached to the complementary dependency edge and
/// the source identity used to find an already-flattened sibling dependency in the root path.
struct ComplementarySourceRequirement<'a> {
/// The raw requirement from which the complementary edge is derived.
requirement: &'a Requirement,
/// The fork-narrowed marker under which the complementary edge applies.
marker: MarkerTree,
/// The version range implied by the requirement's source.
version: Ranges<Version>,
/// The edge-local source to attach to the synthesized complementary dependency.
attached_source: DependencySource,
/// The requirement's raw marker, used to locate the already-flattened sibling edge.
flattened_marker: MarkerTree,
/// The derived source of the already-flattened sibling edge, used to locate it.
flattened_source: DependencySource,
}
impl ComplementarySourceRequirement<'_> {
/// Returns the package name of the underlying requirement.
fn name(&self) -> &PackageName {
&self.requirement.name
}
}
/// How a complementary source requirement should be applied to the dependency list.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum ComplementarySourceAction {
/// Rewrite an already-flattened source-specific dependency into the complementary base edge.
/// Used on the root path, where requirements were flattened before this pass runs.
RewriteFlattenedDependency,
/// Add a new complementary base dependency with the attached source constraint.
/// Used for non-root packages, where the edge is synthesized from raw metadata.
AddDependency,
}
/// Builds the dependency edges emitted for a single package/fork pair.
///
/// The builder first collects the dependencies produced by the normal flattening path, then
/// adjusts them to preserve source-aware edges that only appear in sibling extra or group forks.
pub(super) struct DependencyBuilder<'a, InstalledPackages: InstalledPackagesProvider> {
/// The shared resolver state (requirements, overrides, constraints, excludes, ...).
state: &'a ResolverState<InstalledPackages>,
/// The package whose dependency edges are being built.
package: &'a PubGrubPackage,
/// The fork environment the edges are built for.
env: &'a ResolverEnvironment,
/// The active Python requirement, used to prune markers that can never apply.
python_requirement: &'a PythonRequirement,
/// The accumulated dependency edges, returned by `finish`.
deps: Vec<PubGrubDependency>,
}
impl<'a, InstalledPackages: InstalledPackagesProvider> DependencyBuilder<'a, InstalledPackages> {
/// Creates a builder for the dependency edges emitted while resolving `package` in `env`.
///
/// The builder starts with no dependencies; edges are added via `extend_requirements`
/// and `extend_dependencies`, then post-processed by the complementary-source passes.
pub(super) fn new(
state: &'a ResolverState<InstalledPackages>,
package: &'a PubGrubPackage,
env: &'a ResolverEnvironment,
python_requirement: &'a PythonRequirement,
) -> Self {
Self {
state,
package,
env,
python_requirement,
// Edges are accumulated here and handed back by `finish`.
deps: Vec::new(),
}
}
/// Flattens the given requirements into PubGrub dependencies and appends them to the builder.
pub(super) fn extend_requirements<'req>(
    &mut self,
    requirements: impl IntoIterator<Item = Cow<'req, Requirement>>,
    group_name: Option<&'req GroupName>,
) where
    'a: 'req,
{
    // A single requirement may flatten into several PubGrub edges (e.g. one per extra
    // variant), so append the full expansion for each input requirement.
    for requirement in requirements {
        let expanded = PubGrubDependency::from_requirement(
            &self.state.conflicts,
            requirement,
            group_name,
            Some(self.package),
        );
        self.deps.extend(expanded);
    }
}
/// Appends already-constructed dependencies to the builder.
///
/// Used for edges built outside the normal requirement-flattening path (e.g. system
/// dependencies).
pub(super) fn extend_dependencies(
&mut self,
deps: impl IntoIterator<Item = PubGrubDependency>,
) {
self.deps.extend(deps);
}
/// Rewrites root dependencies whose source is only active in a sibling extra or group fork.
///
/// Unlike non-root packages, root requirements have already been flattened from
/// `ResolverState::requirements`, so this pass mutates the already-added dependencies in place
/// instead of synthesizing new ones from raw metadata.
pub(super) fn rewrite_root_complementary_sources(&mut self) {
let python_marker = self.python_requirement.to_marker_tree();
for requirement in self.state.overrides.apply(self.state.requirements.iter()) {
let requirement: &Requirement = requirement.as_ref();
// Fold any conflict state carried by the requirement into its marker before
// computing the complementary splits.
let marker = ForkScope::from_requirement(requirement).marker();
// `included_in_fork` is always `false` here: root requirements were flattened
// up front, so matching edges are rewritten rather than re-added.
for requirement in
self.complementary_source_requirements(requirement, marker, false, python_marker)
{
self.apply_complementary_source_requirement(
requirement,
ComplementarySourceAction::RewriteFlattenedDependency,
);
}
}
}
/// Adds complementary source-aware base dependencies for requirements that are absent from the
/// current fork but present in a sibling extra or dependency-group fork.
///
/// Unlike the root path, non-root packages still have access to their raw metadata
/// requirements, so this pass inspects that metadata and synthesizes any missing
/// source-aware base dependencies.
pub(super) fn add_complementary_source_dependencies(
&mut self,
requirements: &[Requirement],
dependency_groups: &BTreeMap<GroupName, Box<[Requirement]>>,
) {
let python_marker = self.python_requirement.to_marker_tree();
// First pass: the package's own (override-adjusted) requirements.
for requirement in self.state.overrides.apply(requirements.iter()) {
let raw_requirement = requirement.into_owned();
if !self.can_synthesize_non_root_complementary_source(&raw_requirement.source) {
continue;
}
let marker = ForkScope::from_requirement(&raw_requirement).marker();
let complementary_requirements = self.complementary_source_requirements(
&raw_requirement,
marker,
raw_requirement.evaluate_markers(self.env.marker_environment(), &[]),
python_marker,
);
for requirement in complementary_requirements {
// Extra-gated sources select constraints against the raw extra name.
let extra = Self::single_positive_extra(raw_requirement.marker);
let constraints = self.constraints_for_complementary_extra_source(
&raw_requirement,
requirement.marker,
extra.as_ref(),
python_marker,
);
// Only emit the constraints when the complementary edge was actually added.
if self.apply_complementary_source_requirement(
requirement,
ComplementarySourceAction::AddDependency,
) {
self.extend_requirements(constraints.into_iter().map(Cow::Owned), None);
}
}
}
// Second pass: dependency groups, which need a parent package name to encode the
// group conflict.
let Some(parent_name) = self.package.name_no_root() else {
return;
};
for (group, requirements) in dependency_groups {
for requirement in self.state.overrides.apply(requirements.iter()) {
let raw_requirement = requirement.into_owned();
if !self.can_synthesize_non_root_complementary_source(&raw_requirement.source) {
continue;
}
let marker =
ForkScope::from_group(raw_requirement.marker, parent_name, group).marker();
let complementary_requirements = self.complementary_source_requirements(
&raw_requirement,
marker,
false,
python_marker,
);
for requirement in complementary_requirements {
// Group edges select constraints under the split marker directly.
let split_requirement =
Self::requirement_with_marker(&raw_requirement, requirement.marker);
let constraints =
self.constraints_for_requirement(&split_requirement, None, python_marker);
if self.apply_complementary_source_requirement(
requirement,
ComplementarySourceAction::AddDependency,
) {
self.extend_requirements(constraints.into_iter().map(Cow::Owned), None);
}
}
}
}
}
/// Applies the complementary-source split for `requirement`.
///
/// Both root and non-root packages narrow the existing unsourced base dependency by excluding
/// `requirement.marker`. They differ only in whether the complementary dependency already
/// exists in flattened form (`RewriteFlattenedDependency`) or must be synthesized
/// (`AddDependency`).
///
/// Returns `true` when the split was applied.
fn apply_complementary_source_requirement(
&mut self,
requirement: ComplementarySourceRequirement<'_>,
action: ComplementarySourceAction,
) -> bool {
let name = requirement.name().clone();
let parent = self.package.name_no_root().cloned();
let Some(base_index) = self.find_unsourced_base_index(&name) else {
// Root flattening may have emitted only the sourced edge; in that case,
// synthesize the unsourced side from the root requirements instead.
if action == ComplementarySourceAction::RewriteFlattenedDependency {
return self.add_root_unsourced_complement(requirement, name, parent);
}
return false;
};
if action == ComplementarySourceAction::RewriteFlattenedDependency {
// Locate the already-flattened sourced sibling and rewrite it into the
// complementary base edge carrying the split marker.
let Some(flattened_index) = self.find_source_index(
&name,
&requirement.flattened_source,
requirement.flattened_marker,
) else {
return false;
};
self.deps[flattened_index].package =
PubGrubPackage::from_base_preserving_marker(name.clone(), requirement.marker);
}
// Narrow the unsourced base edge so it no longer covers the sourced marker.
if self.deps[base_index].package.marker().is_false() {
self.deps[base_index].package = PubGrubPackage::from_base_preserving_marker(
name.clone(),
requirement.marker.negate(),
);
} else {
Self::exclude_marker_from_base(&mut self.deps[base_index], &name, requirement.marker);
}
if action == ComplementarySourceAction::AddDependency {
self.deps.push(PubGrubDependency {
package: PubGrubPackage::from_base_preserving_marker(name, requirement.marker),
version: requirement.version,
parent,
source: requirement.attached_source,
});
}
true
}
/// Adds the unsourced side of a root complementary-source split when root flattening only
/// emitted the sourced edge.
///
/// Returns `true` when both the flattened sourced edge and an unsourced root counterpart
/// were found and the split was applied.
fn add_root_unsourced_complement(
&mut self,
requirement: ComplementarySourceRequirement<'_>,
name: PackageName,
parent: Option<PackageName>,
) -> bool {
let Some(flattened_index) = self.find_source_index(
&name,
&requirement.flattened_source,
requirement.flattened_marker,
) else {
return false;
};
let Some((base_marker, base_version)) =
self.root_unsourced_complement(&name, requirement.marker)
else {
return false;
};
// Rewrite the sourced edge to carry the split marker...
self.deps[flattened_index].package =
PubGrubPackage::from_base_preserving_marker(name.clone(), requirement.marker);
// ...and synthesize the source-agnostic complement alongside it.
self.deps.push(PubGrubDependency {
package: PubGrubPackage::from_base_preserving_marker(name, base_marker),
version: base_version,
parent,
source: DependencySource::Unspecified,
});
true
}
/// Returns the normalized complementary-source representations for `requirement`, if needed in
/// the current fork.
fn complementary_source_requirements<'req>(
&self,
requirement: &'req Requirement,
marker: MarkerTree,
included_in_fork: bool,
python_marker: MarkerTree,
) -> Vec<ComplementarySourceRequirement<'req>> {
// Already included via `flatten_requirements`.
if included_in_fork {
return Vec::new();
}
// Only explicit sources (URL or named index) have per-fork source
// state that can leak.
if matches!(
requirement.source,
RequirementSource::Registry { index: None, .. }
) {
return Vec::new();
}
// Requirements with requested extras/groups are handled by the
// existing Extra/Group machinery.
if !requirement.extras.is_empty() || !requirement.groups.is_empty() {
return Vec::new();
}
// Excluded packages never produce edges.
if self.state.excludes.contains(&requirement.name) {
return Vec::new();
}
// This path is specifically for extra/group-gated source splits.
if marker.only_extras().is_true() {
return Vec::new();
}
// Emit one complementary requirement per sibling fork, dropping splits that can
// never apply under the Python requirement or the current fork's environment.
Self::split_complementary_markers(marker)
.into_iter()
.filter(|marker| !python_marker.is_disjoint(*marker))
.filter(|marker| self.env.included_by_marker(*marker))
.map(|marker| ComplementarySourceRequirement {
requirement,
marker,
version: Self::version_for_requirement(requirement),
attached_source: DependencySource::from_source(&requirement.source),
// The flattened sibling is identified by the requirement's raw marker and
// its fully-derived source.
flattened_marker: requirement.marker,
flattened_source: DependencySource::from_requirement(requirement),
})
.collect()
}
/// Returns the version range implied by a complementary requirement.
///
/// Registry requirements carry an explicit version specifier; non-registry sources
/// (URL, Git, path, directory) place no specifier bound on the edge.
fn version_for_requirement(requirement: &Requirement) -> Ranges<Version> {
    if let RequirementSource::Registry { specifier, .. } = &requirement.source {
        Ranges::from(specifier.clone())
    } else {
        Ranges::full()
    }
}
/// Returns `true` when a non-root complementary dependency can be synthesized for `source`.
///
/// Direct URL-like sources are validated against root requirements and constraints. Recreating
/// them from package metadata would turn them into disallowed transitive URL dependencies.
fn can_synthesize_non_root_complementary_source(&self, source: &RequirementSource) -> bool {
    // Requirements pinned to an explicit index are always safe to recreate.
    if matches!(source, RequirementSource::Registry { index: Some(_), .. }) {
        return true;
    }
    // Everything else is only permitted for the project itself or a workspace member.
    match self.package.name_no_root() {
        Some(package_name) => {
            self.state.project.as_ref() == Some(package_name)
                || self.state.workspace_members.contains(package_name)
        }
        None => false,
    }
}
/// Returns the positive extra referenced by `marker`, if it names exactly one extra.
///
/// Yields `None` when the marker names no extra, names more than one distinct extra, or
/// contains any negated (`!=`) extra term.
fn single_positive_extra(marker: MarkerTree) -> Option<ExtraName> {
    let mut found: Option<ExtraName> = None;
    let mut ambiguous = false;
    let mut negated = false;
    marker.visit_extras(|operator, candidate| match operator {
        MarkerOperator::Equal => {
            if let Some(existing) = &found {
                if existing != candidate {
                    ambiguous = true;
                }
            } else {
                found = Some(candidate.clone());
            }
        }
        MarkerOperator::NotEqual => negated = true,
        _ => {}
    });
    if negated || ambiguous { None } else { found }
}
/// Returns the marker split for a complementary dependency.
///
/// When the source applies to multiple sibling extra or group forks, emit one complementary
/// edge per fork instead of a single marker spanning all of them.
fn split_complementary_markers(marker: MarkerTree) -> Vec<MarkerTree> {
    // Collect the distinct positively-referenced extras, in visitation order.
    let mut seen_extras = Vec::new();
    marker.visit_extras(|operator, candidate| {
        if operator == MarkerOperator::Equal && !seen_extras.contains(candidate) {
            seen_extras.push(candidate.clone());
        }
    });
    // Zero or one extra: no split required.
    if seen_extras.len() <= 1 {
        return vec![marker];
    }
    // Narrow the marker to each extra in turn, keeping satisfiable, distinct splits.
    let mut result = Vec::new();
    for extra in &seen_extras {
        let mut narrowed = marker;
        narrowed.and(Self::extra_marker(extra));
        if !narrowed.is_false() && !result.contains(&narrowed) {
            result.push(narrowed);
        }
    }
    result
}
/// Returns a marker that activates only the given extra or encoded group conflict item.
fn extra_marker(extra: &ExtraName) -> MarkerTree {
    let expression = MarkerExpression::Extra {
        operator: ExtraOperator::Equal,
        name: MarkerValueExtra::Extra(extra.clone()),
    };
    MarkerTree::expression(expression)
}
/// Returns the constraints that must be present in the sibling extra or group fork for
/// `requirement`.
fn constraints_for_requirement(
    &self,
    requirement: &Requirement,
    extra: Option<&ExtraName>,
    python_marker: MarkerTree,
) -> Vec<Requirement> {
    // Delegate constraint selection to the resolver state, then take ownership of the
    // surviving requirements.
    let constraints = self.state.constraints_for_requirement(
        Cow::Borrowed(requirement),
        extra,
        self.env,
        python_marker,
        self.python_requirement,
    );
    constraints.map(Cow::into_owned).collect()
}
/// Returns constraints for an extra-gated complementary source dependency.
///
/// Source extra markers are encoded as conflict markers on the synthesized dependency edge.
/// Root constraints, however, are authored against the raw extra name. Select constraints using
/// the raw requirement marker, then emit them under the encoded marker for this fork.
fn constraints_for_complementary_extra_source(
&self,
raw_requirement: &Requirement,
marker: MarkerTree,
extra: Option<&ExtraName>,
python_marker: MarkerTree,
) -> Vec<Requirement> {
// Without a single positive extra to evaluate against, fall back to the generic path.
let Some(extra) = extra else {
let split_requirement = Self::requirement_with_marker(raw_requirement, marker);
return self.constraints_for_requirement(&split_requirement, None, python_marker);
};
let Some(constraints) = self.state.constraints.get(&raw_requirement.name) else {
return Vec::new();
};
constraints
.iter()
.filter_map(|constraint| {
// Select against the raw (extra-named) marker...
let mut raw_marker = constraint.marker;
raw_marker.and(raw_requirement.marker);
if raw_marker.is_false() {
return None;
}
if !constraint
.evaluate_markers(self.env.marker_environment(), slice::from_ref(extra))
{
return None;
}
// ...then re-scope the surviving constraint under the encoded fork marker.
let mut scoped_marker = raw_marker.without_extras();
scoped_marker.and(marker);
if scoped_marker.is_false()
|| python_marker.is_disjoint(scoped_marker)
|| !self.env.included_by_marker(scoped_marker)
{
return None;
}
Some(Self::requirement_with_marker(constraint, scoped_marker))
})
.collect()
}
/// Returns the root source-agnostic requirement that covers the complement of a sourced edge.
///
/// Scans the (override-adjusted) root requirements for a plain registry requirement on `name`
/// and narrows its marker to the complement of `source_marker`. Returns the narrowed marker
/// and version range of the first such requirement that remains satisfiable in this fork, or
/// `None` when no unsourced counterpart exists.
fn root_unsourced_complement(
    &self,
    name: &PackageName,
    source_marker: MarkerTree,
) -> Option<(MarkerTree, Ranges<Version>)> {
    let complement = source_marker.negate();
    let python_marker = self.python_requirement.to_marker_tree();
    // `find_map` replaces the `filter_map(...).next()` pattern (clippy: filter_map_next)
    // and short-circuits on the first matching requirement.
    self.state
        .overrides
        .apply(self.state.requirements.iter())
        .find_map(|requirement| {
            let requirement: &Requirement = requirement.as_ref();
            if &requirement.name != name {
                return None;
            }
            // Only a source-agnostic registry requirement can act as the unsourced side.
            if !matches!(
                requirement.source,
                RequirementSource::Registry { index: None, .. }
            ) {
                return None;
            }
            // Requirements with extras/groups are handled by the Extra/Group machinery.
            if !requirement.extras.is_empty() || !requirement.groups.is_empty() {
                return None;
            }
            if self.state.excludes.contains(&requirement.name) {
                return None;
            }
            // Narrow to the complement and drop markers that cannot hold in this fork.
            let mut marker = requirement.marker;
            marker.and(complement);
            if marker.is_false()
                || python_marker.is_disjoint(marker)
                || !self.env.included_by_marker(marker)
            {
                return None;
            }
            Some((marker, Self::version_for_requirement(requirement)))
        })
}
/// Clones a requirement with a replacement marker.
fn requirement_with_marker(requirement: &Requirement, marker: MarkerTree) -> Requirement {
    // Functional-update syntax: clone every field, then override the marker.
    Requirement {
        marker,
        ..requirement.clone()
    }
}
/// Removes `marker` from the existing unsourced base dependency for `name`.
///
/// Narrows the base edge's package marker to `base_marker AND NOT marker`.
fn exclude_marker_from_base(
    base: &mut PubGrubDependency,
    name: &PackageName,
    marker: MarkerTree,
) {
    let excluded = marker.negate();
    let mut narrowed = base.package.marker();
    narrowed.and(excluded);
    base.package = PubGrubPackage::from_base_preserving_marker(name.clone(), narrowed);
}
/// Returns the index of the dependency for `name` whose package marker and edge-local source
/// exactly match.
fn find_source_index(
    &self,
    name: &PackageName,
    source: &DependencySource,
    marker: MarkerTree,
) -> Option<usize> {
    self.deps.iter().enumerate().find_map(|(index, dep)| {
        let is_match = dep.package.name() == Some(name)
            && dep.package.marker() == marker
            && &dep.source == source;
        is_match.then_some(index)
    })
}
/// Returns the index of the plain base dependency for `name`, if it exists.
fn find_unsourced_base_index(&self, name: &PackageName) -> Option<usize> {
self.deps.iter().position(|dep| {
dep.package.name() == Some(name)
&& dep.package.extra().is_none()
&& dep.package.group().is_none()
&& matches!(&dep.source, DependencySource::Unspecified)
})
}
/// Returns the accumulated dependency edges.
///
/// Consumes the builder; no further edges can be added after this call.
pub(super) fn finish(self) -> Vec<PubGrubDependency> {
self.deps
}
}
+35 -22
View File
@@ -1,12 +1,12 @@
use rustc_hash::FxHashMap;
use uv_distribution_types::{Requirement, RequirementSource};
use uv_normalize::PackageName;
use uv_normalize::{GroupName, PackageName};
use uv_pep508::{MarkerTree, RequirementOrigin};
use uv_pypi_types::{ConflictItem, ConflictItemRef, ConflictKind};
use uv_pypi_types::{ConflictItem, ConflictItemRef};
use crate::ResolverEnvironment;
use crate::universal_marker::{ConflictMarker, UniversalMarker};
use crate::universal_marker::UniversalMarker;
/// A set of package names associated with a given fork.
pub(crate) type ForkSet = ForkMap<()>;
@@ -24,31 +24,45 @@ struct Entry<T> {
/// The fork visibility of an entry.
#[derive(Debug, Clone, Eq, PartialEq)]
struct ForkScope {
pub(super) struct ForkScope {
marker: MarkerTree,
conflict: Option<ConflictItem>,
}
impl ForkScope {
/// Derive the scope under which a requirement should be visible in forked resolution.
///
/// Group conflicts are folded into the marker so group-scoped entries only appear in forks
/// where that group is active.
fn from_requirement(requirement: &Requirement) -> Self {
/// Derives the fork scope implied by a requirement's marker and conflict state.
pub(super) fn from_requirement(requirement: &Requirement) -> Self {
let conflict = Self::conflict_for_requirement(requirement);
let marker = conflict
.as_ref()
.filter(|conflict_item| matches!(conflict_item.kind(), ConflictKind::Group(_)))
.map_or(requirement.marker, |conflict_item| {
UniversalMarker::new(
requirement.marker.without_extras(),
ConflictMarker::from_conflict_item(conflict_item),
)
.combined()
});
let marker = conflict.as_ref().map_or(requirement.marker, |conflict| {
Self::marker_with_conflict(requirement.marker, conflict)
});
Self { marker, conflict }
}
/// Derives a fork scope for a dependency-group requirement.
///
/// Encodes the `(project, group)` pair as a conflict item and folds it into the marker so
/// the entry is only visible in forks where that group is active.
pub(super) fn from_group(
marker: MarkerTree,
project_name: &PackageName,
group: &GroupName,
) -> Self {
let conflict = ConflictItem::from((project_name.clone(), group.clone()));
let marker = Self::marker_with_conflict(marker, &conflict);
Self {
marker,
conflict: Some(conflict),
}
}
/// Returns the marker under which the entry is visible.
pub(super) fn marker(&self) -> MarkerTree {
self.marker
}
/// Returns the conflict item that must remain enabled for this scope to match, if any.
fn conflict(&self) -> Option<ConflictItemRef<'_>> {
self.conflict.as_ref().map(ConflictItem::as_ref)
}
fn conflict_for_requirement(requirement: &Requirement) -> Option<ConflictItem> {
let conflict = match &requirement.source {
RequirementSource::Registry { conflict, .. } => conflict.clone(),
@@ -65,9 +79,8 @@ impl ForkScope {
})
}
/// Return the conflict item that further restricts this scope, if any.
fn conflict(&self) -> Option<ConflictItemRef<'_>> {
self.conflict.as_ref().map(ConflictItem::as_ref)
fn marker_with_conflict(marker: MarkerTree, conflict: &ConflictItem) -> MarkerTree {
UniversalMarker::from_marker_and_conflict_item(marker, conflict).combined()
}
fn matches(&self, env: &ResolverEnvironment) -> bool {
+21 -22
View File
@@ -62,6 +62,7 @@ pub(crate) use crate::resolver::availability::{
UnavailableVersion,
};
use crate::resolver::batch_prefetch::BatchPrefetcher;
use crate::resolver::dependency_builder::DependencyBuilder;
pub use crate::resolver::derivation::DerivationChainBuilder;
pub use crate::resolver::environment::ResolverEnvironment;
use crate::resolver::environment::{
@@ -84,6 +85,7 @@ pub(crate) use provider::MetadataUnavailable;
mod availability;
mod batch_prefetch;
mod dependency_builder;
mod derivation;
mod environment;
mod fork_map;
@@ -1789,16 +1791,10 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
python_requirement,
);
requirements
.flat_map(move |requirement| {
PubGrubDependency::from_requirement(
&self.conflicts,
requirement,
None,
Some(package),
)
})
.collect()
let mut compiler = DependencyBuilder::new(self, package, env, python_requirement);
compiler.extend_requirements(requirements, None);
compiler.rewrite_root_complementary_sources();
compiler.finish()
}
PubGrubPackageInner::Package {
@@ -1917,18 +1913,21 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
python_requirement,
);
requirements
.filter(|requirement| !self.excludes.contains(&requirement.name))
.flat_map(|requirement| {
PubGrubDependency::from_requirement(
&self.conflicts,
requirement,
group.as_ref(),
Some(package),
)
})
.chain(system_dependencies)
.collect()
let mut compiler = DependencyBuilder::new(self, package, env, python_requirement);
compiler.extend_requirements(
requirements.filter(|requirement| !self.excludes.contains(&requirement.name)),
group.as_ref(),
);
compiler.extend_dependencies(system_dependencies);
if extra.is_none() && group.is_none() && env.marker_environment().is_none() {
compiler.add_complementary_source_dependencies(
&metadata.requires_dist,
&metadata.dependency_groups,
);
}
compiler.finish()
}
PubGrubPackageInner::Python(_) => return Ok(Dependencies::Unforkable(Vec::default())),
@@ -100,6 +100,20 @@ impl UniversalMarker {
Self::from_combined(pep508_marker)
}
/// Creates a new universal marker from a PEP 508 marker and a conflict item.
///
/// Any `extra` terms in `pep508_marker` are treated as source extras and are replaced by the
/// encoded conflict item.
pub(crate) fn from_marker_and_conflict_item(
pep508_marker: MarkerTree,
conflict: &ConflictItem,
) -> Self {
// Strip the raw extra terms, then re-encode the conflict as the conflict marker half.
Self::new(
pep508_marker.without_extras(),
ConflictMarker::from_conflict_item(conflict),
)
}
/// Creates a new universal marker from a marker that has already been
/// combined from a PEP 508 and conflict marker.
pub(crate) fn from_combined(marker: MarkerTree) -> Self {
+2 -2
View File
@@ -3427,7 +3427,7 @@ fn requirements_txt_complex_conflict_markers() -> Result<()> {
# via torch
----- stderr -----
Resolved 33 packages in [TIME]
Resolved 34 packages in [TIME]
");
uv_snapshot!(context.filters(), context.export().arg("--extra").arg("cpu"), @r"
@@ -3581,7 +3581,7 @@ fn requirements_txt_complex_conflict_markers() -> Result<()> {
# via torch
----- stderr -----
Resolved 33 packages in [TIME]
Resolved 34 packages in [TIME]
");
Ok(())
+1705 -19
View File
File diff suppressed because it is too large Load Diff
+49
View File
@@ -8484,6 +8484,55 @@ fn tool_uv_sources_is_in_preview() -> Result<()> {
Ok(())
}
/// Installing a local package should not reintroduce unrequested dependency
/// groups while resolving its sourced dependencies.
#[test]
fn tool_uv_sources_ignore_unrequested_dependency_groups_for_path_install() -> Result<()> {
let context = uv_test::test_context!("3.12").with_exclude_newer("2025-01-30T00:00:00Z");
let package = context.temp_dir.child("pkg");
package.create_dir_all()?;
let pyproject_toml = package.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "pkg"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["jinja2>=3.1.5"]
[dependency-groups]
cu118 = ["jinja2>=3.1.5"]
[tool.uv.sources]
jinja2 = [
{ index = "torch-cu118", group = "cu118" },
]
[[tool.uv.index]]
name = "torch-cu118"
url = "https://astral-sh.github.io/pytorch-mirror/whl/cu118"
explicit = true
"#})?;
uv_snapshot!(context.filters(), context.pip_install()
.arg(package.path()), @"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 3 packages in [TIME]
Prepared 3 packages in [TIME]
Installed 3 packages in [TIME]
+ jinja2==3.1.5
+ markupsafe==3.0.2
+ pkg==0.1.0 (from file://[TEMP_DIR]/pkg)
"
);
Ok(())
}
/// Allow transitive URLs via recursive extras.
#[test]
fn recursive_extra_transitive_url() -> Result<()> {