diff --git a/crates/pep508-rs/src/marker.rs b/crates/pep508-rs/src/marker.rs index dd851dc7e9bf..ec12f3a6999f 100644 --- a/crates/pep508-rs/src/marker.rs +++ b/crates/pep508-rs/src/marker.rs @@ -1586,6 +1586,11 @@ impl MarkerTree { parse_markers(markers, reporter) } + /// Whether the marker is `MarkerTree::And(Vec::new())`. + pub fn is_universal(&self) -> bool { + self == &MarkerTree::And(Vec::new()) + } + /// Does this marker apply in the given environment? pub fn evaluate(&self, env: &MarkerEnvironment, extras: &[ExtraName]) -> bool { self.evaluate_optional_environment(Some(env), extras) diff --git a/crates/pypi-types/src/requirement.rs b/crates/pypi-types/src/requirement.rs index 0c3d0ce8629b..a0d5245125f1 100644 --- a/crates/pypi-types/src/requirement.rs +++ b/crates/pypi-types/src/requirement.rs @@ -8,7 +8,9 @@ use pep508_rs::{MarkerEnvironment, MarkerTree, RequirementOrigin, VerbatimUrl, V use uv_git::{GitReference, GitSha}; use uv_normalize::{ExtraName, PackageName}; -use crate::{ParsedUrl, VerbatimParsedUrl}; +use crate::{ + ParsedArchiveUrl, ParsedDirectoryUrl, ParsedGitUrl, ParsedPathUrl, ParsedUrl, VerbatimParsedUrl, +}; /// A representation of dependency on a package, an extension over a PEP 508's requirement. /// @@ -251,6 +253,65 @@ impl RequirementSource { } } + /// Convert the source to a [`VerbatimParsedUrl`], if it's a URL source. + pub fn to_verbatim_parsed_url(&self) -> Option { + match &self { + Self::Registry { .. } => None, + Self::Url { + subdirectory, + location, + url, + } => Some(VerbatimParsedUrl { + parsed_url: ParsedUrl::Archive(ParsedArchiveUrl::from_source( + location.clone(), + subdirectory.clone(), + )), + verbatim: url.clone(), + }), + Self::Path { + install_path, + lock_path, + url, + } => Some(VerbatimParsedUrl { + parsed_url: ParsedUrl::Path(ParsedPathUrl::from_source( + install_path.clone(), + lock_path.clone(), + url.to_url(), + )), + verbatim: url.clone(), + }), + Self::Directory { + install_path, + lock_path, + editable, + url, + } => Some(VerbatimParsedUrl { + parsed_url: ParsedUrl::Directory(ParsedDirectoryUrl::from_source( + install_path.clone(), + lock_path.clone(), + *editable, + url.to_url(), + )), + verbatim: url.clone(), + }), + Self::Git { + repository, + reference, + precise, + subdirectory, + url, + } => Some(VerbatimParsedUrl { + parsed_url: ParsedUrl::Git(ParsedGitUrl::from_source( + repository.clone(), + reference.clone(), + *precise, + subdirectory.clone(), + )), + verbatim: url.clone(), + }), + } + } + /// Returns `true` if the source is editable. pub fn is_editable(&self) -> bool { matches!(self, Self::Directory { editable: true, .. 
}) diff --git a/crates/uv-resolver/src/error.rs b/crates/uv-resolver/src/error.rs index bb6a4d17e04d..9be31aa2af16 100644 --- a/crates/uv-resolver/src/error.rs +++ b/crates/uv-resolver/src/error.rs @@ -9,11 +9,12 @@ use rustc_hash::{FxHashMap, FxHashSet}; use distribution_types::{BuiltDist, IndexLocations, InstalledDist, SourceDist}; use pep440_rs::Version; -use pep508_rs::Requirement; +use pep508_rs::{MarkerTree, Requirement}; use uv_normalize::PackageName; use crate::candidate_selector::CandidateSelector; use crate::dependency_provider::UvDependencyProvider; +use crate::fork_urls::ForkUrls; use crate::pubgrub::{ PubGrubPackage, PubGrubPackageInner, PubGrubReportFormatter, PubGrubSpecifierError, }; @@ -48,11 +49,18 @@ pub enum ResolveError { #[error(transparent)] PubGrubSpecifier(#[from] PubGrubSpecifierError), - #[error("Requirements contain conflicting URLs for package `{0}`:\n- {1}\n- {2}")] - ConflictingUrlsDirect(PackageName, String, String), + #[error("Overrides contain conflicting URLs for package `{0}`:\n- {1}\n- {2}")] + ConflictingOverrideUrls(PackageName, String, String), - #[error("There are conflicting URLs for package `{0}`:\n- {1}\n- {2}")] - ConflictingUrlsTransitive(PackageName, String, String), + #[error("Requirements contain conflicting URLs for package `{0}`:\n- {}", _1.join("\n- "))] + ConflictingUrls(PackageName, Vec), + + #[error("Requirements contain conflicting URLs for package `{package_name}` in split `{fork_markers}`:\n- {}", urls.join("\n- "))] + ConflictingUrlsInFork { + package_name: PackageName, + urls: Vec, + fork_markers: MarkerTree, + }, #[error("Package `{0}` attempted to resolve via URL: {1}. URL dependencies must be expressed as direct requirements or constraints. Consider adding `{0} @ {1}` to your dependencies or constraints file.")] DisallowedUrl(PackageName, String), @@ -156,8 +164,16 @@ fn collapse_proxies( } } -impl From> for ResolveError { - fn from(value: pubgrub::error::PubGrubError) -> Self { +impl ResolveError { + /// Convert an error from PubGrub to a resolver error. + /// + /// [`ForkUrls`] breaks the usual pattern used here since it's part of one the [`SolveState`], + /// not of the [`ResolverState`], so we have to take it from the fork that errored instead of + /// being able to add it later. 
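// Illustrative, self-contained sketch (not part of the patch above; `Conflict`,
// `SimplifiedForkUrls`, and plain `String` URLs/markers are hypothetical stand-ins for
// uv's real types) of how the two new conflict variants are intended to be raised: a URL
// clash becomes a plain `ConflictingUrls` when the fork markers are universal (we never
// forked), and `ConflictingUrlsInFork` otherwise, mirroring `MarkerTree::is_universal`
// and `ForkUrls::insert` elsewhere in this change.
use std::collections::hash_map::Entry;
use std::collections::HashMap;

#[derive(Debug)]
enum Conflict {
    Urls { package: String, urls: Vec<String> },
    UrlsInFork { package: String, urls: Vec<String>, fork_markers: String },
}

/// At most one URL per package within a single resolver fork.
#[derive(Default)]
struct SimplifiedForkUrls(HashMap<String, String>);

impl SimplifiedForkUrls {
    fn insert(&mut self, package: &str, url: &str, fork_markers: &str) -> Result<(), Conflict> {
        match self.0.entry(package.to_string()) {
            Entry::Occupied(previous) => {
                if previous.get() != url {
                    let mut urls = vec![previous.get().clone(), url.to_string()];
                    urls.sort();
                    // An empty marker string stands in for a universal marker (no fork).
                    return if fork_markers.is_empty() {
                        Err(Conflict::Urls { package: package.to_string(), urls })
                    } else {
                        Err(Conflict::UrlsInFork {
                            package: package.to_string(),
                            urls,
                            fork_markers: fork_markers.to_string(),
                        })
                    };
                }
            }
            Entry::Vacant(vacant) => {
                vacant.insert(url.to_string());
            }
        }
        Ok(())
    }
}

fn main() {
    let mut fork = SimplifiedForkUrls::default();
    fork.insert("iniconfig", "https://example.invalid/iniconfig-1.1.1.whl", "python_version < '3.12'")
        .unwrap();
    // A second, different URL for the same package within the same fork is a conflict.
    let err = fork
        .insert("iniconfig", "https://example.invalid/iniconfig-2.0.0.whl", "python_version < '3.12'")
        .unwrap_err();
    println!("{err:?}");
}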
+ pub(crate) fn from_pubgrub_error( + value: pubgrub::error::PubGrubError, + fork_urls: ForkUrls, + ) -> Self { match value { // These are all never type variant that can never match, but never is experimental pubgrub::error::PubGrubError::ErrorChoosingPackageVersion(_) @@ -178,6 +194,7 @@ impl From> for ResolveError { index_locations: None, unavailable_packages: FxHashMap::default(), incomplete_packages: FxHashMap::default(), + fork_urls, }) } pubgrub::error::PubGrubError::SelfDependency { package, version } => { @@ -200,6 +217,7 @@ pub struct NoSolutionError { index_locations: Option, unavailable_packages: FxHashMap, incomplete_packages: FxHashMap>, + fork_urls: ForkUrls, } impl std::error::Error for NoSolutionError {} @@ -222,6 +240,7 @@ impl std::fmt::Display for NoSolutionError { &self.index_locations, &self.unavailable_packages, &self.incomplete_packages, + &self.fork_urls, ) { write!(f, "\n\n{hint}")?; } diff --git a/crates/uv-resolver/src/fork_urls.rs b/crates/uv-resolver/src/fork_urls.rs new file mode 100644 index 000000000000..675e421075be --- /dev/null +++ b/crates/uv-resolver/src/fork_urls.rs @@ -0,0 +1,62 @@ +use std::collections::hash_map::Entry; + +use rustc_hash::FxHashMap; + +use distribution_types::Verbatim; +use pep508_rs::MarkerTree; +use pypi_types::VerbatimParsedUrl; +use uv_normalize::PackageName; + +use crate::ResolveError; + +/// See [`crate::resolver::SolveState`]. +#[derive(Default, Debug, Clone)] +pub(crate) struct ForkUrls(FxHashMap); + +impl ForkUrls { + /// Get the URL previously used for a package in this fork. + pub(crate) fn get(&self, package_name: &PackageName) -> Option<&VerbatimParsedUrl> { + self.0.get(package_name) + } + + /// Whether we use a URL for this package. + pub(crate) fn contains_key(&self, package_name: &PackageName) -> bool { + self.0.contains_key(package_name) + } + + /// Check that this is the only URL used for this package in this fork. + pub(crate) fn insert( + &mut self, + package_name: &PackageName, + url: &VerbatimParsedUrl, + fork_markers: &MarkerTree, + ) -> Result<(), ResolveError> { + match self.0.entry(package_name.clone()) { + Entry::Occupied(previous) => { + if previous.get() != url { + let mut conflicting_url = vec![ + previous.get().verbatim.verbatim().to_string(), + url.verbatim.verbatim().to_string(), + ]; + conflicting_url.sort(); + return if fork_markers.is_universal() { + Err(ResolveError::ConflictingUrls( + package_name.clone(), + conflicting_url, + )) + } else { + Err(ResolveError::ConflictingUrlsInFork { + package_name: package_name.clone(), + urls: conflicting_url, + fork_markers: fork_markers.clone(), + }) + }; + } + } + Entry::Vacant(vacant) => { + vacant.insert(url.clone()); + } + } + Ok(()) + } +} diff --git a/crates/uv-resolver/src/lib.rs b/crates/uv-resolver/src/lib.rs index 445d21f19c22..6c1be8ffe5f4 100644 --- a/crates/uv-resolver/src/lib.rs +++ b/crates/uv-resolver/src/lib.rs @@ -30,6 +30,7 @@ mod error; mod exclude_newer; mod exclusions; mod flat_index; +mod fork_urls; mod lock; mod manifest; mod marker; diff --git a/crates/uv-resolver/src/manifest.rs b/crates/uv-resolver/src/manifest.rs index e124a9a6dbb5..2274d476d966 100644 --- a/crates/uv-resolver/src/manifest.rs +++ b/crates/uv-resolver/src/manifest.rs @@ -98,6 +98,16 @@ impl Manifest { &'a self, markers: Option<&'a MarkerEnvironment>, mode: DependencyMode, + ) -> impl Iterator + 'a { + self.requirements_no_overrides(markers, mode) + .chain(self.overrides(markers, mode)) + } + + /// Like [`Self::requirements`], but without the overrides. 
+ pub fn requirements_no_overrides<'a>( + &'a self, + markers: Option<&'a MarkerEnvironment>, + mode: DependencyMode, ) -> impl Iterator + 'a { match mode { // Include all direct and transitive requirements, with constraints and overrides applied. @@ -120,11 +130,6 @@ impl Manifest { self.constraints .requirements() .filter(move |requirement| requirement.evaluate_markers(markers, &[])), - ) - .chain( - self.overrides - .requirements() - .filter(move |requirement| requirement.evaluate_markers(markers, &[])), ), ), // Include direct requirements, with constraints and overrides applied. @@ -132,7 +137,28 @@ impl Manifest { self.overrides .apply(&self.requirements) .chain(self.constraints.requirements()) - .chain(self.overrides.requirements()) + .filter(move |requirement| requirement.evaluate_markers(markers, &[])), + ), + } + } + + /// Only the overrides from [`Self::requirements`]. + pub fn overrides<'a>( + &'a self, + markers: Option<&'a MarkerEnvironment>, + mode: DependencyMode, + ) -> impl Iterator + 'a { + match mode { + // Include all direct and transitive requirements, with constraints and overrides applied. + DependencyMode::Transitive => Either::Left( + self.overrides + .requirements() + .filter(move |requirement| requirement.evaluate_markers(markers, &[])), + ), + // Include direct requirements, with constraints and overrides applied. + DependencyMode::Direct => Either::Right( + self.overrides + .requirements() .filter(move |requirement| requirement.evaluate_markers(markers, &[])), ), } diff --git a/crates/uv-resolver/src/pubgrub/dependencies.rs b/crates/uv-resolver/src/pubgrub/dependencies.rs index bfc774b0bff3..c9c5a6b871a1 100644 --- a/crates/uv-resolver/src/pubgrub/dependencies.rs +++ b/crates/uv-resolver/src/pubgrub/dependencies.rs @@ -1,43 +1,46 @@ +use std::iter; + use itertools::Itertools; use pubgrub::range::Range; use tracing::warn; -use distribution_types::Verbatim; -use pep440_rs::Version; +use pep440_rs::{Version, VersionSpecifiers}; use pypi_types::{ ParsedArchiveUrl, ParsedDirectoryUrl, ParsedGitUrl, ParsedPathUrl, ParsedUrl, Requirement, - RequirementSource, + RequirementSource, VerbatimParsedUrl, }; -use uv_git::GitResolver; use uv_normalize::{ExtraName, PackageName}; use crate::pubgrub::{PubGrubPackage, PubGrubPackageInner}; -use crate::resolver::{Locals, Urls}; +use crate::resolver::Locals; use crate::{PubGrubSpecifier, ResolveError}; #[derive(Clone, Debug)] pub(crate) struct PubGrubDependency { pub(crate) package: PubGrubPackage, pub(crate) version: Range, + /// This field is set if the [`Requirement`] had a URL. We still use a URL from [`Urls`] + /// even if this field is None where there is an override with a URL or there is a different + /// requirement or constraint for the same package that has a URL. + pub(crate) url: Option, } impl PubGrubDependency { pub(crate) fn from_requirement<'a>( requirement: &'a Requirement, source_name: Option<&'a PackageName>, - urls: &'a Urls, locals: &'a Locals, - git: &'a GitResolver, ) -> impl Iterator> + 'a { // Add the package, plus any extra variants. 
- std::iter::once(None) + iter::once(None) .chain(requirement.extras.clone().into_iter().map(Some)) - .map(|extra| { - PubGrubRequirement::from_requirement(requirement, extra, urls, locals, git) - }) - .filter_map_ok(move |pubgrub_requirement| { - let PubGrubRequirement { package, version } = pubgrub_requirement; - + .map(|extra| PubGrubRequirement::from_requirement(requirement, extra, locals)) + .filter_map_ok(move |requirement| { + let PubGrubRequirement { + package, + version, + url, + } = requirement; match &*package { PubGrubPackageInner::Package { name, .. } => { // Detect self-dependencies. @@ -49,11 +52,13 @@ impl PubGrubDependency { Some(PubGrubDependency { package: package.clone(), version: version.clone(), + url, }) } PubGrubPackageInner::Marker { .. } => Some(PubGrubDependency { package: package.clone(), version: version.clone(), + url, }), PubGrubPackageInner::Extra { name, .. } => { debug_assert!( @@ -63,6 +68,7 @@ impl PubGrubDependency { Some(PubGrubDependency { package: package.clone(), version: version.clone(), + url: None, }) } _ => None, @@ -76,81 +82,31 @@ impl PubGrubDependency { pub(crate) struct PubGrubRequirement { pub(crate) package: PubGrubPackage, pub(crate) version: Range, + pub(crate) url: Option, } impl PubGrubRequirement { - /// Convert a [`Requirement`] to a PubGrub-compatible package and range. + /// Convert a [`Requirement`] to a PubGrub-compatible package and range, while returning the URL + /// on the [`Requirement`], if any. pub(crate) fn from_requirement( requirement: &Requirement, extra: Option, - urls: &Urls, locals: &Locals, - git: &GitResolver, ) -> Result { - match &requirement.source { + let (verbatim_url, parsed_url) = match &requirement.source { RequirementSource::Registry { specifier, .. } => { - // TODO(konsti): We're currently losing the index information here, but we need - // either pass it to `PubGrubPackage` or the `ResolverProvider` beforehand. - // If the specifier is an exact version, and the user requested a local version that's - // more precise than the specifier, use the local version instead. - let version = if let Some(expected) = locals.get(&requirement.name) { - specifier - .iter() - .map(|specifier| { - Locals::map(expected, specifier) - .map_err(ResolveError::InvalidVersion) - .and_then(|specifier| Ok(PubGrubSpecifier::try_from(&specifier)?)) - }) - .fold_ok(Range::full(), |range, specifier| { - range.intersection(&specifier.into()) - })? 
- } else { - PubGrubSpecifier::try_from(specifier)?.into() - }; - - Ok(Self { - package: PubGrubPackage::from_package( - requirement.name.clone(), - extra, - requirement.marker.clone(), - urls, - ), - version, - }) + return Self::from_registry_requirement(specifier, extra, requirement, locals); } RequirementSource::Url { subdirectory, location, url, } => { - let Some(expected) = urls.get(&requirement.name) else { - return Err(ResolveError::DisallowedUrl( - requirement.name.clone(), - url.to_string(), - )); - }; - let parsed_url = ParsedUrl::Archive(ParsedArchiveUrl::from_source( location.clone(), subdirectory.clone(), )); - if !Urls::same_resource(&expected.parsed_url, &parsed_url, git) { - return Err(ResolveError::ConflictingUrlsTransitive( - requirement.name.clone(), - expected.verbatim.verbatim().to_string(), - url.verbatim().to_string(), - )); - } - - Ok(Self { - package: PubGrubPackage::from_url( - requirement.name.clone(), - extra, - requirement.marker.clone(), - expected.clone(), - ), - version: Range::full(), - }) + (url, parsed_url) } RequirementSource::Git { repository, @@ -159,71 +115,25 @@ impl PubGrubRequirement { url, subdirectory, } => { - let Some(expected) = urls.get(&requirement.name) else { - return Err(ResolveError::DisallowedUrl( - requirement.name.clone(), - url.to_string(), - )); - }; - let parsed_url = ParsedUrl::Git(ParsedGitUrl::from_source( repository.clone(), reference.clone(), *precise, subdirectory.clone(), )); - if !Urls::same_resource(&expected.parsed_url, &parsed_url, git) { - return Err(ResolveError::ConflictingUrlsTransitive( - requirement.name.clone(), - expected.verbatim.verbatim().to_string(), - url.verbatim().to_string(), - )); - } - - Ok(Self { - package: PubGrubPackage::from_url( - requirement.name.clone(), - extra, - requirement.marker.clone(), - expected.clone(), - ), - version: Range::full(), - }) + (url, parsed_url) } RequirementSource::Path { url, install_path, lock_path, } => { - let Some(expected) = urls.get(&requirement.name) else { - return Err(ResolveError::DisallowedUrl( - requirement.name.clone(), - url.to_string(), - )); - }; - let parsed_url = ParsedUrl::Path(ParsedPathUrl::from_source( install_path.clone(), lock_path.clone(), url.to_url(), )); - if !Urls::same_resource(&expected.parsed_url, &parsed_url, git) { - return Err(ResolveError::ConflictingUrlsTransitive( - requirement.name.clone(), - expected.verbatim.verbatim().to_string(), - url.verbatim().to_string(), - )); - } - - Ok(Self { - package: PubGrubPackage::from_url( - requirement.name.clone(), - extra, - requirement.marker.clone(), - expected.clone(), - ), - version: Range::full(), - }) + (url, parsed_url) } RequirementSource::Directory { editable, @@ -231,37 +141,62 @@ impl PubGrubRequirement { install_path, lock_path, } => { - let Some(expected) = urls.get(&requirement.name) else { - return Err(ResolveError::DisallowedUrl( - requirement.name.clone(), - url.to_string(), - )); - }; - let parsed_url = ParsedUrl::Directory(ParsedDirectoryUrl::from_source( install_path.clone(), lock_path.clone(), *editable, url.to_url(), )); - if !Urls::same_resource(&expected.parsed_url, &parsed_url, git) { - return Err(ResolveError::ConflictingUrlsTransitive( - requirement.name.clone(), - expected.verbatim.verbatim().to_string(), - url.verbatim().to_string(), - )); - } + (url, parsed_url) + } + }; + + Ok(Self { + package: PubGrubPackage::from_package( + requirement.name.clone(), + extra, + requirement.marker.clone(), + ), + version: Range::full(), + url: Some(VerbatimParsedUrl { + parsed_url, + 
verbatim: verbatim_url.clone(), + }), + }) + } - Ok(Self { - package: PubGrubPackage::from_url( - requirement.name.clone(), - extra, - requirement.marker.clone(), - expected.clone(), - ), - version: Range::full(), + fn from_registry_requirement( + specifier: &VersionSpecifiers, + extra: Option, + requirement: &Requirement, + locals: &Locals, + ) -> Result { + // If the specifier is an exact version, and the user requested a local version that's + // more precise than the specifier, use the local version instead. + let version = if let Some(expected) = locals.get(&requirement.name) { + specifier + .iter() + .map(|specifier| { + Locals::map(expected, specifier) + .map_err(ResolveError::InvalidVersion) + .and_then(|specifier| Ok(PubGrubSpecifier::try_from(&specifier)?)) }) - } - } + .fold_ok(Range::full(), |range, specifier| { + range.intersection(&specifier.into()) + })? + } else { + PubGrubSpecifier::try_from(specifier)?.into() + }; + + let requirement = Self { + package: PubGrubPackage::from_package( + requirement.name.clone(), + extra, + requirement.marker.clone(), + ), + version, + url: None, + }; + Ok(requirement) } } diff --git a/crates/uv-resolver/src/pubgrub/package.rs b/crates/uv-resolver/src/pubgrub/package.rs index afbc3a908dea..c54bee394ef5 100644 --- a/crates/uv-resolver/src/pubgrub/package.rs +++ b/crates/uv-resolver/src/pubgrub/package.rs @@ -3,11 +3,8 @@ use std::ops::Deref; use std::sync::Arc; use pep508_rs::MarkerTree; -use pypi_types::VerbatimParsedUrl; use uv_normalize::{ExtraName, GroupName, PackageName}; -use crate::resolver::Urls; - /// [`Arc`] wrapper around [`PubGrubPackageInner`] to make cloning (inside PubGrub) cheap. #[derive(Debug, Clone, Eq, Hash, PartialEq, PartialOrd, Ord)] pub struct PubGrubPackage(Arc); @@ -51,43 +48,6 @@ pub enum PubGrubPackageInner { extra: Option, dev: Option, marker: Option, - /// The URL of the package, if it was specified in the requirement. - /// - /// There are a few challenges that come with URL-based packages, and how they map to - /// PubGrub. - /// - /// If the user declares a direct URL dependency, and then a transitive dependency - /// appears for the same package, we need to ensure that the direct URL dependency can - /// "satisfy" that requirement. So if the user declares a URL dependency on Werkzeug, and a - /// registry dependency on Flask, we need to ensure that Flask's dependency on Werkzeug - /// is resolved by the URL dependency. This means: (1) we need to avoid adding a second - /// Werkzeug variant from PyPI; and (2) we need to error if the Werkzeug version requested - /// by Flask doesn't match that of the URL dependency. - /// - /// Additionally, we need to ensure that we disallow multiple versions of the same package, - /// even if requested from different URLs. - /// - /// To enforce this requirement, we require that all possible URL dependencies are - /// defined upfront, as `requirements.txt` or `constraints.txt` or similar. Otherwise, - /// solving these graphs becomes far more complicated -- and the "right" behavior isn't - /// even clear. For example, imagine that you define a direct dependency on Werkzeug, and - /// then one of your other direct dependencies declares a dependency on Werkzeug at some - /// URL. Which is correct? By requiring direct dependencies, the semantics are at least - /// clear. - /// - /// With the list of known URLs available upfront, we then only need to do two things: - /// - /// 1. 
When iterating over the dependencies for a single package, ensure that we respect - /// URL variants over registry variants, if the package declares a dependency on both - /// `Werkzeug==2.0.0` _and_ `Werkzeug @ https://...` , which is strange but possible. - /// This is enforced by [`crate::pubgrub::dependencies::PubGrubDependencies`]. - /// 2. Reject any URL dependencies that aren't known ahead-of-time. - /// - /// Eventually, we could relax this constraint, in favor of something more lenient, e.g., if - /// we're going to have a dependency that's provided as a URL, we _need_ to visit the URL - /// version before the registry version. So we could just error if we visit a URL variant - /// _after_ a registry variant. - url: Option, }, /// A proxy package to represent a dependency with an extra (e.g., `black[colorama]`). /// @@ -106,7 +66,6 @@ pub enum PubGrubPackageInner { name: PackageName, extra: ExtraName, marker: Option, - url: Option, }, /// A proxy package to represent an enabled "dependency group" (e.g., development dependencies). /// @@ -117,7 +76,6 @@ pub enum PubGrubPackageInner { name: PackageName, dev: GroupName, marker: Option, - url: Option, }, /// A proxy package for a base package with a marker (e.g., `black; python_version >= "3.6"`). /// @@ -126,19 +84,16 @@ pub enum PubGrubPackageInner { Marker { name: PackageName, marker: MarkerTree, - url: Option, }, } impl PubGrubPackage { - /// Create a [`PubGrubPackage`] from a package name and version. + /// Create a [`PubGrubPackage`] from a package name and extra. pub(crate) fn from_package( name: PackageName, extra: Option, mut marker: Option, - urls: &Urls, ) -> Self { - let url = urls.get(&name).cloned(); // Remove all extra expressions from the marker, since we track extras // separately. This also avoids an issue where packages added via // extras end up having two distinct marker expressions, which in turn @@ -150,54 +105,15 @@ impl PubGrubPackage { name, extra, marker, - url, })) } else if let Some(marker) = marker { - Self(Arc::new(PubGrubPackageInner::Marker { name, marker, url })) - } else { - Self(Arc::new(PubGrubPackageInner::Package { - name, - extra, - dev: None, - marker, - url, - })) - } - } - - /// Create a [`PubGrubPackage`] from a package name and URL. - pub(crate) fn from_url( - name: PackageName, - extra: Option, - mut marker: Option, - url: VerbatimParsedUrl, - ) -> Self { - // Remove all extra expressions from the marker, since we track extras - // separately. This also avoids an issue where packages added via - // extras end up having two distinct marker expressions, which in turn - // makes them two distinct packages. This results in PubGrub being - // unable to unify version constraints across such packages. 
- marker = marker.and_then(|m| m.simplify_extras_with(|_| true)); - if let Some(extra) = extra { - Self(Arc::new(PubGrubPackageInner::Extra { - name, - extra, - marker, - url: Some(url), - })) - } else if let Some(marker) = marker { - Self(Arc::new(PubGrubPackageInner::Marker { - name, - marker, - url: Some(url), - })) + Self(Arc::new(PubGrubPackageInner::Marker { name, marker })) } else { Self(Arc::new(PubGrubPackageInner::Package { name, extra, dev: None, marker, - url: Some(url), })) } } diff --git a/crates/uv-resolver/src/pubgrub/priority.rs b/crates/uv-resolver/src/pubgrub/priority.rs index e293e8011473..f105a36e3471 100644 --- a/crates/uv-resolver/src/pubgrub/priority.rs +++ b/crates/uv-resolver/src/pubgrub/priority.rs @@ -3,6 +3,7 @@ use std::cmp::Reverse; use pubgrub::range::Range; use rustc_hash::FxHashMap; +use crate::fork_urls::ForkUrls; use pep440_rs::Version; use uv_normalize::PackageName; @@ -23,24 +24,21 @@ pub(crate) struct PubGrubPriorities(FxHashMap); impl PubGrubPriorities { /// Add a [`PubGrubPackage`] to the priority map. - pub(crate) fn insert(&mut self, package: &PubGrubPackage, version: &Range) { + pub(crate) fn insert( + &mut self, + package: &PubGrubPackage, + version: &Range, + urls: &ForkUrls, + ) { let next = self.0.len(); match &**package { PubGrubPackageInner::Root(_) => {} PubGrubPackageInner::Python(_) => {} - PubGrubPackageInner::Marker { - name, url: None, .. - } - | PubGrubPackageInner::Extra { - name, url: None, .. - } - | PubGrubPackageInner::Dev { - name, url: None, .. - } - | PubGrubPackageInner::Package { - name, url: None, .. - } => { + PubGrubPackageInner::Marker { name, .. } + | PubGrubPackageInner::Extra { name, .. } + | PubGrubPackageInner::Dev { name, .. } + | PubGrubPackageInner::Package { name, .. } => { match self.0.entry(name.clone()) { std::collections::hash_map::Entry::Occupied(mut entry) => { // Preserve the original index. @@ -52,7 +50,9 @@ impl PubGrubPriorities { }; // Compute the priority. - let priority = if version.as_singleton().is_some() { + let priority = if urls.get(name).is_some() { + PubGrubPriority::DirectUrl(Reverse(index)) + } else if version.as_singleton().is_some() { PubGrubPriority::Singleton(Reverse(index)) } else { PubGrubPriority::Unspecified(Reverse(index)) @@ -64,48 +64,17 @@ impl PubGrubPriorities { } } std::collections::hash_map::Entry::Vacant(entry) => { - // Insert the priority. - entry.insert(if version.as_singleton().is_some() { + // Compute the priority. + let priority = if urls.get(name).is_some() { + PubGrubPriority::DirectUrl(Reverse(next)) + } else if version.as_singleton().is_some() { PubGrubPriority::Singleton(Reverse(next)) } else { PubGrubPriority::Unspecified(Reverse(next)) - }); - } - } - } - PubGrubPackageInner::Marker { - name, url: Some(_), .. - } - | PubGrubPackageInner::Extra { - name, url: Some(_), .. - } - | PubGrubPackageInner::Dev { - name, url: Some(_), .. - } - | PubGrubPackageInner::Package { - name, url: Some(_), .. - } => { - match self.0.entry(name.clone()) { - std::collections::hash_map::Entry::Occupied(mut entry) => { - // Preserve the original index. - let index = match entry.get() { - PubGrubPriority::Unspecified(Reverse(index)) => *index, - PubGrubPriority::Singleton(Reverse(index)) => *index, - PubGrubPriority::DirectUrl(Reverse(index)) => *index, - PubGrubPriority::Root => next, }; - // Compute the priority. - let priority = PubGrubPriority::DirectUrl(Reverse(index)); - - // Take the maximum of the new and existing priorities. 
- if priority > *entry.get() { - entry.insert(priority); - } - } - std::collections::hash_map::Entry::Vacant(entry) => { // Insert the priority. - entry.insert(PubGrubPriority::DirectUrl(Reverse(next))); + entry.insert(priority); } } } @@ -140,6 +109,10 @@ pub(crate) enum PubGrubPriority { Singleton(Reverse), /// The package was specified via a direct URL. + /// + /// N.B.: URLs need to have priority over registry distributions for correctly matching registry + /// distributions to URLs, see [`PubGrubPackage::from_package`] an + /// [`crate::fork_urls::ForkUrls`]. DirectUrl(Reverse), /// The package is the root package. diff --git a/crates/uv-resolver/src/pubgrub/report.rs b/crates/uv-resolver/src/pubgrub/report.rs index a416bcd797e6..6ec81d72f085 100644 --- a/crates/uv-resolver/src/pubgrub/report.rs +++ b/crates/uv-resolver/src/pubgrub/report.rs @@ -17,6 +17,7 @@ use pep440_rs::Version; use uv_normalize::PackageName; use crate::candidate_selector::CandidateSelector; +use crate::fork_urls::ForkUrls; use crate::python_requirement::{PythonRequirement, PythonTarget}; use crate::resolver::{IncompletePackage, UnavailablePackage, UnavailableReason}; use crate::RequiresPython; @@ -407,19 +408,21 @@ impl PubGrubReportFormatter<'_> { index_locations: &Option, unavailable_packages: &FxHashMap, incomplete_packages: &FxHashMap>, + fork_urls: &ForkUrls, ) -> IndexSet { let mut hints = IndexSet::default(); match derivation_tree { DerivationTree::External( External::Custom(package, set, _) | External::NoVersions(package, set), ) => { - if let PubGrubPackageInner::Package { - name, url: None, .. - } = &**package - { + if let PubGrubPackageInner::Package { name, .. } = &**package { // Check for no versions due to pre-release options. if let Some(selector) = selector { - self.prerelease_available_hint(package, name, set, selector, &mut hints); + if !fork_urls.contains_key(name) { + self.prerelease_available_hint( + package, name, set, selector, &mut hints, + ); + } } } @@ -470,6 +473,7 @@ impl PubGrubReportFormatter<'_> { index_locations, unavailable_packages, incomplete_packages, + fork_urls, )); hints.extend(self.hints( &derived.cause2, @@ -477,6 +481,7 @@ impl PubGrubReportFormatter<'_> { index_locations, unavailable_packages, incomplete_packages, + fork_urls, )); } } diff --git a/crates/uv-resolver/src/resolver/batch_prefetch.rs b/crates/uv-resolver/src/resolver/batch_prefetch.rs index ebb2b96aa408..ce22c17f1cd5 100644 --- a/crates/uv-resolver/src/resolver/batch_prefetch.rs +++ b/crates/uv-resolver/src/resolver/batch_prefetch.rs @@ -58,7 +58,6 @@ impl BatchPrefetcher { extra: None, dev: None, marker: None, - url: None, } = &**next else { return Ok(()); diff --git a/crates/uv-resolver/src/resolver/mod.rs b/crates/uv-resolver/src/resolver/mod.rs index 45dad72a0cfd..d6958f19232c 100644 --- a/crates/uv-resolver/src/resolver/mod.rs +++ b/crates/uv-resolver/src/resolver/mod.rs @@ -40,6 +40,7 @@ use uv_types::{BuildContext, HashStrategy, InstalledPackagesProvider}; use crate::candidate_selector::{CandidateDist, CandidateSelector}; use crate::dependency_provider::UvDependencyProvider; use crate::error::ResolveError; +use crate::fork_urls::ForkUrls; use crate::manifest::Manifest; use crate::pins::FilePins; use crate::preferences::Preferences; @@ -78,7 +79,7 @@ pub struct Resolver { project: Option, requirements: Vec, @@ -318,6 +319,7 @@ impl ResolverState ResolverState ResolverState= '3.12'", + // ] + // ``` + // In the `python_version < '3.12'` case, we haven't pre-visited `iniconfig` yet, + // since we weren't 
sure whether it might also be a URL requirement when + // transforming the requirements. For that case, we do another request here + // (idempotent due to caching). + self.request_package(&state.next, url, &request_sink)?; prefetcher.version_tried(state.next.clone()); @@ -368,14 +390,18 @@ impl ResolverState ResolverState ResolverState { @@ -510,10 +543,23 @@ impl ResolverState { state.add_package_version_dependencies( + for_package.as_deref(), &version, - dependencies, + &self.urls, + dependencies.clone(), + &self.git, &prefetcher, )?; + // Emit a request to fetch the metadata for each registry package. + for dependency in &dependencies { + let PubGrubDependency { + package, + version: _, + url: _, + } = dependency; + let url = package.name().and_then(|name| state.fork_urls.get(name)); + self.visit_package(package, url, &request_sink)?; + } forked_states.push(state); } ForkedDependencies::Forked { @@ -545,10 +591,25 @@ impl ResolverState ResolverState, request_sink: &Sender, ) -> Result<(), ResolveError> { - match &**package { - PubGrubPackageInner::Root(_) => {} - PubGrubPackageInner::Python(_) => {} - PubGrubPackageInner::Marker { - name, url: None, .. - } - | PubGrubPackageInner::Extra { - name, url: None, .. - } - | PubGrubPackageInner::Dev { - name, url: None, .. - } - | PubGrubPackageInner::Package { - name, url: None, .. - } => { + // Ignore unresolved URL packages. + if url.is_none() + && package + .name() + .map(|name| self.urls.any_url(name)) + .unwrap_or(true) + { + return Ok(()); + } + + self.request_package(package, url, request_sink) + } + + fn request_package( + &self, + package: &PubGrubPackage, + url: Option<&VerbatimParsedUrl>, + request_sink: &Sender, + ) -> Result<(), ResolveError> { + match (&**package, url) { + (PubGrubPackageInner::Root(_), _) => {} + (PubGrubPackageInner::Python(_), _) => {} + ( + PubGrubPackageInner::Marker { name, .. } + | PubGrubPackageInner::Extra { name, .. } + | PubGrubPackageInner::Dev { name, .. } + | PubGrubPackageInner::Package { name, .. }, + None, + ) => { // Verify that the package is allowed under the hash-checking policy. if !self.hasher.allows_package(name) { return Err(ResolveError::UnhashedPackage(name.clone())); @@ -611,26 +687,13 @@ impl ResolverState { + ( + PubGrubPackageInner::Marker { name, .. } + | PubGrubPackageInner::Extra { name, .. } + | PubGrubPackageInner::Dev { name, .. } + | PubGrubPackageInner::Package { name, .. }, + Some(url), + ) => { // Verify that the package is allowed under the hash-checking policy. if !self.hasher.allows_url(&url.verbatim) { return Err(ResolveError::UnhashedPackage(name.clone())); @@ -650,6 +713,7 @@ impl ResolverState( packages: impl Iterator)>, + urls: &Urls, request_sink: &Sender, ) -> Result<(), ResolveError> { // Iterate over the potential packages, and fetch file metadata for any of them. These @@ -660,11 +724,15 @@ impl ResolverState ResolverState, pins: &mut FilePins, + fork_urls: &ForkUrls, visited: &mut FxHashSet, request_sink: &Sender, ) -> Result, ResolveError> { - match &**package { - PubGrubPackageInner::Root(_) => { + let url = package.name().and_then(|name| fork_urls.get(name)); + match (&**package, url) { + (PubGrubPackageInner::Root(_), _) => { Ok(Some(ResolverVersion::Available(MIN_VERSION.clone()))) } - PubGrubPackageInner::Python(_) => { + (PubGrubPackageInner::Python(_), _) => { // Dependencies on Python are only added when a package is incompatible; as such, // we don't need to do anything here. + // we don't need to do anything here. 
Ok(None) } - PubGrubPackageInner::Marker { - name, - url: Some(url), - .. - } - | PubGrubPackageInner::Extra { - name, - url: Some(url), - .. - } - | PubGrubPackageInner::Dev { - name, - url: Some(url), - .. - } - | PubGrubPackageInner::Package { - name, - url: Some(url), - .. - } => { + ( + PubGrubPackageInner::Marker { name, .. } + | PubGrubPackageInner::Extra { name, .. } + | PubGrubPackageInner::Dev { name, .. } + | PubGrubPackageInner::Package { name, .. }, + Some(url), + ) => { debug!( "Searching for a compatible version of {package} @ {} ({range})", url.verbatim @@ -800,18 +858,13 @@ impl ResolverState { + ( + PubGrubPackageInner::Marker { name, .. } + | PubGrubPackageInner::Extra { name, .. } + | PubGrubPackageInner::Dev { name, .. } + | PubGrubPackageInner::Package { name, .. }, + None, + ) => { // Wait for the metadata to be available. let versions_response = self .index @@ -907,11 +960,10 @@ impl ResolverState, ) -> Result { - let result = self.get_dependencies(package, version, markers, priorities, request_sink); + let result = self.get_dependencies(package, version, fork_urls, markers); if self.markers.is_some() { return result.map(|deps| match deps { Dependencies::Available(deps) => ForkedDependencies::Unforked(deps), @@ -927,11 +979,11 @@ impl ResolverState, ) -> Result { - let (dependencies, name) = match &**package { + let url = package.name().and_then(|name| fork_urls.get(name)); + let dependencies = match &**package { PubGrubPackageInner::Root(_) => { let no_dev_deps = BTreeMap::default(); let requirements = self.flatten_requirements( @@ -943,27 +995,18 @@ impl ResolverState, _>>()?; - - (dependencies, None) + .collect::, _>>()? } PubGrubPackageInner::Package { name, extra, dev, marker, - url, } => { // If we're excluding transitive dependencies, short-circuit. if self.dependency_mode.is_direct() { @@ -1083,18 +1126,13 @@ impl ResolverState, _>>()?; // If a package has metadata for an enabled dependency group, // add a dependency from it to the same package with the group // enabled. + if extra.is_none() && dev.is_none() { for group in &self.dev { if !metadata.dev_dependencies.contains_key(group) { @@ -1105,19 +1143,19 @@ impl ResolverState return Ok(Dependencies::Available(Vec::default())), // Add a dependency on both the marker and base package. - PubGrubPackageInner::Marker { name, marker, url } => { + PubGrubPackageInner::Marker { name, marker } => { return Ok(Dependencies::Available( [None, Some(marker)] .into_iter() @@ -1127,9 +1165,9 @@ impl ResolverState ResolverState { return Ok(Dependencies::Available( [None, marker.as_ref()] @@ -1155,9 +1192,9 @@ impl ResolverState ResolverState { + PubGrubPackageInner::Dev { name, dev, marker } => { return Ok(Dependencies::Available( [None, marker.as_ref()] .into_iter() @@ -1185,9 +1217,9 @@ impl ResolverState ResolverState ResolverState {} PubGrubPackageInner::Extra { .. } => {} PubGrubPackageInner::Dev { .. } => {} - PubGrubPackageInner::Package { - name, - url: Some(url), - .. - } => { - reporter.on_progress(name, &VersionOrUrlRef::Url(&url.verbatim)); - } - PubGrubPackageInner::Package { - name, url: None, .. - } => { + PubGrubPackageInner::Package { name, .. } => { reporter.on_progress(name, &VersionOrUrlRef::Version(version)); } } @@ -1603,7 +1610,7 @@ impl ResolverState, version: &Version, + urls: &Urls, dependencies: Vec, + git: &GitResolver, prefetcher: &BatchPrefetcher, ) -> Result<(), ResolveError> { + // Check for self-dependencies. 
if dependencies .iter() .any(|dependency| dependency.package == self.next) @@ -1667,17 +1685,59 @@ impl SolveState { if enabled!(Level::DEBUG) { prefetcher.log_tried_versions(); } - return Err(PubGrubError::SelfDependency { - package: self.next.clone(), - version: version.clone(), + return Err(ResolveError::from_pubgrub_error( + PubGrubError::SelfDependency { + package: self.next.clone(), + version: version.clone(), + }, + self.fork_urls.clone(), + )); + } + + for dependency in &dependencies { + let PubGrubDependency { + package, + version, + url, + } = dependency; + + // From the [`Requirement`] to [`PubGrubDependency`] conversion, we get a URL if the + // requirement was a URL requirement. `Urls` applies canonicalization to this and + // override URLs to both URL and registry requirements, which we then check for + // conflicts using [`ForkUrl`]. + if let Some(name) = package.name() { + if let Some(url) = urls.get_url(name, url.as_ref(), git)? { + self.fork_urls.insert(name, url, &self.markers)?; + }; } - .into()); + + if let Some(for_package) = for_package { + if self.markers == MarkerTree::And(Vec::new()) { + debug!("Adding transitive dependency for {for_package}: {package}{version}",); + } else { + debug!( + "Adding transitive dependency for {for_package}{{{}}}: {package}{version}", + self.markers + ); + } + } else { + // A dependency from the root package or requirements.txt. + debug!("Adding direct dependency: {package}{version}"); + } + + // Update the package priorities. + self.priorities.insert(package, version, &self.fork_urls); } + self.pubgrub.add_package_version_dependencies( self.next.clone(), version.clone(), dependencies.into_iter().map(|dependency| { - let PubGrubDependency { package, version } = dependency; + let PubGrubDependency { + package, + version, + url: _, + } = dependency; (package, version) }), ); @@ -1839,7 +1899,6 @@ impl SolveState { name, extra, dev, - url, marker: None, } = &*package { @@ -1848,7 +1907,7 @@ impl SolveState { name: name.clone(), extra: extra.clone(), dev: dev.clone(), - url: url.clone(), + url: self.fork_urls.get(name).cloned(), }, FxHashSet::from_iter([version]), )) diff --git a/crates/uv-resolver/src/resolver/urls.rs b/crates/uv-resolver/src/resolver/urls.rs index eb787e60de8f..7a0e64d749f3 100644 --- a/crates/uv-resolver/src/resolver/urls.rs +++ b/crates/uv-resolver/src/resolver/urls.rs @@ -1,22 +1,37 @@ +use std::iter; + use rustc_hash::FxHashMap; use same_file::is_same_file; use tracing::debug; use cache_key::CanonicalUrl; use distribution_types::Verbatim; -use pep508_rs::MarkerEnvironment; -use pypi_types::{ - ParsedArchiveUrl, ParsedDirectoryUrl, ParsedGitUrl, ParsedPathUrl, ParsedUrl, - RequirementSource, VerbatimParsedUrl, -}; +use pep508_rs::{MarkerEnvironment, VerbatimUrl}; +use pypi_types::{ParsedDirectoryUrl, ParsedUrl, VerbatimParsedUrl}; use uv_git::GitResolver; use uv_normalize::PackageName; use crate::{DependencyMode, Manifest, ResolveError}; -/// A map of package names to their associated, required URLs. +/// The URLs that are allowed for packages. +/// +/// These are the URLs used in the root package or by other URL dependencies (including path +/// dependencies). They take precedence over requirements by version (except for the special case +/// where we are in a fork that doesn't use any of the URL(s) used in other forks). Each fork may +/// only use a single URL. +/// +/// This type contains all URLs without checking, the validation happens in +/// [`crate::fork_urls::ForkUrls`]. 
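// Illustrative, self-contained sketch (not part of the patch; `SimplifiedUrls` with plain
// `String` URLs and `std` maps is a hypothetical stand-in for the `Urls` type below) of the
// two-map layout and the lookup rule implemented by `get_url`: an override URL wins
// unconditionally, a registry requirement without an override yields no URL, and a URL
// requirement is only allowed if it was declared up front.
use std::collections::HashMap;

#[derive(Default)]
struct SimplifiedUrls {
    /// At most one override URL per package; it replaces every other URL.
    overrides: HashMap<String, String>,
    /// URLs from requirements and constraints; several entries may coexist across forks.
    regular: HashMap<String, Vec<String>>,
}

impl SimplifiedUrls {
    fn get_url<'a>(
        &'a self,
        name: &str,
        requested: Option<&'a String>,
    ) -> Result<Option<&'a String>, String> {
        if let Some(url) = self.overrides.get(name) {
            // An override applies to registry and URL requirements alike.
            return Ok(Some(url));
        }
        match requested {
            // Registry requirement without an override: nothing to pin.
            None => Ok(None),
            // URL requirement: it must match one of the pre-declared URLs.
            Some(url) if self.regular.get(name).is_some_and(|urls| urls.contains(url)) => {
                Ok(Some(url))
            }
            Some(url) => Err(format!("URL not declared as a direct requirement for `{name}`: {url}")),
        }
    }
}

fn main() {
    let mut urls = SimplifiedUrls::default();
    urls.regular.insert(
        "iniconfig".to_string(),
        vec!["https://example.invalid/iniconfig-2.0.0.whl".to_string()],
    );
    let requested = "https://example.invalid/iniconfig-2.0.0.whl".to_string();
    assert!(urls.get_url("iniconfig", Some(&requested)).is_ok());
    assert!(urls.get_url("iniconfig", None).unwrap().is_none());
}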
#[derive(Debug, Default)] -pub(crate) struct Urls(FxHashMap); +pub(crate) struct Urls { + /// URL requirements in overrides. There can only be a single URL per package in overrides + /// (since it replaces all other URLs), and an override URL replaces all requirements and + /// constraints URLs. + overrides: FxHashMap, + /// URLs from regular requirements or from constraints. There can be multiple URLs for the same + /// package as long as they are in different forks. + regular: FxHashMap>, +} impl Urls { pub(crate) fn from_manifest( @@ -25,155 +40,167 @@ impl Urls { git: &GitResolver, dependencies: DependencyMode, ) -> Result { - let mut urls: FxHashMap = FxHashMap::default(); - - // Add all direct requirements and constraints. If there are any conflicts, return an error. - for requirement in manifest.requirements(markers, dependencies) { - match &requirement.source { - RequirementSource::Registry { .. } => {} - RequirementSource::Url { - subdirectory, - location, - url, - } => { - let url = VerbatimParsedUrl { - parsed_url: ParsedUrl::Archive(ParsedArchiveUrl::from_source( - location.clone(), - subdirectory.clone(), - )), - verbatim: url.clone(), - }; - if let Some(previous) = urls.insert(requirement.name.clone(), url.clone()) { - if !Self::same_resource(&previous.parsed_url, &url.parsed_url, git) { - return Err(ResolveError::ConflictingUrlsDirect( - requirement.name.clone(), - previous.verbatim.verbatim().to_string(), - url.verbatim.verbatim().to_string(), - )); - } - } - } - RequirementSource::Path { - install_path, - lock_path, - url, - } => { - let url = VerbatimParsedUrl { - parsed_url: ParsedUrl::Path(ParsedPathUrl::from_source( - install_path.clone(), - lock_path.clone(), - url.to_url(), - )), - verbatim: url.clone(), - }; - if let Some(previous) = urls.insert(requirement.name.clone(), url.clone()) { - if !Self::same_resource(&previous.parsed_url, &url.parsed_url, git) { - return Err(ResolveError::ConflictingUrlsDirect( - requirement.name.clone(), - previous.verbatim.verbatim().to_string(), - url.verbatim.verbatim().to_string(), - )); + let mut urls: FxHashMap> = FxHashMap::default(); + let mut overrides: FxHashMap = FxHashMap::default(); + + // Add all direct regular requirements and constraints URL. + for requirement in manifest.requirements_no_overrides(markers, dependencies) { + let Some(url) = requirement.source.to_verbatim_parsed_url() else { + // Registry requirement + continue; + }; + + let package_urls = urls.entry(requirement.name.clone()).or_default(); + if let Some(package_url) = package_urls + .iter_mut() + .find(|package_url| same_resource(&package_url.parsed_url, &url.parsed_url, git)) + { + // Allow editables to override non-editables. + let previous_editable = package_url.is_editable(); + *package_url = url; + if previous_editable { + if let VerbatimParsedUrl { + parsed_url: ParsedUrl::Directory(ParsedDirectoryUrl { editable, .. 
}), + verbatim: _, + } = package_url + { + if !*editable { + debug!("Allowing an editable variant of {}", &package_url.verbatim); + *editable = true; } - } - } - RequirementSource::Directory { - install_path, - lock_path, - editable, - url, - } => { - let url = VerbatimParsedUrl { - parsed_url: ParsedUrl::Directory(ParsedDirectoryUrl::from_source( - install_path.clone(), - lock_path.clone(), - *editable, - url.to_url(), - )), - verbatim: url.clone(), }; - match urls.entry(requirement.name.clone()) { - std::collections::hash_map::Entry::Occupied(mut entry) => { - let previous = entry.get(); - if Self::same_resource(&previous.parsed_url, &url.parsed_url, git) { - // Allow editables to override non-editables. - if previous.parsed_url.is_editable() && !editable { - debug!( - "Allowing {} as an editable variant of {}", - &previous.verbatim, url.verbatim - ); - } else { - entry.insert(url.clone()); - } - continue; - } - - return Err(ResolveError::ConflictingUrlsDirect( - requirement.name.clone(), - previous.verbatim.verbatim().to_string(), - url.verbatim.verbatim().to_string(), - )); - } - std::collections::hash_map::Entry::Vacant(entry) => { - entry.insert(url.clone()); - } - } } - RequirementSource::Git { - repository, - reference, - precise, - subdirectory, - url, - } => { - let url = VerbatimParsedUrl { - parsed_url: ParsedUrl::Git(ParsedGitUrl::from_source( - repository.clone(), - reference.clone(), - *precise, - subdirectory.clone(), - )), - verbatim: url.clone(), - }; - if let Some(previous) = urls.insert(requirement.name.clone(), url.clone()) { - if !Self::same_resource(&previous.parsed_url, &url.parsed_url, git) { - return Err(ResolveError::ConflictingUrlsDirect( - requirement.name.clone(), - previous.verbatim.verbatim().to_string(), - url.verbatim.verbatim().to_string(), - )); - } - } + } else { + package_urls.push(url); + } + } + + // Add all URLs from overrides. If there is an override URL, all other URLs from + // requirements and constraints are moot and will be removed. + for requirement in manifest.overrides(markers, dependencies) { + let Some(url) = requirement.source.to_verbatim_parsed_url() else { + // Registry requirement + continue; + }; + // We only clear for non-URL overrides, since e.g. with an override `anyio==0.0.0` and + // a requirements.txt entry `./anyio`, we still use the URL. See + // `allow_recursive_url_local_path_override_constraint`. + urls.remove(&requirement.name); + let previous = overrides.insert(requirement.name.clone(), url.clone()); + if let Some(previous) = previous { + if !same_resource(&previous.parsed_url, &url.parsed_url, git) { + return Err(ResolveError::ConflictingOverrideUrls( + requirement.name.clone(), + previous.verbatim.verbatim().to_string(), + url.verbatim.verbatim().to_string(), + )); } } } - Ok(Self(urls)) + Ok(Self { + regular: urls, + overrides, + }) } - /// Return the [`VerbatimUrl`] associated with the given package name, if any. - pub(crate) fn get(&self, package: &PackageName) -> Option<&VerbatimParsedUrl> { - self.0.get(package) + /// Check and canonicalize the URL of a requirement. + /// + /// If we have a URL override, apply it unconditionally for registry and URL requirements. + /// Otherwise, there are two case: For a URL requirement (`url` isn't `None`), check that the + /// URL is allowed and return its canonical form. For registry requirements, we return `None` + /// if there is no override. 
+ pub(crate) fn get_url<'a>( + &'a self, + name: &'a PackageName, + url: Option<&'a VerbatimParsedUrl>, + git: &'a GitResolver, + ) -> Result, ResolveError> { + if let Some(override_url) = self.get_override(name) { + Ok(Some(override_url)) + } else if let Some(url) = url { + Ok(Some(self.canonicalize_allowed_url( + name, + git, + &url.verbatim, + &url.parsed_url, + )?)) + } else { + Ok(None) + } } - /// Returns `true` if the [`ParsedUrl`] instances point to the same resource. - pub(crate) fn same_resource(a: &ParsedUrl, b: &ParsedUrl, git: &GitResolver) -> bool { - match (a, b) { - (ParsedUrl::Archive(a), ParsedUrl::Archive(b)) => { - a.subdirectory == b.subdirectory - && CanonicalUrl::new(&a.url) == CanonicalUrl::new(&b.url) - } - (ParsedUrl::Git(a), ParsedUrl::Git(b)) => { - a.subdirectory == b.subdirectory && git.same_ref(&a.url, &b.url) - } - (ParsedUrl::Path(a), ParsedUrl::Path(b)) => { - a.install_path == b.install_path - || is_same_file(&a.install_path, &b.install_path).unwrap_or(false) - } - (ParsedUrl::Directory(a), ParsedUrl::Directory(b)) => { - a.install_path == b.install_path - || is_same_file(&a.install_path, &b.install_path).unwrap_or(false) - } - _ => false, + pub(crate) fn any_url(&self, name: &PackageName) -> bool { + self.get_override(name).is_some() || self.get_regular(name).is_some() + } + + /// Return the [`VerbatimUrl`] override for the given package, if any. + fn get_override(&self, package: &PackageName) -> Option<&VerbatimParsedUrl> { + self.overrides.get(package) + } + + /// Return the allowed [`VerbatimUrl`]s for given package from regular requirements and + /// constraints (but not overrides), if any. + /// + /// It's more than one more URL if they are in different forks (or conflict after forking). + fn get_regular(&self, package: &PackageName) -> Option<&[VerbatimParsedUrl]> { + self.regular.get(package).map(Vec::as_slice) + } + + /// Check if a URL is allowed (known), and if so, return its canonical form. + fn canonicalize_allowed_url<'a>( + &'a self, + package_name: &'a PackageName, + git: &'a GitResolver, + verbatim_url: &'a VerbatimUrl, + parsed_url: &'a ParsedUrl, + ) -> Result<&'a VerbatimParsedUrl, ResolveError> { + let Some(expected) = self.get_regular(package_name) else { + return Err(ResolveError::DisallowedUrl( + package_name.clone(), + verbatim_url.to_string(), + )); + }; + + let matching_urls: Vec<_> = expected + .iter() + .filter(|requirement| same_resource(&requirement.parsed_url, parsed_url, git)) + .collect(); + + let [allowed_url] = matching_urls.as_slice() else { + let mut conflicting_urls: Vec<_> = matching_urls + .into_iter() + .map(|parsed_url| parsed_url.verbatim.verbatim().to_string()) + .chain(iter::once(verbatim_url.verbatim().to_string())) + .collect(); + conflicting_urls.sort(); + return Err(ResolveError::ConflictingUrls( + package_name.clone(), + conflicting_urls, + )); + }; + Ok(*allowed_url) + } +} + +/// Returns `true` if the [`ParsedUrl`] instances point to the same resource. 
+fn same_resource(a: &ParsedUrl, b: &ParsedUrl, git: &GitResolver) -> bool { + match (a, b) { + (ParsedUrl::Archive(a), ParsedUrl::Archive(b)) => { + a.subdirectory == b.subdirectory + && CanonicalUrl::new(&a.url) == CanonicalUrl::new(&b.url) + } + (ParsedUrl::Git(a), ParsedUrl::Git(b)) => { + a.subdirectory == b.subdirectory && git.same_ref(&a.url, &b.url) + } + (ParsedUrl::Path(a), ParsedUrl::Path(b)) => { + a.install_path == b.install_path + || is_same_file(&a.install_path, &b.install_path).unwrap_or(false) + } + (ParsedUrl::Directory(a), ParsedUrl::Directory(b)) => { + a.install_path == b.install_path + || is_same_file(&a.install_path, &b.install_path).unwrap_or(false) } + _ => false, } } diff --git a/crates/uv/tests/branching_urls.rs b/crates/uv/tests/branching_urls.rs new file mode 100644 index 000000000000..0503e9d39e3d --- /dev/null +++ b/crates/uv/tests/branching_urls.rs @@ -0,0 +1,510 @@ +use std::env; +use std::path::Path; + +use anyhow::Result; +use indoc::{formatdoc, indoc}; +use insta::assert_snapshot; + +use crate::common::{uv_snapshot, TestContext}; + +mod common; + +/// Create a stub package `name` in `dir` with the given `pyproject.toml` body. +fn make_project(dir: &Path, name: &str, body: &str) -> Result<()> { + let pyproject_toml = formatdoc! {r#" + [project] + name = "{name}" + version = "0.1.0" + description = "Test package for direct URLs in branches" + requires-python = ">=3.11,<3.13" + {body} + + [build-system] + requires = ["flit_core>=3.8,<4"] + build-backend = "flit_core.buildapi" + "# + }; + fs_err::create_dir_all(dir)?; + fs_err::write(dir.join("pyproject.toml"), pyproject_toml)?; + fs_err::create_dir(dir.join(name))?; + fs_err::write(dir.join(name).join("__init__.py"), "")?; + Ok(()) +} + +/// The root package has diverging URLs for disjoint markers: +/// ```toml +/// dependencies = [ +/// "iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl ; python_version >= '3.12'", +/// "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl ; python_version < '3.12'", +/// ] +/// ``` +#[test] +fn branching_urls_disjoint() -> Result<()> { + let context = TestContext::new("3.12"); + + let deps = indoc! 
{r#" + dependencies = [ + # Valid, disjoint split + "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl ; python_version < '3.12'", + "iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl ; python_version >= '3.12'", + ] + "# }; + make_project(context.temp_dir.path(), "a", deps)?; + + uv_snapshot!(context.filters(), context.lock().arg("--preview").current_dir(&context.temp_dir), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + "### + ); + + Ok(()) +} + +/// The root package has diverging URLs, but their markers are not disjoint: +/// ```toml +/// dependencies = [ +/// "iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl ; python_version >= '3.11'", +/// "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl ; python_version < '3.12'", +/// ] +/// ``` +#[test] +fn branching_urls_overlapping() -> Result<()> { + let context = TestContext::new("3.12"); + + let deps = indoc! {r#" + dependencies = [ + # Conflicting split + "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl ; python_version < '3.12'", + "iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl ; python_version >= '3.11'", + ] + "# }; + make_project(context.temp_dir.path(), "a", deps)?; + + uv_snapshot!(context.filters(), context.lock().arg("--preview").current_dir(&context.temp_dir), @r###" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Requirements contain conflicting URLs for package `iniconfig`: + - https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl + - https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl + "### + ); + + Ok(()) +} + +/// The root package has diverging URLs, but transitive dependencies have conflicting URLs. +/// +/// Requirements: +/// ```text +/// a -> anyio (allowed forking urls to force a split) +/// a -> b -> b1 -> https://../iniconfig-1.1.1-py3-none-any.whl +/// a -> b -> b2 -> https://../iniconfig-2.0.0-py3-none-any.whl +/// ``` +#[test] +fn root_package_splits_but_transitive_conflict() -> Result<()> { + let context = TestContext::new("3.12"); + + let deps = indoc! {r#" + dependencies = [ + # Force a split + "anyio==4.3.0 ; python_version >= '3.12'", + "anyio==4.2.0 ; python_version < '3.12'", + "b" + ] + + [tool.uv.sources] + b = { path = "b" } + "# }; + make_project(context.temp_dir.path(), "a", deps)?; + + let deps = indoc! {r#" + dependencies = [ + "b1", + "b2", + ] + + [tool.uv.sources] + b1 = { path = "../b1" } + b2 = { path = "../b2" } + "# }; + make_project(&context.temp_dir.path().join("b"), "b", deps)?; + + let deps = indoc! 
{r#" + dependencies = [ + "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl", + ] + "# }; + make_project(&context.temp_dir.path().join("b1"), "b1", deps)?; + + let deps = indoc! {r#" + dependencies = [ + "iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", + ] + "# }; + make_project(&context.temp_dir.path().join("b2"), "b2", deps)?; + + uv_snapshot!(context.filters(), context.lock().arg("--preview").current_dir(&context.temp_dir), @r###" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Requirements contain conflicting URLs for package `iniconfig` in split `python_version < '3.12'`: + - https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl + - https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl + "### + ); + + Ok(()) +} + +/// The root package has diverging URLs, and transitive dependencies through an intermediate +/// package have one URL for each side. +/// +/// Requirements: +/// ```text +/// a -> anyio==4.4.0 ; python_version >= '3.12' +/// a -> anyio==4.3.0 ; python_version < '3.12' +/// a -> b -> b1 ; python_version < '3.12' -> https://../iniconfig-1.1.1-py3-none-any.whl +/// a -> b -> b2 ; python_version >= '3.12' -> https://../iniconfig-2.0.0-py3-none-any.whl +/// ``` +#[test] +fn root_package_splits_transitive_too() -> Result<()> { + let context = TestContext::new("3.12"); + + let deps = indoc! {r#" + dependencies = [ + # Force a split + "anyio==4.3.0 ; python_version >= '3.12'", + "anyio==4.2.0 ; python_version < '3.12'", + "b" + ] + + [tool.uv.sources] + b = { path = "b" } + "# }; + make_project(context.temp_dir.path(), "a", deps)?; + + let deps = indoc! {r#" + dependencies = [ + "b1 ; python_version < '3.12'", + "b2 ; python_version >= '3.12'", + ] + + [tool.uv.sources] + b1 = { path = "../b1" } + b2 = { path = "../b2" } + "# }; + make_project(&context.temp_dir.path().join("b"), "b", deps)?; + + let deps = indoc! {r#" + dependencies = [ + "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl", + ] + "# }; + make_project(&context.temp_dir.path().join("b1"), "b1", deps)?; + + let deps = indoc! {r#" + dependencies = [ + "iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", + ] + "# }; + make_project(&context.temp_dir.path().join("b2"), "b2", deps)?; + + uv_snapshot!(context.filters(), context.lock().arg("--preview").current_dir(&context.temp_dir), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 10 packages in [TIME] + "### + ); + + Ok(()) +} + +/// The root package has diverging URLs on one package, and other dependencies have one URL +/// for each side. 
+
+/// The root package forces a split, and transitive dependencies through an intermediate
+/// package have one URL for each side.
+///
+/// Requirements:
+/// ```text
+/// a -> anyio==4.3.0 ; python_version >= '3.12'
+/// a -> anyio==4.2.0 ; python_version < '3.12'
+/// a -> b -> b1 ; python_version < '3.12' -> https://../iniconfig-1.1.1-py3-none-any.whl
+/// a -> b -> b2 ; python_version >= '3.12' -> https://../iniconfig-2.0.0-py3-none-any.whl
+/// ```
+#[test]
+fn root_package_splits_transitive_too() -> Result<()> {
+    let context = TestContext::new("3.12");
+
+    let deps = indoc! {r#"
+        dependencies = [
+          # Force a split
+          "anyio==4.3.0 ; python_version >= '3.12'",
+          "anyio==4.2.0 ; python_version < '3.12'",
+          "b"
+        ]
+
+        [tool.uv.sources]
+        b = { path = "b" }
+    "# };
+    make_project(context.temp_dir.path(), "a", deps)?;
+
+    let deps = indoc! {r#"
+        dependencies = [
+          "b1 ; python_version < '3.12'",
+          "b2 ; python_version >= '3.12'",
+        ]
+
+        [tool.uv.sources]
+        b1 = { path = "../b1" }
+        b2 = { path = "../b2" }
+    "# };
+    make_project(&context.temp_dir.path().join("b"), "b", deps)?;
+
+    let deps = indoc! {r#"
+        dependencies = [
+          "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl",
+        ]
+    "# };
+    make_project(&context.temp_dir.path().join("b1"), "b1", deps)?;
+
+    let deps = indoc! {r#"
+        dependencies = [
+          "iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl",
+        ]
+    "# };
+    make_project(&context.temp_dir.path().join("b2"), "b2", deps)?;
+
+    uv_snapshot!(context.filters(), context.lock().arg("--preview").current_dir(&context.temp_dir), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+
+    ----- stderr -----
+    Resolved 10 packages in [TIME]
+    "###
+    );
+
+    Ok(())
+}
+
+/// The root package forces a split on one package, and other dependencies bring in one
+/// `iniconfig` version for each side.
+///
+/// Requirements:
+/// ```text
+/// a -> anyio==4.3.0 ; python_version >= '3.12'
+/// a -> anyio==4.2.0 ; python_version < '3.12'
+/// a -> b1 ; python_version < '3.12' -> iniconfig==1.1.1
+/// a -> b2 ; python_version >= '3.12' -> iniconfig==2.0.0
+/// ```
+#[test]
+fn root_package_splits_other_dependencies_too() -> Result<()> {
+    let context = TestContext::new("3.12");
+
+    let deps = indoc! {r#"
+        dependencies = [
+          # Force a split
+          "anyio==4.3.0 ; python_version >= '3.12'",
+          "anyio==4.2.0 ; python_version < '3.12'",
+          # These two are currently included in both parts of the split.
+          "b1 ; python_version < '3.12'",
+          "b2 ; python_version >= '3.12'",
+        ]
+
+        [tool.uv.sources]
+        b1 = { path = "b1" }
+        b2 = { path = "b2" }
+    "# };
+    make_project(context.temp_dir.path(), "a", deps)?;
+
+    let deps = indoc! {r#"
+        dependencies = [
+          "iniconfig==1.1.1",
+        ]
+    "# };
+    make_project(&context.temp_dir.path().join("b1"), "b1", deps)?;
+
+    let deps = indoc! {r#"
+        dependencies = [
+          "iniconfig==2.0.0"
+        ]
+    "# };
+    make_project(&context.temp_dir.path().join("b2"), "b2", deps)?;
+
+    uv_snapshot!(context.filters(), context.lock().arg("--preview").current_dir(&context.temp_dir), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+
+    ----- stderr -----
+    Resolved 9 packages in [TIME]
+    "###
+    );
+
+    Ok(())
+}
+
+/// Whether the dependency comes from the registry or a direct URL depends on the branch.
+///
+/// ```toml
+/// dependencies = [
+///   "iniconfig == 1.1.1 ; python_version < '3.12'",
+///   "iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl ; python_version >= '3.12'",
+/// ]
+/// ```
+#[test]
+fn branching_between_registry_and_direct_url() -> Result<()> {
+    let context = TestContext::new("3.12");
+
+    let deps = indoc! {r#"
+        dependencies = [
+          "iniconfig == 1.1.1 ; python_version < '3.12'",
+          "iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl ; python_version >= '3.12'",
+        ]
+    "# };
+    make_project(context.temp_dir.path(), "a", deps)?;
+
+    uv_snapshot!(context.filters(), context.lock().arg("--preview").current_dir(&context.temp_dir), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+
+    ----- stderr -----
+    Resolved 3 packages in [TIME]
+    "###
+    );
+
+    // We have source dist and wheel for the registry, but only the wheel for the direct URL.
+    assert_snapshot!(fs_err::read_to_string(context.temp_dir.join("uv.lock"))?, @r###"
+    version = 1
+    requires-python = ">=3.11, <3.13"
+
+    [[distribution]]
+    name = "a"
+    version = "0.1.0"
+    source = "editable+."
+    sdist = { path = "." }
+
+    [[distribution.dependencies]]
+    name = "iniconfig"
+    version = "1.1.1"
+    source = "registry+https://pypi.org/simple"
+    marker = "python_version < '3.12'"
+
+    [[distribution.dependencies]]
+    name = "iniconfig"
+    version = "2.0.0"
+    source = "direct+https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl"
+    marker = "python_version >= '3.12'"
+
+    [[distribution]]
+    name = "iniconfig"
+    version = "1.1.1"
+    source = "registry+https://pypi.org/simple"
+    sdist = { url = "https://files.pythonhosted.org/packages/23/a2/97899f6bd0e873fed3a7e67ae8d3a08b21799430fb4da15cfedf10d6e2c2/iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32", size = 8104 }
+    wheels = [{ url = "https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3", size = 4990 }]
+
+    [[distribution]]
+    name = "iniconfig"
+    version = "2.0.0"
+    source = "direct+https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl"
+    wheels = [{ url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" }]
+    "###);
+
+    Ok(())
+}
+
+/// The root package has two different direct URLs for disjoint forks, but they are from different sources.
+///
+/// ```toml
+/// dependencies = [
+///   "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl ; python_version < '3.12'",
+///   "iniconfig @ git+https://github.com/pytest-dev/iniconfig@93f5930e668c0d1ddf4597e38dd0dea4e2665e7a ; python_version >= '3.12'",
+/// ]
+/// ```
+#[test]
+fn branching_urls_of_different_sources_disjoint() -> Result<()> {
+    let context = TestContext::new("3.12");
+
+    let deps = indoc! {r#"
+        dependencies = [
+          # Valid, disjoint split
+          "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl ; python_version < '3.12'",
+          "iniconfig @ git+https://github.com/pytest-dev/iniconfig@93f5930e668c0d1ddf4597e38dd0dea4e2665e7a ; python_version >= '3.12'",
+        ]
+    "# };
+    make_project(context.temp_dir.path(), "a", deps)?;
+
+    uv_snapshot!(context.filters(), context.lock().arg("--preview").current_dir(&context.temp_dir), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+
+    ----- stderr -----
+    Resolved 3 packages in [TIME]
+    "###
+    );
+
+    // We have only the wheel for the direct URL, and a Git source distribution for the Git URL.
+    assert_snapshot!(fs_err::read_to_string(context.temp_dir.join("uv.lock"))?, @r###"
+    version = 1
+    requires-python = ">=3.11, <3.13"
+
+    [[distribution]]
+    name = "a"
+    version = "0.1.0"
+    source = "editable+."
+    sdist = { path = "." }
+
+    [[distribution.dependencies]]
+    name = "iniconfig"
+    version = "1.1.1"
+    source = "direct+https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl"
+    marker = "python_version < '3.12'"
+
+    [[distribution.dependencies]]
+    name = "iniconfig"
+    version = "2.0.0"
+    source = "git+https://github.com/pytest-dev/iniconfig?rev=93f5930e668c0d1ddf4597e38dd0dea4e2665e7a#93f5930e668c0d1ddf4597e38dd0dea4e2665e7a"
+    marker = "python_version >= '3.12'"
+
+    [[distribution]]
+    name = "iniconfig"
+    version = "1.1.1"
+    source = "direct+https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl"
+    wheels = [{ url = "https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3" }]
+
+    [[distribution]]
+    name = "iniconfig"
+    version = "2.0.0"
+    source = "git+https://github.com/pytest-dev/iniconfig?rev=93f5930e668c0d1ddf4597e38dd0dea4e2665e7a#93f5930e668c0d1ddf4597e38dd0dea4e2665e7a"
+    sdist = { url = "https://github.com/pytest-dev/iniconfig?rev=93f5930e668c0d1ddf4597e38dd0dea4e2665e7a#93f5930e668c0d1ddf4597e38dd0dea4e2665e7a" }
+    "###);
+
+    Ok(())
+}
+
+/// The root package has two different direct URLs from different sources, but their markers
+/// are not disjoint.
+///
+/// ```toml
+/// dependencies = [
+///   "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl ; python_version < '3.12'",
+///   "iniconfig @ git+https://github.com/pytest-dev/iniconfig@93f5930e668c0d1ddf4597e38dd0dea4e2665e7a ; python_version >= '3.11'",
+/// ]
+/// ```
+#[test]
+fn branching_urls_of_different_sources_conflict() -> Result<()> {
+    let context = TestContext::new("3.12");
+
+    let deps = indoc! {r#"
+        dependencies = [
+          # Conflicting split
+          "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl ; python_version < '3.12'",
+          "iniconfig @ git+https://github.com/pytest-dev/iniconfig@93f5930e668c0d1ddf4597e38dd0dea4e2665e7a ; python_version >= '3.11'",
+        ]
+    "# };
+    make_project(context.temp_dir.path(), "a", deps)?;
+
+    uv_snapshot!(context.filters(), context.lock().arg("--preview").current_dir(&context.temp_dir), @r###"
+    success: false
+    exit_code: 2
+    ----- stdout -----
+
+    ----- stderr -----
+    error: Requirements contain conflicting URLs for package `iniconfig`:
+    - git+https://github.com/pytest-dev/iniconfig@93f5930e668c0d1ddf4597e38dd0dea4e2665e7a
+    - https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl
+    "###
+    );
+
+    Ok(())
+}
+
+/// Ensure that we don't pre-visit packages with URLs.
+#[test]
+fn dont_previsit_url_packages() -> Result<()> {
+    let context = TestContext::new("3.12");
+
+    let deps = indoc! {r#"
+        dependencies = [
+          # This c is not a registry distribution; we must not pre-visit it as such.
+          "c==0.1.0",
+          "b",
+        ]
+
+        [tool.uv.sources]
+        b = { path = "b" }
+    "# };
+    make_project(context.temp_dir.path(), "a", deps)?;
+
+    let deps = indoc! {r#"
+        dependencies = [
+          "c",
+        ]
+
+        [tool.uv.sources]
+        c = { path = "../c" }
+    "# };
+    make_project(&context.temp_dir.join("b"), "b", deps)?;
+    let deps = indoc! {r"
+        dependencies = []
+    " };
+    make_project(&context.temp_dir.join("c"), "c", deps)?;
+
+    uv_snapshot!(context.filters(), context.lock().arg("--preview").arg("--offline").current_dir(&context.temp_dir), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+
+    ----- stderr -----
+    Resolved 3 packages in [TIME]
+    "###
+    );
+
+    Ok(())
+}
diff --git a/crates/uv/tests/pip_compile.rs b/crates/uv/tests/pip_compile.rs
index 02f89cf779f6..5daffafe49dc 100644
--- a/crates/uv/tests/pip_compile.rs
+++ b/crates/uv/tests/pip_compile.rs
@@ -1940,8 +1940,8 @@ fn conflicting_repeated_url_dependency() -> Result<()> {
     ----- stderr -----
     error: Requirements contain conflicting URLs for package `uv-public-pypackage`:
-    - git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
     - git+https://github.com/astral-test/uv-public-pypackage.git@0.0.1
+    - git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
     "###
     );
@@ -8496,6 +8496,8 @@ requires-python = ">3.8"
 
 /// Allow URL dependencies recursively for local source trees, but respect both overrides _and_
 /// constraints.
+///
+/// We have app -> lib -> anyio, and the root has a directory requirement on app.
 #[test]
 fn allow_recursive_url_local_path_override_constraint() -> Result<()> {
     let context = TestContext::new("3.12");