Expand uv workspace metadata with dependency information from the lock (#18356)

## Summary

This expands `uv workspace metadata` with many of the fields that are
found in `uv.lock` so that we have a format with information about the
dependency graph/resolution that we're willing to call stable and have
people rely upon (rather than `uv.lock` which we'd rather you don't try
to interpret).

To a first approximation you can think of this as "uv.lock but
serialized to json" but with the fields a bit more limited for now (easy
to add later).

The biggest intentional divergence with uv.lock is that we favour
encoding the dependency graph in a form that looks more like our
internal "resolve" graph, in the hope that it will simplify the work
of anyone doing analysis on the graph (we structure our internal graph
like this for a reason).

Specifically, the `resolve` field contains the entire dependency graph,
with packages desugared into several different nodes. There are 4 kinds
of nodes (really 3, the build nodes will only be introduced when we
establish build-dependency locking):

* packages: `mypackage==1.0.0 @ registry+https://pypi.org/simple`
* extras: `mypackage[myextra]==1.0.0 @ registry+https://pypi.org/simple`
* groups: `mypackage:mygroup==1.0.0 @ registry+https://pypi.org/simple`
* build:    `mypackage(build)==1.0.0 @ registry+https://pypi.org/simple`

package nodes hold additional metadata about the package itself, and ids
of the associated extra/group/build nodes.

---

A package like this:

```toml
[project]
name = "mypackage"
version = "1.0.0"

dependencies = ["httpx"]

[project.optional-dependencies]
cli = ["rich"]

[dependency-groups]
dev = ["typing-extensions"]

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
```

will get 4 nodes with the following edges (Version and Source omitted
here for brevity):
* `mypackage`
  * `httpx`
* `mypackage(build)`
  * `hatchling`
* `mypackage[cli]`
  * `mypackage`
  * `rich`
* `mypackage:dev`
  * `typing-extensions`
  
Note that `mypackage[cli]` has a dependency edge on `mypackage` while
`mypackage:dev` does not. This is because
`mypackage[cli]` is fundamentally an augmentation of `mypackage` while
`mypackage:dev` is just a list of packages that happens to be defined by
`mypackage`'s pyproject.toml.
 
 The resulting nodes for `mypackage` will look something like:
 
 <details>
 <summary>json blob</summary>
 
```json
{
  "resolve": {
    "mypackage==1.0.0 @ editable+.": {
      "name": "mypackage",
      "version": "1.0.0",
      "source": {
        "editable": "."
      },
      "kind": "package",
      "dependencies": [
        {
          "id": "httpx==3.6 @ registry+https://pypi.org/simple",
          "marker": "sys_platform == 'linux'"
        }
      ],
      "optional_dependencies": [
        {
          "name": "cli",
          "id": "mypackage[cli]==1.0.0 @ editable+."
        }
      ],
      "dependency_groups": [
        {
          "name": "dev",
          "id": "mypackage:dev==1.0.0 @ editable+."
        }
      ],
      "build_system": {
        "build_backend": "hatchling.build",
        "id": "mypackage(build)==1.0.0 @ editable+."
      },
      "sdist": { ... },
      "wheels": [ ... ]
    },
    "mypackage:dev==1.0.0 @ editable+.": {
      "name": "mypackage",
      "version": "1.0.0",
      "source": {
        "editable": "."
      },
      "kind": {
        "group": "dev"
      },
      "dependencies": [
        {
          "id": "typing-extensions==1.2.3 @ registry+https://pypi.org/simple"
        }
      ]
    },
    "mypackage[cli]==1.0.0 @ editable+.": {
      "name": "mypackage",
      "version": "1.0.0",
      "source": {
        "editable": "."
      },
      "kind": {
        "extra": "cli"
      },
      "dependencies": [
        {
          "id": "rich==2.2.3 @ registry+https://pypi.org/simple"
        },
        {
          "id": "mypackage==1.0.0 @ editable+."
        }
      ]
    },
    "mypackage(build)==1.0.0 @ editable+.": {
      "name": "mypackage",
      "version": "1.0.0",
      "source": {
        "editable": "."
      },
      "kind": "build",
      "dependencies": [
        {
          "id": "hatchling==3.2.3 @ registry+https://pypi.org/simple"
        }
      ]
    }
  }
}
```

</details>

## Test Plan

Snapshots
This commit is contained in:
Aria Desires
2026-03-27 09:22:03 -04:00
committed by GitHub
parent a4ee36e7d3
commit 202e0f0831
20 changed files with 2314 additions and 103 deletions
Generated
+1
View File
@@ -7154,6 +7154,7 @@ dependencies = [
"same-file",
"schemars",
"serde",
"serde_json",
"smallvec",
"textwrap",
"thiserror 2.0.18",
+49 -3
View File
@@ -7884,7 +7884,7 @@ pub enum WorkspaceCommand {
/// View metadata about the current workspace.
///
/// The output of this command is not yet stable.
Metadata(MetadataArgs),
Metadata(Box<MetadataArgs>),
/// Display the path of a workspace member.
///
/// By default, the path to the workspace root directory is displayed.
@@ -7898,9 +7898,55 @@ pub enum WorkspaceCommand {
#[command(hide = true)]
List(WorkspaceListArgs),
}
#[derive(Args)]
pub struct MetadataArgs {
/// Check if the lockfile is up-to-date [env: UV_LOCKED=]
///
/// Asserts that the `uv.lock` would remain unchanged after a resolution. If the lockfile is
/// missing or needs to be updated, uv will exit with an error.
#[arg(long, conflicts_with_all = ["frozen", "upgrade"])]
pub locked: bool,
#[derive(Args, Debug)]
pub struct MetadataArgs;
/// Assert that a `uv.lock` exists without checking if it is up-to-date [env: UV_FROZEN=]
#[arg(long, conflicts_with_all = ["locked"])]
pub frozen: bool,
/// Perform a dry run, without writing the lockfile.
///
/// In dry-run mode, uv will resolve the project's dependencies and report on the resulting
/// changes, but will not write the lockfile to disk.
#[arg(long, conflicts_with = "frozen", conflicts_with = "locked")]
pub dry_run: bool,
#[command(flatten)]
pub resolver: ResolverArgs,
#[command(flatten)]
pub build: BuildOptionsArgs,
#[command(flatten)]
pub refresh: RefreshArgs,
/// The Python interpreter to use during resolution.
///
/// A Python interpreter is required for building source distributions to determine package
/// metadata when there are not wheels.
///
/// The interpreter is also used as the fallback value for the minimum Python version if
/// `requires-python` is not set.
///
/// See `uv help python` for details on Python discovery and supported request formats.
#[arg(
long,
short,
env = EnvVars::UV_PYTHON,
verbatim_doc_comment,
help_heading = "Python options",
value_parser = parse_maybe_string,
value_hint = ValueHint::Other,
)]
pub python: Option<Maybe<String>>,
}
#[derive(Args, Debug)]
pub struct WorkspaceDirArgs {
+1
View File
@@ -66,6 +66,7 @@ rustc-hash = { workspace = true }
same-file = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true }
serde_json = { workspace = true }
smallvec = { workspace = true }
textwrap = { workspace = true }
thiserror = { workspace = true }
+1 -1
View File
@@ -9,7 +9,7 @@ pub use exclusions::Exclusions;
pub use flat_index::{FlatDistributions, FlatIndex};
pub use fork_strategy::ForkStrategy;
pub use lock::{
Installable, Lock, LockError, LockVersion, Package, PackageMap, PylockToml,
Installable, Lock, LockError, LockVersion, Metadata, Package, PackageMap, PylockToml,
PylockTomlErrorKind, RequirementsTxtExport, ResolverManifest, SatisfiesResult, TreeDisplay,
VERSION, cyclonedx_json,
};
@@ -0,0 +1,831 @@
use std::collections::BTreeMap;
use std::fmt::Display;
use uv_distribution_filename::WheelFilename;
use uv_distribution_types::{RequiresPython, UrlString};
use uv_fs::PortablePathBuf;
use uv_normalize::{ExtraName, GroupName, PackageName};
use uv_pep440::Version;
use uv_pypi_types::{ConflictItem, ConflictKind, ConflictSet, Conflicts};
use uv_workspace::Workspace;
use crate::Lock;
use crate::lock::{
Dependency, DirectSource, PackageId, RegistrySource, Source, SourceDist, SourceDistMetadata,
Wheel, WheelWireSource, ZstdWheel,
};
/// The internal error variants for [`MetadataError`].
///
/// Boxed inside `MetadataError` to keep the error type small on the happy path.
#[derive(Debug, thiserror::Error)]
enum MetadataErrorKind {
    /// JSON serialization of the metadata report failed.
    #[error(transparent)]
    Serialize(#[from] serde_json::error::Error),
}

/// An error produced while building or rendering `uv workspace metadata` output.
#[derive(Debug)]
pub struct MetadataError {
    // Boxed so `Result<_, MetadataError>` stays cheap to move around.
    kind: Box<MetadataErrorKind>,
}

impl std::error::Error for MetadataError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        // Delegate to the kind (which forwards transparently to the underlying error).
        self.kind.source()
    }
}

impl std::fmt::Display for MetadataError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.kind)?;
        Ok(())
    }
}

// Blanket conversion so `?` can lift anything `MetadataErrorKind` converts from.
impl<E> From<E> for MetadataError
where
    MetadataErrorKind: From<E>,
{
    fn from(err: E) -> Self {
        Self {
            kind: Box::new(MetadataErrorKind::from(err)),
        }
    }
}
/// The full `uv workspace metadata` JSON object
#[derive(Debug, serde::Serialize)]
pub struct Metadata {
    /// Format information
    schema: SchemaReport,
    /// Absolute path to the workspace root
    ///
    /// Ideally absolute paths to things that are found in subdirs of this should have exactly
    /// this as a prefix so it can be stripped to get relative paths if one wants.
    workspace_root: PortablePathBuf,
    /// The version of python required by the workspace
    ///
    /// Every `marker` we emit implicitly assumes this constraint to keep things clean
    requires_python: RequiresPython,
    /// Info about conflicting packages
    conflicts: MetadataConflicts,
    /// An index of which nodes are workspace members
    ///
    /// These entries are often what you should use as the entry-points into the `resolve` graph.
    #[serde(skip_serializing_if = "Vec::is_empty", default)]
    members: Vec<MetadataWorkspaceMember>,
    /// The dependency graph
    ///
    /// NOTE(review): this field serializes as `"resolution"` (no serde rename), while the
    /// narrative docs above describe a `resolve` field — confirm which name the stable
    /// schema intends.
    #[serde(skip_serializing_if = "BTreeMap::is_empty", default)]
    resolution: BTreeMap<MetadataNodeIdFlat, MetadataNode>,
}
/// The schema version for the metadata report.
#[derive(serde::Serialize, Debug, Default)]
#[serde(rename_all = "snake_case")]
enum SchemaVersion {
    /// An unstable, experimental schema.
    ///
    /// Serialized as `"preview"`; the `Default` derive makes this the default version.
    #[default]
    Preview,
}

/// The schema metadata for the metadata report.
#[derive(serde::Serialize, Debug, Default)]
struct SchemaReport {
    /// The version of the schema.
    version: SchemaVersion,
}

/// Info for looking up workspace members, most information is stored in the node behind `id`
#[derive(Debug, serde::Serialize)]
struct MetadataWorkspaceMember {
    /// Package name
    name: PackageName,
    /// Absolute path to the member
    path: PortablePathBuf,
    /// Key for the package's node in the `resolve` graph
    id: MetadataNodeIdFlat,
}
/// A node in the dependency graph
///
/// There are 4 kinds of nodes:
///
/// * packages: `mypackage==1.0.0@registry+https://pypi.org/simple`
/// * extras: `mypackage[myextra]==1.0.0@registry+https://pypi.org/simple`
/// * groups: `mypackage:mygroup==1.0.0@registry+https://pypi.org/simple`
/// * build: `mypackage(build)==1.0.0@registry+https://pypi.org/simple`
///
/// -----------
///
/// A package like this:
///
/// ```toml
/// [project]
/// name = "mypackage"
/// version = "1.0.0"
///
/// dependencies = ["httpx"]
///
/// [project.optional-dependencies]
/// cli = ["rich"]
///
/// [dependency-groups]
/// dev = ["typing-extensions"]
///
/// [build-system]
/// requires = ["hatchling"]
/// ```
///
/// will get 4 nodes with the following edges (Version and Source omitted here for brevity):
///
/// * `mypackage`
///   * `httpx`
/// * `mypackage(build)`
///   * `hatchling`
/// * `mypackage[cli]`
///   * `mypackage`
///   * `rich`
/// * `mypackage:dev`
///   * `typing-extensions`
///
/// Note that `mypackage[cli]` has a dependency edge on `mypackage` while `mypackage:dev` does not.
/// This is because `mypackage[cli]` is fundamentally an augmentation of `mypackage` while `mypackage:dev`
/// is just a list of packages that happens to be defined by `mypackage`'s pyproject.toml.
#[derive(Debug, Clone, serde::Serialize)]
struct MetadataNode {
    /// A unique id for this node that will be used to refer to it
    ///
    /// Flattened, so the id's `name`/`version`/`source`/`kind` appear as direct fields.
    #[serde(flatten)]
    id: MetadataNodeId,
    /// Dependencies of this node (the edges of The Graph)
    dependencies: Vec<MetadataDependency>,
    /// Extras defined on this package (pointers to the corresponding extra nodes)
    #[serde(skip_serializing_if = "Vec::is_empty", default)]
    optional_dependencies: Vec<MetadataExtra>,
    /// Dependency groups defined on this package (pointers to the corresponding group nodes)
    #[serde(skip_serializing_if = "Vec::is_empty", default)]
    dependency_groups: Vec<MetadataGroup>,
    /// Info about building the package
    #[serde(skip_serializing_if = "Option::is_none", default)]
    build_system: Option<MetadataBuildSystem>,
    /// The source distribution found
    #[serde(skip_serializing_if = "Option::is_none", default)]
    sdist: Option<MetadataSourceDist>,
    /// Wheels we found
    #[serde(skip_serializing_if = "Vec::is_empty", default)]
    wheels: Vec<MetadataWheel>,
}
impl MetadataNode {
    /// Create an empty node with the given id: no edges, no artifacts.
    fn new(id: MetadataNodeId) -> Self {
        Self {
            id,
            dependencies: Vec::new(),
            optional_dependencies: Vec::new(),
            dependency_groups: Vec::new(),
            build_system: None,
            sdist: None,
            wheels: Vec::new(),
        }
    }

    /// Create an empty node for a lockfile package, with the given node kind.
    fn from_package_id(
        workspace_root: &PortablePathBuf,
        id: &PackageId,
        kind: MetadataNodeKind,
    ) -> Self {
        let node_id = MetadataNodeId::from_package_id(workspace_root, id, kind);
        Self::new(node_id)
    }

    /// Record the edges for a lockfile dependency.
    ///
    /// A dependency without extras becomes one edge to the target's package node;
    /// a dependency with extras becomes one edge per extra node instead.
    fn add_dependency(&mut self, workspace_root: &PortablePathBuf, dependency: &Dependency) {
        let extras = dependency.extra();
        if extras.is_empty() {
            let target = MetadataNodeId::from_package_id(
                workspace_root,
                &dependency.package_id,
                MetadataNodeKind::Package,
            );
            self.dependencies.push(MetadataDependency {
                id: target.to_flat(),
                marker: dependency.simplified_marker.try_to_string(),
            });
        } else {
            for extra in extras {
                let target = MetadataNodeId::from_package_id(
                    workspace_root,
                    &dependency.package_id,
                    MetadataNodeKind::Extra(extra.clone()),
                );
                self.dependencies.push(MetadataDependency {
                    id: target.to_flat(),
                    marker: dependency.simplified_marker.try_to_string(),
                });
            }
        }
    }
}
/// The unique key for every node in the graph
///
/// (It's not entirely clear to me that two nodes can differ only by `source` but it doesn't hurt.)
#[derive(Debug, Clone, serde::Serialize)]
struct MetadataNodeId {
    /// The name of the package
    name: PackageName,
    /// The version of the package, if any could be found (source trees may have no version)
    #[serde(skip_serializing_if = "Option::is_none", default)]
    version: Option<Version>,
    /// The source of the package (directory, registry, URL...)
    source: MetadataSource,
    /// What kind of node is this?
    kind: MetadataNodeKind,
}

/// This is intended to be an opaque unique id for referring to a node
///
/// It's human readable for convenience but parsing it or relying on it is inadvisable.
/// As currently implemented this is just a concatenation of the 4 fields in `MetadataNodeId`
/// (rendered by its `Display` impl) which every node includes, so parsing it is just making
/// more work for yourself.
type MetadataNodeIdFlat = String;
impl MetadataNodeId {
    /// Build a node id from a lockfile [`PackageId`] plus the desired node kind.
    fn from_package_id(
        workspace_root: &PortablePathBuf,
        id: &PackageId,
        kind: MetadataNodeKind,
    ) -> Self {
        Self {
            name: id.name.clone(),
            version: id.version.clone(),
            source: MetadataSource::from_source(workspace_root, id.source.clone()),
            kind,
        }
    }

    /// Render the opaque string form used as map keys and edge references.
    fn to_flat(&self) -> MetadataNodeIdFlat {
        format!("{self}")
    }
}
impl Display for MetadataNodeId {
    /// Formats as `name{kind-decoration}==version@source` (e.g.
    /// `mypackage[cli]==1.0.0@editable+.`); the `==version` segment is omitted
    /// when the version is unknown.
    ///
    /// NOTE(review): this writes `@` with no surrounding spaces, while the module-level
    /// example ids elsewhere use ` @ ` — confirm which spelling the stable format and
    /// snapshots expect.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.version {
            Some(version) => write!(f, "{}{}=={version}@{}", self.name, self.kind, self.source),
            None => write!(f, "{}{}@{}", self.name, self.kind, self.source),
        }
    }
}
/// An edge in the dependency graph
#[derive(Debug, Clone, serde::Serialize)]
struct MetadataDependency {
    /// The node this edge points at (a key into the `resolve` map)
    id: MetadataNodeIdFlat,
    /// The environment marker gating this edge, if any (e.g. `sys_platform == 'linux'`)
    #[serde(skip_serializing_if = "Option::is_none", default)]
    marker: Option<MetadataMarker>,
}

/// An environment marker expression, rendered as a string
type MetadataMarker = String;
/// The kind a node can have in the dependency graph
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, serde::Serialize)]
#[serde(rename_all = "snake_case")]
enum MetadataNodeKind {
    /// The node is the package itself
    /// its edges are `project.dependencies`
    Package,
    /// The node is for building the package's sdist into a wheel
    /// its edges are `build-system.requires`
    ///
    /// Not constructed anywhere yet (reserved for build-dependency locking),
    /// hence the `dead_code` expectation.
    #[expect(dead_code)]
    Build,
    /// The node is for an extra defined on the package
    /// its edges are `project.optional-dependencies.myextra`
    Extra(ExtraName),
    /// The node is for a dependency-group defined on the package
    /// its edges are `dependency-groups.mygroup`
    Group(GroupName),
}
impl Display for MetadataNodeKind {
    /// Writes the node-id decoration for this kind: nothing for a plain package,
    /// `(build)`, `[extra]`, or `:group` otherwise.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            // A plain package gets no decoration at all.
            Self::Package => Ok(()),
            Self::Build => write!(f, "(build)"),
            Self::Extra(extra) => write!(f, "[{extra}]"),
            Self::Group(group) => write!(f, ":{group}"),
        }
    }
}
/// Where a package came from (directory, registry, URL...)
///
/// Serialized untagged: the JSON object contains just the variant's field name(s),
/// e.g. `{"editable": "."}`.
#[derive(Clone, Debug, serde::Serialize)]
#[serde(untagged, rename_all = "snake_case")]
enum MetadataSource {
    /// A package from a package index
    Registry {
        registry: MetadataRegistrySource,
    },
    /// A package from a git repository
    Git {
        git: UrlString,
    },
    /// A package from a direct URL
    ///
    /// NOTE(review): `subdirectory` has no `skip_serializing_if`, so it serializes as
    /// `null` when absent, unlike the optional fields elsewhere — confirm intentional.
    Direct {
        url: UrlString,
        subdirectory: Option<PortablePathBuf>,
    },
    /// A package at a local filesystem path
    Path {
        path: PortablePathBuf,
    },
    /// A package in a local directory
    Directory {
        directory: PortablePathBuf,
    },
    /// A local directory installed in editable mode
    Editable {
        editable: PortablePathBuf,
    },
    /// A local virtual (non-installable) project
    Virtual {
        r#virtual: PortablePathBuf,
    },
}
impl Display for MetadataSource {
    /// Formats as `<kind>+<location>`, e.g. `registry+https://pypi.org/simple`
    /// or `editable+.`.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        // All variants share the `<kind>+` prefix; only the location differs.
        write!(f, "{}+", self.name())?;
        match self {
            Self::Registry {
                registry: MetadataRegistrySource::Url(url),
            }
            | Self::Git { git: url }
            | Self::Direct { url, .. } => write!(f, "{url}"),
            Self::Registry {
                registry: MetadataRegistrySource::Path(path),
            }
            | Self::Path { path }
            | Self::Directory { directory: path }
            | Self::Editable { editable: path }
            | Self::Virtual { r#virtual: path } => write!(f, "{path}"),
        }
    }
}

impl MetadataSource {
    /// The stable kind label used as the prefix of the `+`-separated display form.
    fn name(&self) -> &str {
        match self {
            Self::Registry { .. } => "registry",
            Self::Git { .. } => "git",
            Self::Direct { .. } => "direct",
            Self::Path { .. } => "path",
            Self::Directory { .. } => "directory",
            Self::Editable { .. } => "editable",
            Self::Virtual { .. } => "virtual",
        }
    }
}
impl MetadataSource {
    /// Convert a lockfile [`Source`] into its metadata representation,
    /// absolutizing any workspace-relative paths against `workspace_root`.
    fn from_source(workspace_root: &PortablePathBuf, source: Source) -> Self {
        // Shorthand for absolutizing a possibly workspace-relative path.
        let norm =
            |path: &std::path::Path| normalize_workspace_relative_path(workspace_root, path);
        match source {
            Source::Registry(RegistrySource::Url(url)) => Self::Registry {
                registry: MetadataRegistrySource::Url(url),
            },
            Source::Registry(RegistrySource::Path(path)) => Self::Registry {
                registry: MetadataRegistrySource::Path(norm(&path)),
            },
            Source::Git(url, _) => Self::Git { git: url },
            Source::Direct(url, DirectSource { subdirectory }) => Self::Direct {
                url,
                subdirectory: subdirectory.map(|path| norm(&path)),
            },
            Source::Path(path) => Self::Path { path: norm(&path) },
            Source::Directory(path) => Self::Directory {
                directory: norm(&path),
            },
            Source::Editable(path) => Self::Editable {
                editable: norm(&path),
            },
            Source::Virtual(path) => Self::Virtual {
                r#virtual: norm(&path),
            },
        }
    }
}
/// Absolutize `maybe_rel` against the workspace root.
///
/// Absolute paths are passed through untouched; relative paths are interpreted
/// as workspace-root-relative and joined onto `workspace_root`.
fn normalize_workspace_relative_path(
    workspace_root: &PortablePathBuf,
    maybe_rel: &std::path::Path,
) -> PortablePathBuf {
    if maybe_rel.is_relative() {
        let joined = workspace_root.as_ref().join(maybe_rel);
        return PortablePathBuf::from(joined.as_path());
    }
    PortablePathBuf::from(maybe_rel)
}
/// The location of a registry index
#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
enum MetadataRegistrySource {
    /// Ex) `https://pypi.org/simple`
    Url(UrlString),
    /// Ex) `/path/to/local/index`
    Path(PortablePathBuf),
}
/// A source distribution recorded in the lockfile
///
/// Serialized untagged with the metadata flattened in, so each variant is just the
/// metadata fields plus an optional `url` or `path`.
#[derive(Clone, Debug, serde::Serialize)]
#[serde(untagged, rename_all = "snake_case")]
enum MetadataSourceDist {
    /// An sdist available at a URL
    Url {
        url: UrlString,
        #[serde(flatten)]
        metadata: MetadataSourceDistMetadata,
    },
    /// An sdist available at a local path
    Path {
        path: PortablePathBuf,
        #[serde(flatten)]
        metadata: MetadataSourceDistMetadata,
    },
    /// An sdist with only metadata recorded (no location)
    Metadata {
        #[serde(flatten)]
        metadata: MetadataSourceDistMetadata,
    },
}
impl MetadataSourceDist {
    /// Convert a lockfile [`SourceDist`] into its metadata representation,
    /// absolutizing any workspace-relative paths.
    fn from_sdist(workspace_root: &PortablePathBuf, sdist: &SourceDist) -> Self {
        match sdist {
            SourceDist::Metadata { metadata } => Self::Metadata {
                metadata: MetadataSourceDistMetadata::from_sdist(metadata),
            },
            SourceDist::Url { url, metadata } => Self::Url {
                url: url.clone(),
                metadata: MetadataSourceDistMetadata::from_sdist(metadata),
            },
            SourceDist::Path { path, metadata } => Self::Path {
                path: normalize_workspace_relative_path(workspace_root, path),
                metadata: MetadataSourceDistMetadata::from_sdist(metadata),
            },
        }
    }
}
/// Registry-style metadata for a source distribution (hashes, size, upload time)
#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct MetadataSourceDistMetadata {
    /// A hash of the source distribution.
    ///
    /// Keyed by algorithm name, PEP 691-style (see [`hashes_map`]).
    #[serde(skip_serializing_if = "BTreeMap::is_empty", default)]
    hashes: BTreeMap<HashAlgorithm, Hash>,
    /// The size of the source distribution in bytes.
    ///
    /// This is only present for source distributions that come from registries.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    size: Option<u64>,
    /// The upload time of the source distribution.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    upload_time: Option<jiff::Timestamp>,
}

/// The name of a hash algorithm ("sha256", "blake2b", "md5", etc)
type HashAlgorithm = String;
/// A hex encoded digest of the file
type Hash = String;
/// Build a single-entry `hashes` map (algorithm name -> hex digest) from a lockfile hash.
///
/// We prefer matching PEP 691 (JSON-based Simple API for Python) here for future-proofing
/// and convenience of consumption.
fn hashes_map(hash: &crate::lock::Hash) -> BTreeMap<HashAlgorithm, Hash> {
    BTreeMap::from([(hash.0.algorithm.to_string(), hash.0.digest.to_string())])
}
impl MetadataSourceDistMetadata {
fn from_sdist(sdist: &SourceDistMetadata) -> Self {
Self {
hashes: sdist.hash.as_ref().map(hashes_map).unwrap_or_default(),
size: sdist.size,
upload_time: sdist.upload_time,
}
}
}
/// A wheel recorded in the lockfile
#[derive(Clone, Debug, serde::Serialize)]
struct MetadataWheel {
    /// A URL or file path (via `file://`) where the wheel that was locked
    /// against was found. The location does not need to exist in the future,
    /// so this should be treated as only a hint to where to look and/or
    /// recording where the wheel file originally came from.
    ///
    /// `None` when the lock only recorded a filename; flattened, so a missing
    /// source simply contributes no fields.
    #[serde(flatten)]
    source: Option<MetadataWheelWireSource>,
    /// A hash of the built distribution.
    ///
    /// This is only present for wheels that come from registries and direct
    /// URLs. Wheels from git or path dependencies do not have hashes
    /// associated with them.
    #[serde(skip_serializing_if = "BTreeMap::is_empty", default)]
    hashes: BTreeMap<HashAlgorithm, Hash>,
    /// The size of the built distribution in bytes.
    ///
    /// This is only present for wheels that come from registries.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    size: Option<u64>,
    /// The upload time of the built distribution.
    ///
    /// This is only present for wheels that come from registries.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    upload_time: Option<jiff::Timestamp>,
    /// The filename of the wheel.
    ///
    /// Copied from the lockfile's wheel record, and always emitted — even when
    /// the wheel's `url`/`path` would make it redundant.
    filename: WheelFilename,
    /// The zstandard-compressed wheel metadata, if any.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    zstd: Option<MetadataZstdWheel>,
}
impl MetadataWheel {
fn from_wheel(workspace_root: &PortablePathBuf, wheel: &Wheel) -> Self {
Self {
source: MetadataWheelWireSource::from_wheel(workspace_root, &wheel.url),
hashes: wheel.hash.as_ref().map(hashes_map).unwrap_or_default(),
size: wheel.size,
upload_time: wheel.upload_time,
filename: wheel.filename.clone(),
zstd: wheel.zstd.as_ref().map(MetadataZstdWheel::from_wheel),
}
}
}
/// Where a wheel was found (filename-only sources are omitted; see [`MetadataWheel::filename`])
#[derive(Clone, Debug, serde::Serialize)]
#[serde(untagged, rename_all = "snake_case")]
enum MetadataWheelWireSource {
    /// The wheel is available at a URL
    Url { url: UrlString },
    /// The wheel is available at a local path
    Path { path: PortablePathBuf },
}
impl MetadataWheelWireSource {
    /// Convert a lockfile wheel source.
    ///
    /// Returns `None` for filename-only sources: the filename is already emitted
    /// as a dedicated field on [`MetadataWheel`], so repeating it here would be redundant.
    fn from_wheel(workspace_root: &PortablePathBuf, wheel: &WheelWireSource) -> Option<Self> {
        let source = match wheel {
            WheelWireSource::Url { url } => Self::Url { url: url.clone() },
            WheelWireSource::Path { path } => Self::Path {
                path: normalize_workspace_relative_path(workspace_root, path),
            },
            WheelWireSource::Filename { .. } => return None,
        };
        Some(source)
    }
}
/// Info about the zstandard-compressed form of a wheel
#[derive(Clone, Debug, serde::Serialize)]
struct MetadataZstdWheel {
    /// Hashes of the compressed wheel, keyed by algorithm name (see [`hashes_map`])
    #[serde(skip_serializing_if = "BTreeMap::is_empty", default)]
    hashes: BTreeMap<HashAlgorithm, Hash>,
    /// Size of the compressed wheel in bytes, if known
    #[serde(skip_serializing_if = "Option::is_none", default)]
    size: Option<u64>,
}
impl MetadataZstdWheel {
fn from_wheel(wheel: &ZstdWheel) -> Self {
Self {
hashes: wheel.hash.as_ref().map(hashes_map).unwrap_or_default(),
size: wheel.size,
}
}
}
/// An extra defined on a package, and the node that holds its dependencies
#[derive(Clone, Debug, serde::Serialize)]
struct MetadataExtra {
    /// The extra's name
    name: ExtraName,
    /// Key for the extra's node in the `resolve` graph
    id: MetadataNodeIdFlat,
}

/// A dependency group defined on a package, and the node that holds its dependencies
#[derive(Clone, Debug, serde::Serialize)]
struct MetadataGroup {
    /// The group's name
    name: GroupName,
    /// Key for the group's node in the `resolve` graph
    id: MetadataNodeIdFlat,
}

/// The package's build system, and the node that holds its build dependencies
#[derive(Clone, Debug, serde::Serialize)]
struct MetadataBuildSystem {
    /// The `build-backend` specified in the pyproject.toml
    build_backend: String,
    /// Key for the build node in the `resolve` graph
    id: MetadataNodeIdFlat,
}

/// Conflicts
///
/// Mirrors the lock's declared conflicts, re-keyed to `resolve` node ids.
#[derive(Clone, Debug, serde::Serialize)]
struct MetadataConflicts {
    /// Each set is a group of mutually conflicting extras/groups/projects
    sets: Vec<MetadataConflictSet>,
}
impl MetadataConflicts {
    /// Convert the resolver's [`Conflicts`] into their metadata representation.
    fn from_conflicts(
        members: &[MetadataWorkspaceMember],
        resolve: &BTreeMap<MetadataNodeIdFlat, MetadataNode>,
        conflicts: &Conflicts,
    ) -> Self {
        let mut sets = Vec::new();
        for set in conflicts.iter() {
            sets.push(MetadataConflictSet::from_conflicts(members, resolve, set));
        }
        Self { sets }
    }
}

/// A single set of mutually conflicting items
#[derive(Clone, Debug, serde::Serialize)]
struct MetadataConflictSet {
    items: Vec<MetadataConflictItem>,
}

impl MetadataConflictSet {
    /// Convert a resolver [`ConflictSet`] into its metadata representation.
    fn from_conflicts(
        members: &[MetadataWorkspaceMember],
        resolve: &BTreeMap<MetadataNodeIdFlat, MetadataNode>,
        set: &ConflictSet,
    ) -> Self {
        let mut items = Vec::new();
        for item in set.iter() {
            items.push(MetadataConflictItem::from_conflicts(members, resolve, item));
        }
        Self { items }
    }
}
/// One entry in a conflict set
#[derive(Clone, Debug, serde::Serialize)]
struct MetadataConflictItem {
    /// These should always be names of packages referred to in [`Metadata::members`]
    package: PackageName,
    /// Whether the conflict is over an extra, a group, or the project itself
    kind: MetadataConflictKind,
    /// The conflicting node's key in the `resolve` graph
    ///
    /// This should never be None (should be a validation error way earlier in uv)
    /// ...but I'd rather not error if wrong.
    id: Option<MetadataNodeIdFlat>,
}
impl MetadataConflictItem {
    /// Resolve a resolver [`ConflictItem`] against the workspace members and node graph.
    ///
    /// `id` stays `None` when the named package has no member entry or no node in the
    /// graph — we'd rather emit a partial entry than error here.
    fn from_conflicts(
        members: &[MetadataWorkspaceMember],
        resolve: &BTreeMap<MetadataNodeIdFlat, MetadataNode>,
        item: &ConflictItem,
    ) -> Self {
        let kind = MetadataConflictKind::from_conflicts(item.kind());
        let mut id = None;
        // Find the member's package node, then re-key its id with the conflicting kind.
        if let Some(member) = members.iter().find(|member| &member.name == item.package()) {
            if let Some(package_node) = resolve.get(&member.id) {
                let mut node_id = package_node.id.clone();
                node_id.kind = kind.to_node_kind();
                id = Some(node_id.to_flat());
            }
        }
        Self {
            package: item.package().clone(),
            kind,
            id,
        }
    }
}
/// What kind of thing a conflict applies to
///
/// NOTE(review): unlike the other enums in this file, no `rename_all` is applied, so
/// variants serialize externally tagged as `Group`/`Extra`/`Project` — confirm the
/// capitalization is intended for the stable schema.
#[derive(Clone, Debug, serde::Serialize)]
enum MetadataConflictKind {
    /// A dependency group defined on the package
    Group(GroupName),
    /// An extra defined on the package
    Extra(ExtraName),
    /// The project itself
    Project,
}
impl MetadataConflictKind {
    /// Convert from the resolver's [`ConflictKind`].
    fn from_conflicts(item: &ConflictKind) -> Self {
        match item {
            ConflictKind::Project => Self::Project,
            ConflictKind::Extra(name) => Self::Extra(name.clone()),
            ConflictKind::Group(name) => Self::Group(name.clone()),
        }
    }

    /// The node kind whose id a conflict of this kind refers to.
    fn to_node_kind(&self) -> MetadataNodeKind {
        match self {
            Self::Project => MetadataNodeKind::Package,
            Self::Extra(name) => MetadataNodeKind::Extra(name.clone()),
            Self::Group(name) => MetadataNodeKind::Group(name.clone()),
        }
    }
}
impl Metadata {
    /// Construct a [`Metadata`] report from a uv lockfile.
    ///
    /// Walks every package in the lock, desugaring each into a package node plus one
    /// node per extra and per dependency group, and records which package nodes are
    /// workspace members.
    pub fn from_lock(workspace: &Workspace, lock: &Lock) -> Result<Self, MetadataError> {
        let mut resolve = BTreeMap::new();
        let mut members = Vec::new();
        let workspace_root = PortablePathBuf::from(workspace.install_path().as_path());
        for lock_package in lock.packages() {
            let mut meta_package = MetadataNode::from_package_id(
                &workspace_root,
                &lock_package.id,
                MetadataNodeKind::Package,
            );
            // Direct dependencies go on the package node
            for dependency in &lock_package.dependencies {
                meta_package.add_dependency(&workspace_root, dependency);
            }
            // Extras get their own nodes
            for (extra, dependencies) in &lock_package.optional_dependencies {
                let mut meta_extra = MetadataNode::from_package_id(
                    &workspace_root,
                    &lock_package.id,
                    MetadataNodeKind::Extra(extra.clone()),
                );
                // Extras always depend on the base package
                meta_extra.dependencies.push(MetadataDependency {
                    id: meta_package.id.to_flat(),
                    marker: None,
                });
                for dependency in dependencies {
                    meta_extra.add_dependency(&workspace_root, dependency);
                }
                // The package node also indexes its extras by name
                meta_package.optional_dependencies.push(MetadataExtra {
                    name: extra.clone(),
                    id: meta_extra.id.to_flat(),
                });
                resolve.insert(meta_extra.id.to_flat(), meta_extra);
            }
            // Groups get their own nodes
            for (group, dependencies) in &lock_package.dependency_groups {
                let mut meta_group = MetadataNode::from_package_id(
                    &workspace_root,
                    &lock_package.id,
                    MetadataNodeKind::Group(group.clone()),
                );
                // Groups *do not* depend on the base package, so don't add that
                for dependency in dependencies {
                    meta_group.add_dependency(&workspace_root, dependency);
                }
                // The package node also indexes its groups by name
                meta_package.dependency_groups.push(MetadataGroup {
                    name: group.clone(),
                    id: meta_group.id.to_flat(),
                });
                resolve.insert(meta_group.id.to_flat(), meta_group);
            }
            // Register this package if it appears to be a workspace member
            if let Some(workspace_package) = workspace.packages().get(lock_package.name()) {
                let member = MetadataWorkspaceMember {
                    name: meta_package.id.name.clone(),
                    path: normalize_workspace_relative_path(
                        &workspace_root,
                        workspace_package.root().as_path(),
                    ),
                    id: meta_package.id.to_flat(),
                };
                members.push(member);
            }
            // Record sdist/wheel information
            if let Some(sdist) = &lock_package.sdist {
                meta_package.sdist = Some(MetadataSourceDist::from_sdist(&workspace_root, sdist));
            }
            for wheel in &lock_package.wheels {
                meta_package
                    .wheels
                    .push(MetadataWheel::from_wheel(&workspace_root, wheel));
            }
            resolve.insert(meta_package.id.to_flat(), meta_package);
        }
        // Conflict info references member/node ids, so compute it after the graph is built
        let conflicts = MetadataConflicts::from_conflicts(&members, &resolve, &lock.conflicts);
        Ok(Self {
            schema: SchemaReport {
                version: SchemaVersion::Preview,
            },
            conflicts,
            workspace_root,
            requires_python: lock.requires_python.clone(),
            members,
            resolution: resolve,
        })
    }

    /// Serialize the report as pretty-printed JSON.
    ///
    /// # Errors
    ///
    /// Returns a [`MetadataError`] if JSON serialization fails.
    pub fn to_json(&self) -> Result<String, MetadataError> {
        Ok(serde_json::to_string_pretty(self)?)
    }
}
@@ -17,6 +17,7 @@ use uv_pypi_types::ConflictItem;
use crate::graph_ops::{Reachable, marker_reachability};
use crate::lock::LockErrorKind;
pub use crate::lock::export::metadata::Metadata;
pub(crate) use crate::lock::export::pylock_toml::PylockTomlPackage;
pub use crate::lock::export::pylock_toml::{PylockToml, PylockTomlErrorKind};
pub use crate::lock::export::requirements_txt::RequirementsTxtExport;
@@ -24,6 +25,7 @@ use crate::universal_marker::resolve_conflicts;
use crate::{Installable, LockError, Package};
pub mod cyclonedx_json;
mod metadata;
mod pylock_toml;
mod requirements_txt;
+1 -1
View File
@@ -58,7 +58,7 @@ use crate::exclude_newer::ExcludeNewerSpan;
use crate::fork_strategy::ForkStrategy;
pub(crate) use crate::lock::export::PylockTomlPackage;
pub use crate::lock::export::RequirementsTxtExport;
pub use crate::lock::export::{PylockToml, PylockTomlErrorKind, cyclonedx_json};
pub use crate::lock::export::{Metadata, PylockToml, PylockTomlErrorKind, cyclonedx_json};
pub use crate::lock::installable::Installable;
pub use crate::lock::map::PackageMap;
pub use crate::lock::tree::TreeDisplay;
+6 -6
View File
@@ -269,7 +269,7 @@ pub(crate) async fn lock(
}
#[derive(Debug, Clone, Copy)]
pub(super) enum LockMode<'env> {
pub(crate) enum LockMode<'env> {
/// Write the lockfile to disk.
Write(&'env Interpreter),
/// Perform a resolution, but don't write the lockfile to disk.
@@ -281,7 +281,7 @@ pub(super) enum LockMode<'env> {
}
/// A lock operation.
pub(super) struct LockOperation<'env> {
pub(crate) struct LockOperation<'env> {
mode: LockMode<'env>,
constraints: Vec<NameRequirementSpecification>,
refresh: Option<&'env Refresh>,
@@ -298,7 +298,7 @@ pub(super) struct LockOperation<'env> {
impl<'env> LockOperation<'env> {
/// Initialize a [`LockOperation`].
pub(super) fn new(
pub(crate) fn new(
mode: LockMode<'env>,
settings: &'env ResolverSettings,
client_builder: &'env BaseClientBuilder<'env>,
@@ -328,7 +328,7 @@ impl<'env> LockOperation<'env> {
/// Set the external constraints for the [`LockOperation`].
#[must_use]
pub(super) fn with_constraints(
pub(crate) fn with_constraints(
mut self,
constraints: Vec<NameRequirementSpecification>,
) -> Self {
@@ -338,13 +338,13 @@ impl<'env> LockOperation<'env> {
/// Set the refresh strategy for the [`LockOperation`].
#[must_use]
pub(super) fn with_refresh(mut self, refresh: &'env Refresh) -> Self {
pub(crate) fn with_refresh(mut self, refresh: &'env Refresh) -> Self {
self.refresh = Some(refresh);
self
}
/// Perform a [`LockOperation`].
pub(super) async fn execute(self, target: LockTarget<'_>) -> Result<LockResult, ProjectError> {
pub(crate) async fn execute(self, target: LockTarget<'_>) -> Result<LockResult, ProjectError> {
match self.mode {
LockMode::Frozen(source) => {
// Read the existing lockfile, but don't attempt to lock the project.
+1 -1
View File
@@ -70,7 +70,7 @@ pub(crate) mod format;
pub(crate) mod init;
mod install_target;
pub(crate) mod lock;
mod lock_target;
pub(crate) mod lock_target;
pub(crate) mod remove;
pub(crate) mod run;
pub(crate) mod sync;
+107 -61
View File
@@ -2,59 +2,45 @@ use std::fmt::Write;
use std::path::Path;
use anyhow::Result;
use serde::Serialize;
use owo_colors::OwoColorize;
use uv_fs::PortablePathBuf;
use uv_normalize::PackageName;
use uv_cache::{Cache, Refresh};
use uv_client::BaseClientBuilder;
use uv_configuration::{Concurrency, DependencyGroupsWithDefaults, DryRun};
use uv_preview::{Preview, PreviewFeature};
use uv_python::{PythonDownloads, PythonPreference, PythonRequest};
use uv_resolver::{Lock, Metadata};
use uv_settings::PythonInstallMirrors;
use uv_warnings::warn_user;
use uv_workspace::{DiscoveryOptions, Workspace, WorkspaceCache};
use uv_workspace::{DiscoveryOptions, VirtualProject, Workspace, WorkspaceCache};
use crate::commands::ExitStatus;
use crate::commands::pip::loggers::DefaultResolveLogger;
use crate::commands::project::lock::{LockMode, LockOperation};
use crate::commands::project::lock_target::LockTarget;
use crate::commands::project::{ProjectError, ProjectInterpreter, UniversalState};
use crate::commands::{ExitStatus, diagnostics};
use crate::printer::Printer;
/// The schema version for the metadata report.
#[derive(Serialize, Debug, Default)]
#[serde(rename_all = "snake_case")]
enum SchemaVersion {
/// An unstable, experimental schema.
#[default]
Preview,
}
/// The schema metadata for the metadata report.
#[derive(Serialize, Debug, Default)]
struct SchemaReport {
/// The version of the schema.
version: SchemaVersion,
}
/// Report for a single workspace member.
#[derive(Serialize, Debug)]
struct WorkspaceMemberReport {
/// The name of the workspace member.
name: PackageName,
/// The path to the workspace member's root directory.
path: PortablePathBuf,
}
/// The report for a metadata operation.
#[derive(Serialize, Debug)]
struct MetadataReport {
/// The schema of this report.
schema: SchemaReport,
/// The workspace root directory.
workspace_root: PortablePathBuf,
/// The workspace members.
members: Vec<WorkspaceMemberReport>,
}
use crate::settings::{FrozenSource, LockCheck, ResolverSettings};
/// Display metadata about the workspace.
pub(crate) async fn metadata(
project_dir: &Path,
preview: Preview,
lock_check: LockCheck,
frozen: Option<FrozenSource>,
dry_run: DryRun,
refresh: Refresh,
python: Option<String>,
install_mirrors: PythonInstallMirrors,
settings: ResolverSettings,
client_builder: BaseClientBuilder<'_>,
python_preference: PythonPreference,
python_downloads: PythonDownloads,
concurrency: Concurrency,
no_config: bool,
cache: &Cache,
workspace_cache: &WorkspaceCache,
printer: Printer,
preview: Preview,
) -> Result<ExitStatus> {
if !preview.is_enabled(PreviewFeature::WorkspaceMetadata) {
warn_user!(
@@ -63,29 +49,89 @@ pub(crate) async fn metadata(
);
}
let workspace =
Workspace::discover(project_dir, &DiscoveryOptions::default(), workspace_cache).await?;
let virtual_project =
VirtualProject::discover(project_dir, &DiscoveryOptions::default(), workspace_cache)
.await?;
let target = LockTarget::Workspace(virtual_project.workspace());
let members = workspace
.packages()
.values()
.map(|package| WorkspaceMemberReport {
name: package.project().name.clone(),
path: PortablePathBuf::from(package.root().as_path()),
})
.collect();
// Determine the lock mode.
let interpreter;
let mode = if let Some(frozen_source) = frozen {
LockMode::Frozen(frozen_source.into())
} else {
interpreter = ProjectInterpreter::discover(
virtual_project.workspace(),
project_dir,
// Don't enable any groups' requires-python for interpreter discovery
&DependencyGroupsWithDefaults::none(),
python.as_deref().map(PythonRequest::parse),
&client_builder,
python_preference,
python_downloads,
&install_mirrors,
false,
no_config,
Some(false),
cache,
printer,
preview,
)
.await?
.into_interpreter();
let report = MetadataReport {
schema: SchemaReport::default(),
workspace_root: PortablePathBuf::from(workspace.install_path().as_path()),
members,
if let LockCheck::Enabled(lock_check) = lock_check {
LockMode::Locked(&interpreter, lock_check)
} else if dry_run.enabled() {
LockMode::DryRun(&interpreter)
} else {
LockMode::Write(&interpreter)
}
};
writeln!(
printer.stdout(),
"{}",
serde_json::to_string_pretty(&report)?
)?;
// Initialize any shared state.
let state = UniversalState::default();
// Perform the lock operation.
match Box::pin(
LockOperation::new(
mode,
&settings,
&client_builder,
&state,
Box::new(DefaultResolveLogger),
&concurrency,
cache,
workspace_cache,
printer,
preview,
)
.with_refresh(&refresh)
.execute(target),
)
.await
{
Ok(lock) => print_lock_as_metadata(virtual_project.workspace(), &lock.into_lock(), printer),
Err(err @ ProjectError::LockMismatch(..)) => {
writeln!(printer.stderr(), "{}", err.to_string().bold())?;
Ok(ExitStatus::Failure)
}
Err(ProjectError::Operation(err)) => {
diagnostics::OperationDiagnostic::with_system_certs(client_builder.system_certs())
.report(err)
.map_or(Ok(ExitStatus::Failure), |err| Err(err.into()))
}
Err(err) => Err(err.into()),
}
}
/// Convert a resolved [`Lock`] into the stable workspace-metadata format and
/// print it to stdout as pretty JSON.
///
/// This is the output path for `uv workspace metadata`: callers are expected to
/// consume this JSON rather than parse `uv.lock` directly.
fn print_lock_as_metadata(
workspace: &Workspace,
lock: &Lock,
printer: Printer,
) -> Result<ExitStatus> {
// Translate the lockfile into the stable `Metadata` export shape before serializing.
let export = Metadata::from_lock(workspace, lock)?;
writeln!(printer.stdout(), "{}", export.to_json()?)?;
Ok(ExitStatus::Success)
}
+37 -2
View File
@@ -1932,8 +1932,43 @@ async fn run(cli: Cli) -> Result<ExitStatus> {
.await
}
Commands::Workspace(WorkspaceNamespace { command }) => match command {
WorkspaceCommand::Metadata(_args) => {
commands::metadata(&project_dir, globals.preview, &workspace_cache, printer).await
WorkspaceCommand::Metadata(args) => {
// Resolve the settings from the command-line arguments and workspace configuration.
let args = settings::MetadataSettings::resolve(args, filesystem, environment);
show_settings!(args);
// Check for conflicts between offline and refresh.
globals
.network_settings
.check_refresh_conflict(&args.refresh);
// Initialize the cache.
let cache = cache.init().await?.with_refresh(
args.refresh
.clone()
.combine(Refresh::from(args.settings.upgrade.clone())),
);
Box::pin(commands::metadata(
&project_dir,
args.lock_check,
args.frozen,
args.dry_run,
args.refresh,
args.python,
args.install_mirrors,
args.settings,
client_builder.subcommand(vec!["workspace metadata".to_owned()]),
globals.python_preference,
globals.python_downloads,
globals.concurrency,
cli.top_level.no_config,
&cache,
&workspace_cache,
printer,
globals.preview,
))
.await
}
WorkspaceCommand::Dir(args) => {
commands::dir(args.package, &project_dir, &workspace_cache, printer).await
+60 -6
View File
@@ -14,12 +14,12 @@ use uv_cache::{CacheArgs, Refresh};
use uv_cli::comma::CommaSeparatedRequirements;
use uv_cli::{
AddArgs, AuditArgs, AuthLoginArgs, AuthLogoutArgs, AuthTokenArgs, ColorChoice, ExternalCommand,
GlobalArgs, InitArgs, ListFormat, LockArgs, Maybe, PipCheckArgs, PipCompileArgs, PipFreezeArgs,
PipInstallArgs, PipListArgs, PipShowArgs, PipSyncArgs, PipTreeArgs, PipUninstallArgs,
PythonFindArgs, PythonInstallArgs, PythonListArgs, PythonListFormat, PythonPinArgs,
PythonUninstallArgs, PythonUpgradeArgs, RemoveArgs, RunArgs, SyncArgs, SyncFormat, ToolDirArgs,
ToolInstallArgs, ToolListArgs, ToolRunArgs, ToolUninstallArgs, TreeArgs, VenvArgs, VersionArgs,
VersionBumpSpec, VersionFormat,
GlobalArgs, InitArgs, ListFormat, LockArgs, Maybe, MetadataArgs, PipCheckArgs, PipCompileArgs,
PipFreezeArgs, PipInstallArgs, PipListArgs, PipShowArgs, PipSyncArgs, PipTreeArgs,
PipUninstallArgs, PythonFindArgs, PythonInstallArgs, PythonListArgs, PythonListFormat,
PythonPinArgs, PythonUninstallArgs, PythonUpgradeArgs, RemoveArgs, RunArgs, SyncArgs,
SyncFormat, ToolDirArgs, ToolInstallArgs, ToolListArgs, ToolRunArgs, ToolUninstallArgs,
TreeArgs, VenvArgs, VersionArgs, VersionBumpSpec, VersionFormat,
};
use uv_cli::{
AuthorFrom, BuildArgs, ExportArgs, FormatArgs, PublishArgs, PythonDirArgs,
@@ -1796,6 +1796,60 @@ impl LockSettings {
}
}
}
/// The resolved settings to use for a `workspace metadata` invocation.
#[derive(Debug, Clone)]
pub(crate) struct MetadataSettings {
// Whether to verify the lockfile is up-to-date (resolved from the `locked` flag).
pub(crate) lock_check: LockCheck,
// If set, read the existing lockfile as-is without re-locking (resolved from `frozen`).
pub(crate) frozen: Option<FrozenSource>,
// Perform the resolution without writing the lockfile to disk.
pub(crate) dry_run: DryRun,
// The requested Python interpreter, if any.
pub(crate) python: Option<String>,
// Mirrors to use when downloading Python installations.
pub(crate) install_mirrors: PythonInstallMirrors,
// Cache refresh strategy for the operation.
pub(crate) refresh: Refresh,
// Resolver settings combined from the CLI and filesystem configuration.
pub(crate) settings: ResolverSettings,
}
impl MetadataSettings {
/// Resolve the [`MetadataSettings`] from the CLI and filesystem configuration.
pub(crate) fn resolve(
args: Box<MetadataArgs>,
filesystem: Option<FilesystemOptions>,
environment: EnvironmentOptions,
) -> Self {
let MetadataArgs {
locked,
frozen,
dry_run,
resolver,
build,
refresh,
python,
} = *args;
// Fall back to an empty mirror configuration when no filesystem config exists.
let filesystem_install_mirrors = filesystem
.clone()
.map(|fs| fs.install_mirrors.clone())
.unwrap_or_default();
// Resolve flags from CLI and environment variables.
let locked = resolve_flag(locked, "locked", environment.locked);
let frozen = resolve_flag(frozen, "frozen", environment.frozen);
// Check for conflicts between locked and frozen.
check_conflicts(locked, frozen);
Self {
lock_check: resolve_lock_check(locked),
frozen: resolve_frozen(frozen),
dry_run: DryRun::from_args(dry_run),
python: python.and_then(Maybe::into_option),
refresh: Refresh::from(refresh),
// Filesystem configuration provides defaults for resolver settings not set on the CLI.
settings: ResolverSettings::combine(resolver_options(resolver, build), filesystem),
// Environment-variable mirrors take precedence over filesystem-configured mirrors.
install_mirrors: environment
.install_mirrors
.combine(filesystem_install_mirrors),
}
}
}
/// The resolved settings to use for a `add` invocation.
#[expect(clippy::struct_excessive_bools)]
File diff suppressed because it is too large Load Diff
+1
View File
@@ -3,3 +3,4 @@
The internals section provides details about uv's internal components and implementation details.
- [Resolver](./resolver.md)
- [Workspace Metadata](./metadata.md)
+326
View File
@@ -0,0 +1,326 @@
# Workspace metadata
`uv workspace metadata` exports the information uv has about your workspace as JSON so other tools
can use it. In particular, if you want access to the information in a `uv.lock`, you should prefer
this command's output, as `uv.lock` is not a stable format and we make no guarantees about its contents.
The primary structure is the "resolution" field which contains the dependency graph with exact
package versions that a `uv.lock` encodes.
The edges of the graph are the `dependencies` every node defines. These are the things that must
also be installed for it to be installed (and their `dependencies` recursively, keeping in mind that
cycles are perfectly normal to encounter in this graph). Each dependency entry will include an `id`
for the node it refers to, and an optional `marker` that
[specifies on what platforms the dependency is required](https://packaging.python.org/en/latest/specifications/dependency-specifiers/#dependency-specifiers)
(if there is no marker the dependency is always required).
Nodes in the graph are uniquely identified by package `name`, `version`, `source`, and `kind`.
There are 3 kinds of node in the graph:
- `"package"` -- the package itself
- `{ "extra": "extraname" }` -- an extra the package defines
- `{ "group": "groupname" }` -- a dependency group the package defines
(In the future we will add "build" nodes for the dependencies of
[build environments](https://docs.astral.sh/uv/concepts/projects/config/#build-isolation).)
If you want to install `mypackage`, find its `"kind": "package"` node. This node will also include
information on its sdist, its wheels, its extras (`optional_dependencies`), and dependency groups
(`dependency_groups`).
If you want to install `mypackage[myextra]` then find the node with `"kind": { "extra": "myextra" }`
for `mypackage` (this node will always depend on `mypackage`). If you want to install
`mypackage[extra1, extra2]`, find the two nodes for `mypackage[extra1]` and `mypackage[extra2]`.
If you want to install the dependency group `mypackage:mygroup` then find the node with
`"kind": { "group": "mygroup" }` for `mypackage` (this node will _not_ depend on `mypackage`, as
dependency groups are just lists of things you might want when working on the package itself).
## Handling multiple versions of a package
Two versions of a package cannot be installed into a Python environment, but the dependency graph
may still include multiple versions of a package. This can happen for two different reasons.
The first way is for
[different platforms](https://packaging.python.org/en/latest/specifications/dependency-specifiers/#dependency-specifiers)
to have conflicting requirements that force different versions of a package to be used.
The second way is when a workspace has
[conflicts](https://docs.astral.sh/uv/concepts/resolution/#conflicting-dependencies), implying some
workspace members or their extras are mutually exclusive, and only one of them can be installed at a
time. Information about conflicts can be found in the top-level `conflicts` field.
The specific guarantee we provide is that **for any concrete choice of
[markers](https://packaging.python.org/en/latest/specifications/dependency-specifiers/#dependency-specifiers),
if you select a set of packages to install that has no
[conflicts](https://docs.astral.sh/uv/concepts/resolution/#conflicting-dependencies), then the
resulting set of packages to install will not have multiple versions of a package**.
If you just want to get "every version of pydantic this workspace uses" you're free to iterate
through the list of nodes and collect up every instance. If however you want to specifically analyze
the graph and get actual resolutions you will likely need to consult `conflicts` and need to
understand how to resolve `markers` for a specific platform.
The best way to avoid mistakes when working with multiple versions of a package is to keep your
queries into the dependency graph rooted in operations on workspace members, as those are the
natural entry-points to the graph that uv wants to work on, and can give coherent responses for:
"install `member1` and `member2[extra]`".
Another way to put this is that when possible _you should avoid iterating over the `resolution`
object to find a node_. Only access `resolution` like a map using ids that were provided by another
part of the metadata. The only ids this initially gives you access to are the ones listed in the
`members` array, which lists all the workspace members. From there you may find the ids of that
package's dependencies, extras, and dependency groups and recursively discover other packages.
So rather than trying to find a node for anyio in the dependency graph directly, you should decide
what workspace member(s) you're interested in analyzing as if they were going to be installed. While
traversing the `dependencies` of the things you want to install, you may visit an instance of anyio,
which is the one you should use. If you visit multiple instances of anyio then that means you've
selected a conflicting set of things to install which uv would never select.
So if you wanted to analyze say, installing the `dev` dependency group of the workspace member
`mypackage` it would look something like:
```python
member = find_by_name(metadata.members, "mypackage")
member_node = metadata.resolution[member.id]
group = find_by_name(member_node.dependency_groups, "dev")
group_node = metadata.resolution[group.id]
visit(metadata, [group_node])
```
If you wanted to analyze two particular workspace members installed together, it would look
something like:
```python
to_analyze = []
for member_name in ["package1", "package2"]:
member = find_by_name(metadata.members, member_name)
member_node = metadata.resolution[member.id]
to_analyze.append(member_node)
visit(metadata, to_analyze)
```
Where `visit` is your favourite graph traversal algorithm like depth-first-search:
```python
def visit(metadata: UvMetadata, to_analyze: list[Node]):
visited = set()
while len(to_analyze) > 0:
node = to_analyze.pop()
# Handle cycles by avoiding revisiting nodes
if node.id in visited:
continue
visited.add(node.id)
# We also need to analyze its dependencies
for dependency in node.dependencies:
# Only follow edges if they satisfy the desired platform's markers
if dependency.marker and not satisfies(platform, dependency.marker):
continue
to_analyze.append(metadata.resolution[dependency.id])
# Analyze any package node we encounter
if node.kind == "package":
print(node.name, node.version, node.source)
```
## Schema
A full JSON schema for the format will be provided when the format is finalized.
Here is a human-readable annotated example:
```js
{
// Information about the schema of this output
"schema": {
// The version of this output, currently "preview"
"version": "preview"
},
// The directory the uv.lock can be found in
"workspace_root": "/workspace",
// Any requirements on the python version this workspace has
//
// `marker` fields all have this as an implicit constraint that is omitted for cleanliness
"requires_python": ">=3.12",
// A list of workspace members
"members": [
{
// The name of the package
"name": "mypackage",
// The directory that contains its pyproject.toml
"path": "/workspace/packages/mypackage",
// The id of this package's info in the `resolution` map below
"id": "mypackage==0.1.0@editable+/workspace/packages/mypackage"
},
],
// A list-of-sets of workspace items that are mutually-exclusive to install,
// presumably because they need to install different versions of the same package.
//
// Any attempt to install two things that belong to the same set must be rejected.
//
// There are 3 kinds of item:
//
// * Project -- "kind": "project"
// * Extra -- "kind": { "extra": "extraname" }
// * Group -- "kind": { "group": "groupname" }
"conflicts": {
"sets": [
{
"items": [
{
"package": "mypackage",
"kind": { "extra": "myextra" }
"id": "mypackage[myextra]==0.1.0@editable+/workspace/packages/mypackage",
}
{
"package": "mypackage",
"kind": { "group": "mygroup" }
"id": "mypackage:mygroup==0.1.0@editable+/workspace/packages/mypackage",
}
]
}
]
},
// Resolved information about packages and dependencies.
//
// Each entry in this map is a node in the dependency graph. There are currently
// 3 kinds of node in the dependency graph, although more are planned in the future.
//
// * Packages -- "kind": "package"
// * Extras -- "kind": { "extra": "extraname" }
// * Groups -- "kind": { "group": "groupname" }
//
// Package nodes contain most of the metadata, while other nodes are mostly just a list
// of dependencies. The different kinds of node are included like this to encourage correct
// analysis of the graph. For instance, a node for `mypackage[someextra]` always depends on
// `mypackage`, while `mypackage:somegroup` does not (because dependency-groups are just a
// list of packages you might want to install while working on `mypackage`). Sugars like
// `mypackage[extra1, extra2]` are decomposed into separate dependencies on `mypackage[extra1]`
// and `mypackage[extra2]`.
//
// The ids used here are human-readable but should be handled as opaque (the nodes contain
// the same information in a more convenient form).
"resolution": {
// This node is a workspace member
"mypackage==0.1.0@editable+/workspace/packages/mypackage": {
// The name of the package
"name": "mypackage",
// The version of the package (this may be missing, as source trees do not need versions)
"version": "0.1.0",
// The source of the package, in this case it's an editable whose path relative to the
// `workspace_root` is `./packages/mypackage`
"source": {
"editable": "/workspace/packages/mypackage"
},
// The kind of the node, in this case "package" (see the docs on `resolution` above for details)
"kind": "package",
// The dependencies that must be installed to also install this node into an environment
"dependencies": [
{
// The id of the node to lookup for details
"id": "iniconfig==2.0.0@registry+https://pypi.org/simple"
"marker": "marker": "sys_platform == 'linux'"
}
],
// The extras that this package defines
"optional_dependencies": [
{
"name": "myextra",
"id": "mypackage[myextra]==0.1.0@editable+/workspace/packages/mypackage"
}
],
// The dependency groups this package defines
"dependency_groups": [
{
"name": "mygroup",
"id": "mypackage:mygroup==0.1.0@editable+/workspace/packages/mypackage"
}
]
},
// This node is an extra on a workspace member
"mypackage[myextra]==0.1.0@editable+/workspace/packages/mypackage": {
// These fields will match the package node above
"name": "mypackage",
"version": "0.1.0",
"source": {
"editable": "/workspace/packages/mypackage"
},
// But these two will differ from the package node above
"kind": { "extra": "myextra" },
"dependencies": [
{
"id": "mypackage==0.1.0@editable+/workspace/packages/mypackage"
},
{
"id": "anyio==2.0.0@registry+https://pypi.org/simple"
}
]
},
// This node is a dependency-group on a workspace member
"mypackage:mygroup==0.1.0@editable+/workspace/packages/mypackage": {
// These fields will match the package node above
"name": "mypackage",
"version": "0.1.0",
"source": {
"editable": "/workspace/packages/mypackage"
},
// But these two will differ from the package node above
"kind": { "extra": "myextra" },
"dependencies": [
{
"id": "anyio==1.0.0@registry+https://pypi.org/simple"
}
]
},
// This node is a package on pypi
"iniconfig==2.0.0@registry+https://pypi.org/simple": {
"name": "iniconfig",
"version": "2.0.0",
// registry sources look like this
"source": {
"registry": {
"url": "https://pypi.org/simple"
}
},
"kind": "package",
"dependencies": [],
// Details on the package's source distribution
"sdist": {
// May alternatively be `path`
"url": "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz",
"hashes": {
"sha256": "2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"
},
"size": 4646,
"upload_time": "2023-01-07T11:08:11.254Z"
},
// The wheels we found for this package
"wheels": [
{
// May alternatively be `path`
"url": "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl",
"hashes": {
"sha256": "b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"
},
"size": 5892,
"upload_time": "2023-01-07T11:08:09.864Z",
// Parsing this name is how you know what platform a wheel supports
"filename": "iniconfig-2.0.0-py3-none-any.whl"
}
]
},
// ...and so on
"anyio==1.0.0@registry+https://pypi.org/simple": { ... },
"anyio==2.0.0@registry+https://pypi.org/simple": { ... }
}
}
```
+1
View File
@@ -263,6 +263,7 @@ nav:
- Internals:
- reference/internals/index.md
- Resolver: reference/internals/resolver.md
- Workspace Metadata: reference/internals/metadata.md
- Benchmarks: reference/benchmarks.md
- Policies:
- reference/policies/index.md
@@ -0,0 +1,4 @@
from albatross import fly
fly()
print("Success")
@@ -0,0 +1,15 @@
[project]
name = "albatross"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["iniconfig>=2,<3"]
[project.optional-dependencies]
io = ["anyio"]
[dependency-groups]
dev = ["idna>=3"]
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
@@ -0,0 +1,2 @@
def fly():
pass
@@ -0,0 +1,2 @@
[dependency-groups]
dev = ["iniconfig>=2,<3"]