diff --git a/crates/uv/tests/cache_prune.rs b/crates/uv/tests/cache_prune.rs
index b22de5fb5b9c..fcab2202dcab 100644
--- a/crates/uv/tests/cache_prune.rs
+++ b/crates/uv/tests/cache_prune.rs
@@ -15,42 +15,8 @@ mod common;
 /// Create a `cache prune` command with options shared across scenarios.
 fn prune_command(context: &TestContext) -> Command {
     let mut command = Command::new(get_bin());
-    command
-        .arg("cache")
-        .arg("prune")
-        .arg("--cache-dir")
-        .arg(context.cache_dir.path())
-        .env("VIRTUAL_ENV", context.venv.as_os_str())
-        .env("UV_NO_WRAP", "1")
-        .current_dir(&context.temp_dir);
-
-    if cfg!(all(windows, debug_assertions)) {
-        // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the
-        // default windows stack of 1MB
-        command.env("UV_STACK_SIZE", (8 * 1024 * 1024).to_string());
-    }
-
-    command
-}
-
-/// Create a `pip sync` command with options shared across scenarios.
-fn sync_command(context: &TestContext) -> Command {
-    let mut command = Command::new(get_bin());
-    command
-        .arg("pip")
-        .arg("sync")
-        .arg("--cache-dir")
-        .arg(context.cache_dir.path())
-        .env("VIRTUAL_ENV", context.venv.as_os_str())
-        .env("UV_NO_WRAP", "1")
-        .current_dir(&context.temp_dir);
-
-    if cfg!(all(windows, debug_assertions)) {
-        // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the
-        // default windows stack of 1MB
-        command.env("UV_STACK_SIZE", (8 * 1024 * 1024).to_string());
-    }
-
+    command.arg("cache").arg("prune");
+    context.add_shared_args(&mut command);
     command
 }
 
@@ -63,7 +29,8 @@ fn prune_no_op() -> Result<()> {
     requirements_txt.write_str("anyio")?;
 
     // Install a requirement, to populate the cache.
-    sync_command(&context)
+    context
+        .pip_sync()
         .arg("requirements.txt")
         .assert()
         .success();
@@ -91,7 +58,8 @@ fn prune_stale_directory() -> Result<()> {
     requirements_txt.write_str("anyio")?;
 
     // Install a requirement, to populate the cache.
-    sync_command(&context)
+    context
+        .pip_sync()
         .arg("requirements.txt")
         .assert()
         .success();
@@ -124,7 +92,8 @@ fn prune_stale_symlink() -> Result<()> {
     requirements_txt.write_str("anyio")?;
 
    // Install a requirement, to populate the cache.
-    sync_command(&context)
+    context
+        .pip_sync()
         .arg("requirements.txt")
         .assert()
         .success();
diff --git a/crates/uv/tests/common/mod.rs b/crates/uv/tests/common/mod.rs
index 92464cfe7475..c4ff4bcc357b 100644
--- a/crates/uv/tests/common/mod.rs
+++ b/crates/uv/tests/common/mod.rs
@@ -1,20 +1,20 @@
 // The `unreachable_pub` is to silence false positives in RustRover.
 #![allow(dead_code, unreachable_pub)]
 
-use assert_cmd::assert::{Assert, OutputAssertExt};
-use assert_cmd::Command;
-use assert_fs::assert::PathAssert;
-use assert_fs::fixture::{ChildPath, PathChild, PathCreateDir, SymlinkToFile};
-use predicates::prelude::predicate;
-use regex::Regex;
 use std::borrow::BorrowMut;
 use std::env;
 use std::ffi::OsString;
 use std::iter::Iterator;
 use std::path::{Path, PathBuf};
-use std::process::Output;
+use std::process::{Command, Output};
 use std::str::FromStr;
+use assert_cmd::assert::{Assert, OutputAssertExt};
+use assert_fs::assert::PathAssert;
+use assert_fs::fixture::{ChildPath, PathChild, PathCreateDir, SymlinkToFile};
+use predicates::prelude::predicate;
+use regex::Regex;
+
 use uv_cache::Cache;
 use uv_fs::Simplified;
 use uv_toolchain::managed::InstalledToolchains;
@@ -23,7 +23,7 @@ use uv_toolchain::{
 };
 
 // Exclude any packages uploaded after this date.
-pub static EXCLUDE_NEWER: &str = "2024-03-25T00:00:00Z";
+static EXCLUDE_NEWER: &str = "2024-03-25T00:00:00Z";
 
 /// Using a find links url allows using `--index-url` instead of `--extra-index-url` in tests
 /// to prevent dependency confusion attacks against our test suite.
@@ -55,7 +55,7 @@ pub const INSTA_FILTERS: &[(&str, &str)] = &[
     ),
 ];
 
-/// Create a context for tests which simplfiies shared behavior across tests.
+/// Create a context for tests which simplifies shared behavior across tests.
 ///
 /// * Set the current directory to a temporary directory (`temp_dir`).
 /// * Set the cache dir to a different temporary directory (`cache_dir`).
@@ -247,23 +247,18 @@ impl TestContext {
         }
     }
 
-    /// Create a `pip compile` command for testing.
-    pub fn compile(&self) -> std::process::Command {
-        let mut command = self.compile_without_exclude_newer();
-        command.arg("--exclude-newer").arg(EXCLUDE_NEWER);
-        command
-    }
-
-    /// Create a `pip compile` command with no `--exclude-newer` option.
+    /// Shared behavior for almost all test commands.
     ///
-    /// One should avoid using this in tests to the extent possible because
-    /// it can result in tests failing when the index state changes. Therefore,
-    /// if you use this, there should be some other kind of mitigation in place.
-    /// For example, pinning package versions.
-    pub fn compile_without_exclude_newer(&self) -> std::process::Command {
-        let mut cmd = std::process::Command::new(get_bin());
-        cmd.arg("pip")
-            .arg("compile")
+    /// * Use a temporary cache directory.
+    /// * Use a temporary virtual environment with the Python version of [`Self`].
+    /// * Don't wrap text output based on the terminal we're in; the test output isn't printed,
+    ///   but snapshotted to a string.
+    /// * Use a fake `HOME` to avoid accidentally changing the developer's machine.
+    /// * Hide other Python toolchains with `UV_TOOLCHAIN_DIR` and installed interpreters with
+    ///   `UV_TEST_PYTHON_PATH`.
+    /// * Increase the stack size to avoid stack overflows on Windows due to large async functions.
+    pub fn add_shared_args(&self, command: &mut Command) {
+        command
             .arg("--cache-dir")
             .arg(self.cache_dir.path())
             .env("VIRTUAL_ENV", self.venv.as_os_str())
@@ -276,32 +271,65 @@ impl TestContext {
         if cfg!(all(windows, debug_assertions)) {
             // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the
             // default windows stack of 1MB
-            cmd.env("UV_STACK_SIZE", (8 * 1024 * 1024).to_string());
+            command.env("UV_STACK_SIZE", (8 * 1024 * 1024).to_string());
         }
+    }
+
+    /// Create a `pip compile` command for testing.
+    pub fn pip_compile(&self) -> Command {
+        let mut command = self.pip_compile_without_exclude_newer();
+        command.arg("--exclude-newer").arg(EXCLUDE_NEWER);
+        command
+    }
+
+    /// Create a `pip compile` command with no `--exclude-newer` option.
+    ///
+    /// One should avoid using this in tests to the extent possible because
+    /// it can result in tests failing when the index state changes. Therefore,
+    /// if you use this, there should be some other kind of mitigation in place.
+    /// For example, pinning package versions.
+    pub fn pip_compile_without_exclude_newer(&self) -> Command {
+        let mut command = Command::new(get_bin());
+        command.arg("pip").arg("compile");
+        self.add_shared_args(&mut command);
+        command
+    }
+
+    /// Create a `pip sync` command for testing.
+ pub fn pip_sync(&self) -> Command { + let mut command = self.pip_sync_without_exclude_newer(); + command.arg("--exclude-newer").arg(EXCLUDE_NEWER); + command + } - cmd + /// Create a `pip sync` command with no `--exclude-newer` option. + /// + /// One should avoid using this in tests to the extent possible because + /// it can result in tests failing when the index state changes. Therefore, + /// if you use this, there should be some other kind of mitigation in place. + /// For example, pinning package versions. + pub fn pip_sync_without_exclude_newer(&self) -> Command { + let mut command = Command::new(get_bin()); + command.arg("pip").arg("sync"); + self.add_shared_args(&mut command); + command } /// Create a `uv venv` command - pub fn venv(&self) -> std::process::Command { - let mut command = std::process::Command::new(get_bin()); + pub fn venv(&self) -> Command { + let mut command = Command::new(get_bin()); command .arg("venv") .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("UV_CACHE_DIR", self.cache_dir.path()) - .env("UV_TOOLCHAIN_DIR", "") - .env("UV_TEST_PYTHON_PATH", &self.python_path()) - .env("UV_NO_WRAP", "1") - .env("HOME", self.home_dir.as_os_str()) - .env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string()) - .current_dir(self.temp_dir.as_os_str()); + .arg(EXCLUDE_NEWER); + self.add_shared_args(&mut command); + command.env_remove("VIRTUAL_ENV"); command } /// Create a `pip install` command with options shared across scenarios. - pub fn install(&self) -> std::process::Command { - let mut command = self.install_without_exclude_newer(); + pub fn pip_install(&self) -> Command { + let mut command = self.pip_install_without_exclude_newer(); command.arg("--exclude-newer").arg(EXCLUDE_NEWER); command } @@ -312,60 +340,33 @@ impl TestContext { /// it can result in tests failing when the index state changes. Therefore, /// if you use this, there should be some other kind of mitigation in place. /// For example, pinning package versions. - pub fn install_without_exclude_newer(&self) -> std::process::Command { - let mut command = std::process::Command::new(get_bin()); + pub fn pip_install_without_exclude_newer(&self) -> Command { + let mut command = Command::new(get_bin()); + command.arg("pip").arg("install"); + self.add_shared_args(&mut command); command - .arg("pip") - .arg("install") - .arg("--cache-dir") - .arg(self.cache_dir.path()) - .env("VIRTUAL_ENV", self.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .env("HOME", self.home_dir.as_os_str()) - .env("UV_TOOLCHAIN_DIR", "") - .env("UV_TEST_PYTHON_PATH", &self.python_path()) - .current_dir(&self.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (4 * 1024 * 1024).to_string()); - } + } + /// Create a `pip uninstall` command with options shared across scenarios. + pub fn pip_uninstall(&self) -> Command { + let mut command = Command::new(get_bin()); + command.arg("pip").arg("uninstall"); + self.add_shared_args(&mut command); command } /// Create a `uv sync` command with options shared across scenarios. 
- pub fn sync(&self) -> std::process::Command { - let mut command = std::process::Command::new(get_bin()); - command - .arg("sync") - .arg("--cache-dir") - .arg(self.cache_dir.path()) - .env("VIRTUAL_ENV", self.venv.as_os_str()) - .env("UV_TOOLCHAIN_DIR", "") - .env("UV_NO_WRAP", "1") - .env("HOME", self.home_dir.as_os_str()) - .env("UV_TEST_PYTHON_PATH", &self.python_path()) - .current_dir(&self.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (4 * 1024 * 1024).to_string()); - } - + pub fn sync(&self) -> Command { + let mut command = Command::new(get_bin()); + command.arg("sync"); + self.add_shared_args(&mut command); command } /// Create a `uv lock` command with options shared across scenarios. - pub fn lock(&self) -> std::process::Command { + pub fn lock(&self) -> Command { let mut command = self.lock_without_exclude_newer(); - command - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("UV_TOOLCHAIN_DIR", "") - .env("UV_TEST_PYTHON_PATH", &self.python_path()); + command.arg("--exclude-newer").arg(EXCLUDE_NEWER); command } @@ -375,56 +376,28 @@ impl TestContext { /// it can result in tests failing when the index state changes. Therefore, /// if you use this, there should be some other kind of mitigation in place. /// For example, pinning package versions. - pub fn lock_without_exclude_newer(&self) -> std::process::Command { - let mut command = std::process::Command::new(get_bin()); - command - .arg("lock") - .arg("--cache-dir") - .arg(self.cache_dir.path()) - .env("VIRTUAL_ENV", self.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .env("HOME", self.home_dir.as_os_str()) - .env("UV_TOOLCHAIN_DIR", "") - .env("UV_TEST_PYTHON_PATH", &self.python_path()) - .current_dir(&self.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (4 * 1024 * 1024).to_string()); - } - + pub fn lock_without_exclude_newer(&self) -> Command { + let mut command = Command::new(get_bin()); + command.arg("lock"); + self.add_shared_args(&mut command); command } /// Create a `uv toolchain find` command with options shared across scenarios. - pub fn toolchain_find(&self) -> std::process::Command { - let mut command = std::process::Command::new(get_bin()); + pub fn toolchain_find(&self) -> Command { + let mut command = Command::new(get_bin()); command .arg("toolchain") .arg("find") - .arg("--cache-dir") - .arg(self.cache_dir.path()) - .env("VIRTUAL_ENV", self.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .env("HOME", self.home_dir.as_os_str()) - .env("UV_TOOLCHAIN_DIR", "") - .env("UV_TEST_PYTHON_PATH", &self.python_path()) .env("UV_PREVIEW", "1") .env("UV_TOOLCHAIN_DIR", "") .current_dir(&self.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (4 * 1024 * 1024).to_string()); - } - + self.add_shared_args(&mut command); command } /// Create a `uv run` command with options shared across scenarios. 
- pub fn run(&self) -> std::process::Command { + pub fn run(&self) -> Command { let mut command = self.run_without_exclude_newer(); command.arg("--exclude-newer").arg(EXCLUDE_NEWER); command @@ -436,30 +409,15 @@ impl TestContext { /// it can result in tests failing when the index state changes. Therefore, /// if you use this, there should be some other kind of mitigation in place. /// For example, pinning package versions. - pub fn run_without_exclude_newer(&self) -> std::process::Command { - let mut command = std::process::Command::new(get_bin()); - command - .arg("run") - .arg("--cache-dir") - .arg(self.cache_dir.path()) - .env("VIRTUAL_ENV", self.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .env("HOME", self.home_dir.as_os_str()) - .env("UV_TOOLCHAIN_DIR", "") - .env("UV_TEST_PYTHON_PATH", &self.python_path()) - .current_dir(&self.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (4 * 1024 * 1024).to_string()); - } - + pub fn run_without_exclude_newer(&self) -> Command { + let mut command = Command::new(get_bin()); + command.arg("run"); + self.add_shared_args(&mut command); command } /// Create a `uv tool run` command with options shared across scenarios. - pub fn tool_run(&self) -> std::process::Command { + pub fn tool_run(&self) -> Command { let mut command = self.tool_run_without_exclude_newer(); command.arg("--exclude-newer").arg(EXCLUDE_NEWER); command @@ -471,70 +429,34 @@ impl TestContext { /// it can result in tests failing when the index state changes. Therefore, /// if you use this, there should be some other kind of mitigation in place. /// For example, pinning package versions. - pub fn tool_run_without_exclude_newer(&self) -> std::process::Command { - let mut command = std::process::Command::new(get_bin()); - command - .arg("tool") - .arg("run") - .arg("--cache-dir") - .arg(self.cache_dir.path()) - .env("VIRTUAL_ENV", self.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .env("HOME", self.home_dir.as_os_str()) - .env("UV_TOOLCHAIN_DIR", "") - .env("UV_TEST_PYTHON_PATH", &self.python_path()) - .current_dir(&self.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (4 * 1024 * 1024).to_string()); - } - + pub fn tool_run_without_exclude_newer(&self) -> Command { + let mut command = Command::new(get_bin()); + command.arg("tool").arg("run"); + self.add_shared_args(&mut command); command } /// Create a `uv add` command for the given requirements. - pub fn add(&self, reqs: &[&str]) -> std::process::Command { - let mut command = std::process::Command::new(get_bin()); - command - .arg("add") - .args(reqs) - .arg("--cache-dir") - .arg(self.cache_dir.path()) - .env("VIRTUAL_ENV", self.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .env("HOME", self.home_dir.as_os_str()) - .current_dir(&self.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (4 * 1024 * 1024).to_string()); - } - + pub fn add(&self, reqs: &[&str]) -> Command { + let mut command = Command::new(get_bin()); + command.arg("add").args(reqs); + self.add_shared_args(&mut command); command } /// Create a `uv remove` command for the given requirements. 
- pub fn remove(&self, reqs: &[&str]) -> std::process::Command { - let mut command = std::process::Command::new(get_bin()); + pub fn remove(&self, reqs: &[&str]) -> Command { + let mut command = Command::new(get_bin()); + command.arg("remove").args(reqs); + self.add_shared_args(&mut command); command - .arg("remove") - .args(reqs) - .arg("--cache-dir") - .arg(self.cache_dir.path()) - .env("VIRTUAL_ENV", self.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .env("HOME", self.home_dir.as_os_str()) - .current_dir(&self.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (4 * 1024 * 1024).to_string()); - } + } + /// Create a `uv clean` command. + pub fn clean(&self) -> Command { + let mut command = Command::new(get_bin()); + command.arg("clean"); + self.add_shared_args(&mut command); command } @@ -544,7 +466,7 @@ impl TestContext { /// Run the given python code and check whether it succeeds. pub fn assert_command(&self, command: &str) -> Assert { - std::process::Command::new(venv_to_interpreter(&self.venv)) + Command::new(venv_to_interpreter(&self.venv)) // Our tests change files in <1s, so we must disable CPython bytecode caching or we'll get stale files // https://github.com/python/cpython/issues/75953 .arg("-B") @@ -556,7 +478,7 @@ impl TestContext { /// Run the given python file and check whether it succeeds. pub fn assert_file(&self, file: impl AsRef) -> Assert { - std::process::Command::new(venv_to_interpreter(&self.venv)) + Command::new(venv_to_interpreter(&self.venv)) // Our tests change files in <1s, so we must disable CPython bytecode caching or we'll get stale files // https://github.com/python/cpython/issues/75953 .arg("-B") @@ -725,7 +647,7 @@ pub fn create_venv_from_executable>( cache_dir: &assert_fs::TempDir, python: &Path, ) { - Command::new(get_bin()) + assert_cmd::Command::new(get_bin()) .arg("venv") .arg(path.as_ref().as_os_str()) .arg("--cache-dir") @@ -801,7 +723,7 @@ pub enum WindowsFilters { /// /// This function is derived from `insta_cmd`s `spawn_with_info`. pub fn run_and_format>( - mut command: impl BorrowMut, + mut command: impl BorrowMut, filters: impl AsRef<[(T, T)]>, function_name: &str, windows_filters: Option, diff --git a/crates/uv/tests/pip_check.rs b/crates/uv/tests/pip_check.rs index b46236c34364..ae53db542292 100644 --- a/crates/uv/tests/pip_check.rs +++ b/crates/uv/tests/pip_check.rs @@ -6,45 +6,15 @@ use assert_fs::fixture::PathChild; use common::uv_snapshot; -use crate::common::{get_bin, TestContext, EXCLUDE_NEWER}; +use crate::common::{get_bin, TestContext}; mod common; -/// Create a `pip install` command with options shared across scenarios. -fn install_command(context: &TestContext) -> Command { - let mut command = Command::new(get_bin()); - command - .arg("pip") - .arg("install") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string()); - } - - command -} - /// Create a `pip check` command with options shared across scenarios. 
fn check_command(context: &TestContext) -> Command { let mut command = Command::new(get_bin()); - command - .arg("pip") - .arg("check") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir); - + command.arg("pip").arg("check"); + context.add_shared_args(&mut command); command } @@ -55,7 +25,8 @@ fn check_compatible_packages() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("requests==2.31.0")?; - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -98,7 +69,8 @@ fn check_incompatible_packages() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("requests==2.31.0")?; - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -121,7 +93,8 @@ fn check_incompatible_packages() -> Result<()> { let requirements_txt_idna = context.temp_dir.child("requirements_idna.txt"); requirements_txt_idna.write_str("idna==2.4")?; - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements_idna.txt") .arg("--strict"), @r###" @@ -165,7 +138,8 @@ fn check_multiple_incompatible_packages() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("requests==2.31.0")?; - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -188,7 +162,8 @@ fn check_multiple_incompatible_packages() -> Result<()> { let requirements_txt_two = context.temp_dir.child("requirements_two.txt"); requirements_txt_two.write_str("idna==2.4\nurllib3==1.20")?; - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements_two.txt") .arg("--strict"), @r###" diff --git a/crates/uv/tests/pip_compile.rs b/crates/uv/tests/pip_compile.rs index 3a7d5d6147de..9b7c6df72323 100644 --- a/crates/uv/tests/pip_compile.rs +++ b/crates/uv/tests/pip_compile.rs @@ -4,7 +4,6 @@ use std::env::current_dir; use std::fs; use std::path::PathBuf; -use std::process::Command; use anyhow::{bail, Context, Result}; use assert_fs::prelude::*; @@ -14,8 +13,6 @@ use url::Url; use common::{uv_snapshot, TestContext}; use uv_fs::Simplified; -use crate::common::get_bin; - mod common; /// Resolve a specific version of `anyio` from a `requirements.in` file. @@ -26,7 +23,7 @@ fn compile_requirements_in() -> Result<()> { requirements_in.write_str("anyio==3.7.0")?; uv_snapshot!(context - .compile() + .pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -55,7 +52,7 @@ fn compile_requirements_in_annotation_line() -> Result<()> { requirements_in.write_str("anyio==3.7.0")?; uv_snapshot!(context - .compile() + .pip_compile() .arg("--annotation-style=line") .arg("requirements.in"), @r###" success: true @@ -83,7 +80,7 @@ fn compile_requirements_in_stdin() -> Result<()> { requirements_in.write_str("anyio==3.7.0")?; uv_snapshot!(context - .compile() + .pip_compile() .stdin(fs::File::open(requirements_in)?) 
.arg("-"), @r###" success: true @@ -109,7 +106,7 @@ fn missing_requirements_in() { let context = TestContext::new("3.12"); let requirements_in = context.temp_dir.child("requirements.in"); - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 2 @@ -129,7 +126,7 @@ fn missing_venv() -> Result<()> { context.temp_dir.child("requirements.in").touch()?; fs_err::remove_dir_all(context.venv.path())?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -166,7 +163,7 @@ dependencies = [ "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml"), @r###" success: true exit_code: 0 @@ -207,7 +204,7 @@ dependencies = [ "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml"), @r###" success: true exit_code: 0 @@ -247,7 +244,7 @@ dependencies = [ "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("--annotation-style=line") .arg("pyproject.toml"), @r###" success: true @@ -277,7 +274,7 @@ fn compile_constraints_txt() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("idna<3.4")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--constraint") .arg("constraints.txt"), @r###" @@ -314,7 +311,7 @@ fn compile_constraints_inline() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("idna<3.4")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -344,7 +341,7 @@ fn compile_constraints_markers() -> Result<()> { constraints_txt.write_str("sniffio==1.2.0;python_version<='3.7'")?; constraints_txt.write_str("sniffio==1.3.0;python_version>'3.7'")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--constraint") .arg("constraints.txt"), @r###" @@ -383,7 +380,7 @@ fn compile_constraint_extra() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("flask[dotenv]<24.3.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--constraint") .arg("constraints.txt"), @r###" @@ -438,7 +435,7 @@ optional-dependencies.foo = [ "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml") .arg("--extra") .arg("foo"), @r###" @@ -481,7 +478,7 @@ optional-dependencies."FrIeNdLy-._.-bArD" = [ "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml") .arg("--extra") .arg("FRiENDlY-...-_-BARd"), @r###" @@ -524,7 +521,7 @@ optional-dependencies.foo = [ "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml") .arg("--extra") .arg("bar"), @r###" @@ -566,7 +563,7 @@ build-backend = "poetry.core.masonry.api" "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml") .arg("--extra") .arg("test"), @r###" @@ -627,7 +624,7 @@ build-backend = "poetry.core.masonry.api" "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml"), @r###" success: true exit_code: 0 @@ -682,7 +679,7 @@ build-backend = "poetry.core.masonry.api" "#, )?; - 
uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml"), @r###" success: false exit_code: 2 @@ -741,7 +738,7 @@ setup( "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml") .arg("--extra") .arg("dev"), @r###" @@ -799,7 +796,7 @@ setup( "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("setup.cfg") .arg("--extra") .arg("dev"), @r###" @@ -847,7 +844,7 @@ setup( "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("setup.py") .arg("--extra") .arg("dev"), @r###" @@ -890,7 +887,7 @@ dependencies = [ "#, )?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("pyproject.toml"), @r###" success: false exit_code: 2 @@ -929,7 +926,7 @@ optional-dependencies.foo = [ "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml") .arg("--extra") .arg("foo") @@ -956,7 +953,7 @@ fn compile_requirements_file_extra() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio==3.7.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--all-extras"), @r###" @@ -990,7 +987,7 @@ optional-dependencies.foo = [ "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml") .arg("--extra") .arg("invalid name!"), @r###" @@ -1015,7 +1012,7 @@ fn compile_python_312() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("black==23.10.1")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--python-version") .arg("3.12"), @r###" @@ -1052,7 +1049,7 @@ fn compile_python_312_annotation_line() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("black==23.10.1")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("--annotation-style=line") .arg("requirements.in") .arg("--python-version") @@ -1084,7 +1081,7 @@ fn compile_python_312_no_deps() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("black==23.10.1")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--no-deps") .arg("--python-version") @@ -1124,7 +1121,7 @@ fn compile_python_37() -> Result<()> { .chain(context.filters()) .collect(); - uv_snapshot!(filters, context.compile() + uv_snapshot!(filters, context.pip_compile() .arg("requirements.in") .arg("--python-version") .arg("3.7"), @r###" @@ -1149,7 +1146,7 @@ fn compile_sdist_resolution_lowest() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--resolution=lowest-direct") .arg("--python-version") @@ -1181,7 +1178,7 @@ fn compile_python_invalid_version() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("black==23.10.1")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--python-version") .arg("3.7.x"), @r###" @@ -1206,7 +1203,7 @@ fn 
compile_python_dev_version() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("black==23.10.1")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--python-version") .arg("3.7-dev"), @r###" @@ -1235,7 +1232,7 @@ fn omit_non_matching_annotation() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("idna <3.7; python_version < '3.7'")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("-c") .arg("constraints.txt"), @r###" @@ -1268,7 +1265,7 @@ fn compile_numpy_py38() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("numpy")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--no-build"), @r###" success: true @@ -1294,7 +1291,7 @@ fn compile_wheel_url_dependency() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1335,7 +1332,7 @@ fn compile_sdist_url_dependency() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask @ https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1383,7 +1380,7 @@ fn compile_git_https_dependency() -> Result<()> { .chain(context.filters()) .collect(); - uv_snapshot!(filters, context.compile() + uv_snapshot!(filters, context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1410,7 +1407,7 @@ fn compile_git_branch_https_dependency() -> Result<()> { "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@test-branch", )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1438,7 +1435,7 @@ fn compile_git_tag_https_dependency() -> Result<()> { "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@test-tag", )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1468,7 +1465,7 @@ fn compile_git_date_tag_https_dependency() -> Result<()> { "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@20240402", )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1496,7 +1493,7 @@ fn compile_git_long_commit_https_dependency() -> Result<()> { "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979", )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1524,7 +1521,7 @@ fn compile_git_short_commit_https_dependency() -> Result<()> { "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd6", )?; - uv_snapshot!(context.compile() + 
uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1551,7 +1548,7 @@ fn compile_git_refs_https_dependency() -> Result<()> { requirements_in .write_str("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@refs/pull/4/head")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1577,7 +1574,7 @@ fn compile_git_subdirectory_dependency() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1604,7 +1601,7 @@ fn compile_git_concurrent_access() -> Result<()> { requirements_in .write_str("example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a\nexample-pkg-b @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1633,7 +1630,7 @@ fn compile_git_unnamed_concurrent_access() -> Result<()> { requirements_in .write_str("git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a\ngit+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1662,7 +1659,7 @@ fn compile_git_mismatched_name() -> Result<()> { requirements_in .write_str("flask @ git+https://github.com/pallets/flask.git@2.0.0\ndask @ git+https://github.com/pallets/flask.git@3.0.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 2 @@ -1686,7 +1683,7 @@ fn compile_git_subdirectory_static_metadata() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("uv-public-pypackage @ git+https://github.com/astral-test/uv-workspace-pypackage#subdirectory=uv-public-pypackage")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1712,7 +1709,7 @@ fn mixed_url_dependency() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1754,7 +1751,7 @@ fn conflicting_direct_url_dependency() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("werkzeug==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() 
.arg("requirements.in"), @r###" success: false exit_code: 1 @@ -1777,7 +1774,7 @@ fn compatible_direct_url_dependency() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("werkzeug==2.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1802,7 +1799,7 @@ fn conflicting_repeated_url_dependency_version_mismatch() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("werkzeug @ https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 2 @@ -1829,7 +1826,7 @@ fn conflicting_repeated_url_dependency_markers() -> Result<()> { werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl ; python_version < '3.10' "})?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1857,7 +1854,7 @@ fn conflicting_repeated_url_dependency_version_match() -> Result<()> { requirements_in.write_str("werkzeug @ git+https://github.com/pallets/werkzeug.git@2.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 2 @@ -1880,7 +1877,7 @@ fn conflicting_transitive_url_dependency() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 1 @@ -1906,7 +1903,7 @@ fn compatible_repeated_url_dependency() -> Result<()> { uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0.0.2 "})?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1935,7 +1932,7 @@ fn conflicting_repeated_url_dependency() -> Result<()> { uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.1 "})?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 2 @@ -1963,7 +1960,7 @@ fn compatible_narrowed_url_dependency() -> Result<()> { uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389 "})?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -1993,7 +1990,7 @@ fn compatible_broader_url_dependency() -> Result<()> { uv-public-pypackage @ 
git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2 "})?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -2023,7 +2020,7 @@ fn compatible_repeated_narrowed_url_dependency() -> Result<()> { uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2 "})?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -2056,7 +2053,7 @@ fn incompatible_narrowed_url_dependency() -> Result<()> { uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@test-branch "})?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 2 @@ -2081,7 +2078,7 @@ fn allowed_transitive_git_dependency() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("hatchling_editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -2114,7 +2111,7 @@ fn allowed_transitive_url_dependency() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--constraint") .arg("constraints.txt"), @r###" @@ -2152,7 +2149,7 @@ fn allowed_transitive_canonical_url_dependency() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("iniconfig @ git+https://github.com/pytest-dev/iniconfig.git@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--constraint") .arg("constraints.txt"), @r###" @@ -2187,7 +2184,7 @@ fn allowed_transitive_url_path_dependency() -> Result<()> { requirements_in.write_str("hatchling_editable @ ${HATCH_PATH}")?; let hatchling_path = current_dir()?.join("../../scripts/packages/hatchling_editable"); - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .env("HATCH_PATH", hatchling_path.as_os_str()), @r###" success: true @@ -2223,7 +2220,7 @@ fn requirement_constraint_override_url() -> Result<()> { let overrides_txt = context.temp_dir.child("overrides.txt"); overrides_txt.write_str("anyio==3.7.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--constraint") .arg("constraints.txt") @@ -2255,7 +2252,7 @@ fn requirement_override_prerelease() -> Result<()> { let overrides_txt = context.temp_dir.child("overrides.txt"); overrides_txt.write_str("flask<2.0.1,!=2.0.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--override") .arg("overrides.txt"), @r###" @@ -2309,7 +2306,7 @@ optional-dependencies.bar = [ "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml") .arg("--all-extras"), @r###" success: true @@ -2365,7 +2362,7 @@ optional-dependencies.bar = [ "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("--annotation-style=line") .arg("pyproject.toml") .arg("--all-extras"), @r###" @@ -2411,7 +2408,7 @@ 
optional-dependencies.bar = [ "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml") .arg("--all-extras") .arg("--extra") @@ -2449,7 +2446,7 @@ dependencies = ["anyio==3.7.0", "anyio==4.0.0"] "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml"), @r###" success: false exit_code: 1 @@ -2481,7 +2478,7 @@ dependencies = ["anyio==300.1.4"] "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml"), @r###" success: false exit_code: 1 @@ -2503,25 +2500,18 @@ fn compile_exclude_newer() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("tqdm")?; - uv_snapshot!(Command::new(get_bin()) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--exclude-newer") - // 4.64.0: 2022-04-04T01:48:46.194635Z1 - // 4.64.1: 2022-09-03T11:10:27.148080Z - .arg("2022-04-04T12:00:00Z") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string()) - .current_dir(context.temp_dir.path()), @r###" + uv_snapshot!(context + .pip_compile_without_exclude_newer() + .arg("requirements.in") + .arg("--exclude-newer") + // 4.64.0: 2022-04-04T01:48:46.194635Z1 + // 4.64.1: 2022-09-03T11:10:27.148080Z + .arg("2022-04-04T12:00:00Z"), @r###" success: true exit_code: 0 ----- stdout ----- # This file was autogenerated by uv via the following command: - # uv pip compile requirements.in --exclude-newer 2022-04-04T12:00:00Z --cache-dir [CACHE_DIR] + # uv pip compile --cache-dir [CACHE_DIR] requirements.in --exclude-newer 2022-04-04T12:00:00Z tqdm==4.64.0 # via -r requirements.in @@ -2532,23 +2522,16 @@ fn compile_exclude_newer() -> Result<()> { // Use a date as input instead. 
// We interpret a date as including this day - uv_snapshot!(Command::new(get_bin()) - .arg("pip") - .arg("compile") + uv_snapshot!(context + .pip_compile_without_exclude_newer() .arg("requirements.in") .arg("--exclude-newer") - .arg("2022-04-04") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string()) - .current_dir(context.temp_dir.path()), @r###" + .arg("2022-04-04"), @r###" success: true exit_code: 0 ----- stdout ----- # This file was autogenerated by uv via the following command: - # uv pip compile requirements.in --exclude-newer 2022-04-04 --cache-dir [CACHE_DIR] + # uv pip compile --cache-dir [CACHE_DIR] requirements.in --exclude-newer 2022-04-04 tqdm==4.64.0 # via -r requirements.in @@ -2558,18 +2541,11 @@ fn compile_exclude_newer() -> Result<()> { ); // Check the error message for invalid datetime - uv_snapshot!(Command::new(get_bin()) - .arg("pip") - .arg("compile") + uv_snapshot!(context + .pip_compile_without_exclude_newer() .arg("requirements.in") .arg("--exclude-newer") - .arg("2022-04-04+02:00") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string()) - .current_dir(context.temp_dir.path()), @r###" + .arg("2022-04-04+02:00"), @r###" success: false exit_code: 2 ----- stdout ----- @@ -2601,7 +2577,7 @@ fn compile_wheel_path_dependency() -> Result<()> { Url::from_file_path(flask_wheel.path()).unwrap() ))?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -2633,7 +2609,7 @@ fn compile_wheel_path_dependency() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask @ file:flask-3.0.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -2666,7 +2642,7 @@ fn compile_wheel_path_dependency() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask @ file://flask-3.0.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -2699,7 +2675,7 @@ fn compile_wheel_path_dependency() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask @ ./flask-3.0.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -2732,7 +2708,7 @@ fn compile_wheel_path_dependency() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str(&format!("flask @ {}", flask_wheel.path().display()))?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -2766,7 +2742,7 @@ fn compile_wheel_path_dependency() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str(&format!("flask @ file://{}", flask_wheel.path().display()))?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in"), @r###" success: true 
exit_code: 0 @@ -2803,7 +2779,7 @@ fn compile_wheel_path_dependency() -> Result<()> { flask_wheel.path().display() ))?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -2851,7 +2827,7 @@ fn compile_source_distribution_path_dependency() -> Result<()> { Url::from_file_path(flask_wheel.path()).unwrap() ))?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -2895,7 +2871,7 @@ fn compile_wheel_path_dependency_missing() -> Result<()> { .simplified_display() ))?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 2 @@ -2915,7 +2891,7 @@ fn compile_yanked_version_direct() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("attrs==21.1.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -2941,7 +2917,7 @@ fn compile_yanked_version_indirect() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("attrs>20.3.0,<21.2.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 1 @@ -2972,7 +2948,7 @@ fn override_dependency() -> Result<()> { let overrides_txt = context.temp_dir.child("overrides.txt"); overrides_txt.write_str("werkzeug==2.3.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--override") .arg("overrides.txt"), @r###" @@ -3028,7 +3004,7 @@ fn override_dependency_from_pyproject() -> Result<()> { "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml") .current_dir(&context.temp_dir) , @r###" @@ -3090,7 +3066,7 @@ fn override_dependency_from_specific_uv_toml() -> Result<()> { "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml") .arg("--config-file") .arg("../uv/uv.toml") @@ -3141,7 +3117,7 @@ fn override_multi_dependency() -> Result<()> { "tomli>=1.1.0; python_version >= '3.11'\ntomli<1.0.0; python_version < '3.11'", )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--override") .arg("overrides.txt"), @r###" @@ -3195,7 +3171,7 @@ fn override_dependency_from_workspace_invalid_syntax() -> Result<()> { "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml") .current_dir(&context.temp_dir) , @r###" @@ -3231,7 +3207,7 @@ fn override_dependency_url() -> Result<()> { let overrides_txt = context.temp_dir.child("overrides.txt"); overrides_txt.write_str("werkzeug @ https://files.pythonhosted.org/packages/cc/94/5f7079a0e00bd6863ef8f1da638721e9da21e5bacee597595b318f71d62e/Werkzeug-1.0.1-py2.py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--override") .arg("overrides.txt"), @r###" @@ -3276,7 +3252,7 @@ fn override_dependency_unnamed_url() -> Result<()> { let overrides_txt = context.temp_dir.child("overrides.txt"); 
overrides_txt.write_str("https://files.pythonhosted.org/packages/cc/94/5f7079a0e00bd6863ef8f1da638721e9da21e5bacee597595b318f71d62e/Werkzeug-1.0.1-py2.py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--override") .arg("overrides.txt"), @r###" @@ -3317,7 +3293,7 @@ fn missing_registry_extra() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("black[tensorboard]==23.10.1")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -3353,7 +3329,7 @@ fn missing_url_extra() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask[tensorboard] @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -3394,7 +3370,7 @@ fn preserve_url() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask @ https://files.PYTHONHOSTED.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -3440,7 +3416,7 @@ fn preserve_project_root() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask @ file://${PROJECT_ROOT}/flask-3.0.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -3480,7 +3456,7 @@ fn respect_http_env_var() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask @ ${URL}")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .env("URL", "https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl"), @r###" success: true @@ -3521,7 +3497,7 @@ fn respect_unnamed_env_var() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("${URL}")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .env("URL", "https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl"), @r###" success: true @@ -3563,7 +3539,7 @@ fn error_missing_unnamed_env_var() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("${URL}")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 2 @@ -3593,7 +3569,7 @@ fn respect_file_env_var() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask @ ${FILE_PATH}")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .env("FILE_PATH", context.temp_dir.join("flask-3.0.0-py3-none-any.whl")), @r###" success: true @@ -3637,7 +3613,7 @@ fn compile_editable() -> Result<()> { " })?; - uv_snapshot!(context.filters(), context.compile() + 
uv_snapshot!(context.filters(), context.pip_compile() .arg(requirements_in.path()) .current_dir(current_dir()?), @r###" success: true @@ -3697,7 +3673,7 @@ fn deduplicate_editable() -> Result<()> { " })?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg(requirements_in.path()) .current_dir(current_dir()?), @r###" success: true @@ -3744,7 +3720,7 @@ fn strip_fragment_unnamed() -> Result<()> { " })?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg(requirements_in.path()) .current_dir(current_dir()?), @r###" success: true @@ -3771,7 +3747,7 @@ fn strip_fragment_named() -> Result<()> { " })?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg(requirements_in.path()) .current_dir(current_dir()?), @r###" success: true @@ -3795,14 +3771,8 @@ fn recursive_extras_direct_url() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("black[dev] @ ../../scripts/packages/black_editable")?; - let mut command = context.compile(); - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string()); - } - - uv_snapshot!(context.filters(), command + uv_snapshot!(context.filters(), context + .pip_compile() .arg(requirements_in.path()) .current_dir(current_dir().unwrap()), @r###" success: true @@ -3847,7 +3817,7 @@ fn compile_editable_url_requirement() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("-e ../../scripts/packages/hatchling_editable")?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg(requirements_in.path()) .current_dir(current_dir()?), @r###" success: true @@ -3903,7 +3873,7 @@ fn cache_errors_are_non_fatal() -> Result<()> { ]; let check = || { - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pip") .arg("compile") .arg(requirements_in.path()) @@ -3966,23 +3936,15 @@ fn compile_html() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("jinja2<=3.1.2")?; - uv_snapshot!(Command::new(get_bin()) - .arg("pip") - .arg("compile") + uv_snapshot!(context.pip_compile_without_exclude_newer() .arg("requirements.in") - .arg("--cache-dir") - .arg(context.cache_dir.path()) .arg("--index-url") - .arg("https://download.pytorch.org/whl") - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string()) - .current_dir(context.temp_dir.path()), @r###" + .arg("https://download.pytorch.org/whl"), @r###" success: true exit_code: 0 ----- stdout ----- # This file was autogenerated by uv via the following command: - # uv pip compile requirements.in --cache-dir [CACHE_DIR] + # uv pip compile --cache-dir [CACHE_DIR] requirements.in jinja2==3.1.2 # via -r requirements.in markupsafe==2.1.5 @@ -4003,7 +3965,7 @@ fn trailing_slash() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("jinja2")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--index-url") .arg("https://test.pypi.org/simple"), @r###" @@ -4022,7 +3984,7 @@ fn trailing_slash() -> Result<()> { "### 
); - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--index-url") .arg("https://test.pypi.org/simple/"), @r###" @@ -4051,7 +4013,7 @@ fn compile_legacy_sdist_pep_517() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -4082,7 +4044,7 @@ fn compile_legacy_sdist_setuptools() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--legacy-setup-py"), @r###" success: true @@ -4114,7 +4076,7 @@ fn generate_hashes_registry() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio==4.0.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--generate-hashes"), @r###" success: true @@ -4150,7 +4112,7 @@ fn generate_hashes_source_distribution_url() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--generate-hashes"), @r###" success: true @@ -4185,7 +4147,7 @@ fn generate_hashes_built_distribution_url() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--generate-hashes"), @r###" success: true @@ -4220,7 +4182,7 @@ fn generate_hashes_git() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio @ git+https://github.com/agronholm/anyio@4.3.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--generate-hashes"), @r###" success: true @@ -4254,7 +4216,7 @@ fn generate_hashes_unnamed_url() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--generate-hashes"), @r###" success: true @@ -4294,7 +4256,7 @@ fn generate_hashes_local_directory() -> Result<()> { " })?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg(requirements_in.path()) .arg("--generate-hashes") .current_dir(current_dir()?), @r###" @@ -4337,7 +4299,7 @@ fn generate_hashes_editable() -> Result<()> { " })?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() 
.arg(requirements_in.path()) .arg("--generate-hashes") .current_dir(current_dir()?), @r###" @@ -4379,7 +4341,7 @@ fn find_links_directory() -> Result<()> { werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl "})?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in") .arg("--find-links") .arg(context.workspace_root.join("scripts").join("links")), @r###" @@ -4411,7 +4373,7 @@ fn find_links_url() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("tqdm")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--no-index") .arg("--find-links") @@ -4439,7 +4401,7 @@ fn find_links_env_var() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("tqdm\n--find-links ${URL}")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--no-index") .env("URL", "https://download.pytorch.org/whl/torch_stable.html"), @r###" @@ -4467,7 +4429,7 @@ fn find_links_requirements_txt() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("-f https://download.pytorch.org/whl/torch_stable.html\ntqdm")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--no-index") .arg("--emit-find-links"), @r###" @@ -4501,7 +4463,7 @@ fn avoid_irrelevant_extras() -> Result<()> { extras[dev] "})?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--find-links") .arg(context.workspace_root.join("scripts").join("links")), @r###" @@ -4554,7 +4516,7 @@ coverage = ["example[test]", "extras>=0.0.1,<=0.0.2"] let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("-e .[test,coverage]")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--find-links") .arg(context.workspace_root.join("scripts").join("links")), @r###" @@ -4601,7 +4563,7 @@ fn upgrade_none() -> Result<()> { # via black "})?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--output-file") .arg("requirements.txt"), @r###" @@ -4656,7 +4618,7 @@ fn upgrade_all() -> Result<()> { # via black "})?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--output-file") .arg("requirements.txt") @@ -4712,7 +4674,7 @@ fn upgrade_package() -> Result<()> { # via black "})?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--output-file") .arg("requirements.txt") @@ -4760,7 +4722,7 @@ fn missing_path_requirement() -> Result<()> { .chain(context.filters()) .collect(); - uv_snapshot!(filters, context.compile() + uv_snapshot!(filters, context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 2 @@ -4780,7 +4742,7 @@ fn missing_editable_file() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("-e foo/anyio-3.7.0.tar.gz")?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 2 @@ -4801,7 +4763,7 @@ fn missing_editable_directory() -> Result<()> { let 
requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("-e foo/bar")?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 2 @@ -4822,7 +4784,7 @@ fn unnamed_requirement_with_package_name() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -4861,7 +4823,7 @@ fn no_annotate() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("black==23.10.1")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--no-annotate"), @r###" success: true @@ -4891,7 +4853,7 @@ fn no_header() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("black==23.10.1")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--no-header"), @r###" success: true @@ -4925,7 +4887,7 @@ fn custom_compile_command() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("black==23.10.1")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--custom-compile-command") .arg("./custom-uv-compile.sh"), @r###" @@ -4953,7 +4915,7 @@ fn custom_compile_command() -> Result<()> { ); // with env var - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .env("UV_CUSTOM_COMPILE_COMMAND", "./custom-uv-compile.sh"), @r###" success: true @@ -4989,7 +4951,7 @@ fn allow_unsafe() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("werkzeug==3.0.1")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--allow-unsafe"), @r###" success: true @@ -5018,7 +4980,7 @@ fn resolver_legacy() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("werkzeug==3.0.1")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--resolver=legacy"), @r###" success: false @@ -5041,7 +5003,7 @@ fn emit_index_urls() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("black==23.10.1")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--emit-index-url") .arg("--index-url") @@ -5084,7 +5046,7 @@ fn emit_find_links() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("black==23.10.1")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--emit-find-links") .arg("--find-links") @@ -5124,7 +5086,7 @@ fn emit_build_options() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("black==23.10.1")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--emit-build-options") .arg("--only-binary") @@ -5167,7 +5129,7 @@ fn no_index_requirements_txt() -> Result<()> { let 
requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("--no-index\ntqdm")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 1 @@ -5193,7 +5155,7 @@ fn index_url_requirements_txt() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("--index-url https://google.com\ntqdm")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--index-url") .arg("https://pypi.org/simple"), @r###" @@ -5223,7 +5185,7 @@ fn conflicting_index_urls_requirements_txt() -> Result<()> { let constraints_in = context.temp_dir.child("constraints.in"); constraints_in.write_str("--index-url https://wikipedia.org\nflask")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--constraint") .arg("constraints.in"), @r###" @@ -5249,7 +5211,7 @@ fn matching_index_urls_requirements_txt() -> Result<()> { let constraints_in = context.temp_dir.child("constraints.in"); constraints_in.write_str("--index-url https://pypi.org/simple")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--constraint") .arg("constraints.in"), @r###" @@ -5275,7 +5237,7 @@ fn offline_registry() -> Result<()> { requirements_in.write_str("black==23.10.1")?; // Resolve with `--offline` with an empty cache. - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--offline"), @r###" success: false @@ -5291,7 +5253,7 @@ fn offline_registry() -> Result<()> { ); // Populate the cache. - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -5317,7 +5279,7 @@ fn offline_registry() -> Result<()> { ); // Resolve with `--offline` with a populated cache. - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--offline"), @r###" success: true @@ -5355,7 +5317,7 @@ fn offline_registry_backtrack() -> Result<()> { requirements_in.write_str("iniconfig==1.1.1")?; // Populate the cache. - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -5374,7 +5336,7 @@ fn offline_registry_backtrack() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("iniconfig")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--offline"), @r###" success: true @@ -5404,7 +5366,7 @@ fn offline_find_links() -> Result<()> { // Resolve with `--offline` and `--find-links`. We indicate that the network was disabled, // since both the `--find-links` and the registry lookups fail (but, importantly, we don't error // when failing to fetch the `--find-links` URL). - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--find-links") .arg("https://download.pytorch.org/whl/torch_stable.html") @@ -5422,7 +5384,7 @@ fn offline_find_links() -> Result<()> { ); // Resolve with `--offline`, `--find-links`, and `--no-index`. 
- uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--find-links") .arg("https://download.pytorch.org/whl/torch_stable.html") @@ -5451,7 +5413,7 @@ fn offline_direct_url() -> Result<()> { requirements_in.write_str("iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl")?; // Resolve with `--offline` with an empty cache. - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--offline"), @r###" success: false @@ -5465,7 +5427,7 @@ fn offline_direct_url() -> Result<()> { ); // Populate the cache. - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -5481,7 +5443,7 @@ fn offline_direct_url() -> Result<()> { ); // Resolve with `--offline` with a populated cache. - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--offline"), @r###" success: true @@ -5509,7 +5471,7 @@ fn invalid_metadata_requires_python() -> Result<()> { requirements_in.write_str("validation==2.0.0")?; // `2.0.0` has invalid metadata. - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--no-index") .arg("--find-links") @@ -5541,7 +5503,7 @@ fn invalid_metadata_multiple_dist_info() -> Result<()> { requirements_in.write_str("validation==3.0.0")?; // `3.0.0` has an invalid structure (multiple `.dist-info` directories). - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--no-index") .arg("--find-links") @@ -5571,7 +5533,7 @@ fn invalid_metadata_backtrack() -> Result<()> { // `2.0.0` and `3.0.0` have invalid metadata. We should backtrack to `1.0.0` (the preceding // version, which has valid metadata). 
- uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--no-index") .arg("--find-links") @@ -5607,7 +5569,7 @@ fn compile_relative_subfile() -> Result<()> { requirements_dev_in.write_str("anyio")?; uv_snapshot!(context - .compile() + .pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -5636,7 +5598,7 @@ fn compile_none_extra() -> Result<()> { requirements_in.write_str("entrypoints==0.3")?; uv_snapshot!(context - .compile() + .pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -5666,7 +5628,7 @@ fn compile_types_pytz() -> Result<()> { requirements_txt.write_str("types-pytz==2021.1")?; uv_snapshot!(context - .compile() + .pip_compile() .arg("requirements.in") .arg("-o") .arg("requirements.txt"), @r###" @@ -5697,7 +5659,7 @@ fn compile_unnamed_preference() -> Result<()> { requirements_txt.write_str("./scripts/packages/black_editable")?; uv_snapshot!(context - .compile() + .pip_compile() .arg("requirements.in") .arg("-o") .arg("requirements.txt"), @r###" @@ -5737,7 +5699,7 @@ fn compile_constraints_compatible_url() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--constraint") .arg("constraints.txt"), @r###" @@ -5774,7 +5736,7 @@ fn compile_constraints_compatible_url_version() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("anyio>4")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--constraint") .arg("constraints.txt"), @r###" @@ -5811,7 +5773,7 @@ fn compile_constraints_incompatible_url() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--constraint") .arg("constraints.txt"), @r###" @@ -5836,7 +5798,7 @@ fn index_url_in_requirements() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("--index-url https://download.pytorch.org/whl\nanyio<4")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 1 @@ -5859,7 +5821,7 @@ fn index_url_from_command_line() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("--index-url https://download.pytorch.org/whl\nanyio<4")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--index-url") .arg("https://pypi.org/simple"), @r###" @@ -5891,7 +5853,7 @@ fn unsupported_scheme() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio @ bzr+https://example.com/anyio")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 2 @@ -5915,7 +5877,7 @@ fn no_deps_valid_extra() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask[dotenv]")?; - 
uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--no-deps"), @r###" success: true @@ -5941,7 +5903,7 @@ fn no_deps_invalid_extra() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask[empty]")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--no-deps"), @r###" success: true @@ -6010,7 +5972,7 @@ dependencies = [ editable_dir2.path().display() })?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in") .arg("--no-deps"), @r###" success: true @@ -6038,7 +6000,7 @@ fn editable_invalid_extra() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("-e ../../scripts/packages/black_editable[empty]")?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg(requirements_in.path()) .current_dir(current_dir()?), @r###" success: true @@ -6064,7 +6026,7 @@ fn no_strip_extra() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask[dotenv]")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--no-strip-extras"), @r###" success: true @@ -6107,7 +6069,7 @@ fn no_strip_extras() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio[trio]\nanyio[doc]")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--no-strip-extras"), @r###" success: true @@ -6206,7 +6168,7 @@ fn no_strip_markers() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio ; python_version > '3.11'")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--no-strip-markers") .arg("--python-platform") @@ -6242,7 +6204,7 @@ fn no_strip_markers_multiple_markers() -> Result<()> { trio ; sys_platform == 'win32' "})?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--no-strip-markers") .arg("--python-platform") @@ -6287,7 +6249,7 @@ fn no_strip_markers_transitive_marker() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("trio ; python_version > '3.11'")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--no-strip-markers") .arg("--python-platform") @@ -6334,7 +6296,7 @@ fn universal() -> Result<()> { trio ; sys_platform == 'win32' "})?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--universal"), @r###" success: true @@ -6379,7 +6341,7 @@ fn universal_conflicting() -> Result<()> { trio==0.10.0 ; sys_platform == 'win32' "})?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--universal"), @r###" success: true @@ -6429,7 +6391,7 @@ fn compile_constraints_compatible_version() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("filelock==3.8.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--constraint") .arg("constraints.txt"), @r###" @@ -6468,7 +6430,7 @@ fn compile_constraints_incompatible_version() -> 
Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("filelock==3.8.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--constraint") .arg("constraints.txt"), @r###" @@ -6496,7 +6458,7 @@ fn conflicting_url_markers() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("filelock==3.8.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--constraint") .arg("constraints.txt"), @r###" @@ -6526,7 +6488,7 @@ fn editable_override() -> Result<()> { let overrides_txt = context.temp_dir.child("overrides.txt"); overrides_txt.write_str("-e ../../scripts/packages/black_editable")?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg(requirements_in.path()) .arg("--override") .arg(overrides_txt.path()) @@ -6559,7 +6521,7 @@ fn override_editable() -> Result<()> { let overrides_txt = context.temp_dir.child("overrides.txt"); overrides_txt.write_str("black==23.10.1")?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg(requirements_in.path()) .arg("--override") .arg(overrides_txt.path()) @@ -6605,7 +6567,7 @@ fn override_with_compatible_constraint() -> Result<()> { let overrides_txt = context.temp_dir.child("overrides.txt"); overrides_txt.write_str("anyio>=3.0.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--constraint") .arg("constraints.txt") @@ -6648,7 +6610,7 @@ fn override_with_incompatible_constraint() -> Result<()> { let overrides_txt = context.temp_dir.child("overrides.txt"); overrides_txt.write_str("anyio>=3.0.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--constraint") .arg("constraints.txt") @@ -6674,7 +6636,7 @@ fn unsafe_package() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--unsafe-package") .arg("jinja2") @@ -6719,7 +6681,7 @@ fn pre_release_upper_bound_exclude() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask<2.0.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--prerelease=allow"), @r###" success: true @@ -6756,7 +6718,7 @@ fn pre_release_upper_bound_include() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask<2.0.0rc4")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--prerelease=allow"), @r###" success: true @@ -6794,7 +6756,7 @@ fn pre_alias() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask<2.0.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--pre"), @r###" success: true @@ -6833,7 +6795,7 @@ fn pre_release_constraint() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("flask<=2.0.0rc2")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--constraint") .arg("constraints.txt"), @r###" @@ -6892,7 +6854,7 @@ 
dev = [ "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml") .arg("--extra") .arg("dev"), @r###" @@ -6942,7 +6904,7 @@ dev = [ "#, )?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("pyproject.toml") .arg("--extra") .arg("dev"), @r###" @@ -6975,7 +6937,7 @@ fn editable_direct_dependency() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("-e ../../scripts/packages/setuptools_editable")?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg(requirements_in.path()) .arg("--resolution") .arg("lowest-direct") @@ -7004,7 +6966,7 @@ fn empty_index_url_env_var() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--emit-index-url") .env("UV_INDEX_URL", ""), @r###" @@ -7037,7 +6999,7 @@ fn empty_extra_index_url_env_var() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--emit-index-url") .env("EXTRA_UV_INDEX_URL", ""), @r###" @@ -7071,7 +7033,7 @@ fn empty_index_url_env_var_override() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("--index-url https://test.pypi.org/simple\nidna")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--emit-index-url") .env("UV_INDEX_URL", ""), @r###" @@ -7100,7 +7062,7 @@ fn index_url_env_var_override() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("--index-url https://pypi.org/simple\nidna")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--emit-index-url") .env("UV_INDEX_URL", "https://test.pypi.org/simple"), @r###" @@ -7133,7 +7095,7 @@ fn expand_env_var_requirements_txt() -> Result<()> { let requirements_dev_in = context.temp_dir.child("requirements-dev.in"); requirements_dev_in.write_str("anyio")?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -7179,7 +7141,7 @@ requires-python = "<=3.8" let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str(&format!("-e {}", editable_dir.path().display()))?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 1 @@ -7230,7 +7192,7 @@ requires-python = "<=3.8" .chain(context.filters()) .collect(); - uv_snapshot!(filters, context.compile() + uv_snapshot!(filters, context.pip_compile() .arg("requirements.in") .arg("--python-version=3.11"), @r###" success: false @@ -7271,7 +7233,7 @@ dev = [ let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("-e .[dev]")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -7320,7 +7282,7 @@ dev = ["setuptools"] let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("-e .")?; - uv_snapshot!(context.compile() + 
uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--resolution=lowest-direct"), @r###" success: true @@ -7352,7 +7314,7 @@ fn metadata_2_2() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("pyo3-mixed @ https://files.pythonhosted.org/packages/2b/b8/e04b783d3569d5b61b1dcdfda683ac2e3617340539aecd0f099fbade0b4a/pyo3_mixed-2.1.5.tar.gz")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -7385,7 +7347,8 @@ fn no_stream() -> Result<()> { let constraints_in = context.temp_dir.child("constraints.in"); constraints_in.write_str("protobuf==5.26.0")?; - uv_snapshot!(context.compile_without_exclude_newer() + uv_snapshot!(context + .pip_compile_without_exclude_newer() .arg("requirements.in") .arg("-c") .arg("constraints.in") @@ -7418,7 +7381,7 @@ fn not_found_direct_url() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("iniconfig @ https://files.pythonhosted.org/packages/ef/a6/fake/iniconfig-2.0.0-py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 2 @@ -7457,7 +7420,7 @@ requires-python = "<=3.8" let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str(&format!("example @ {}", editable_dir.path().display()))?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 1 @@ -7482,7 +7445,7 @@ fn compile_root_uri_editable() -> Result<()> { requirements_in.write_str("-e ${ROOT_PATH}")?; let root_path = current_dir()?.join("../../scripts/packages/root_editable"); - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in") .env("ROOT_PATH", root_path.as_os_str()), @r###" success: true @@ -7513,7 +7476,7 @@ fn compile_root_uri_non_editable() -> Result<()> { let root_path = current_dir()?.join("../../scripts/packages/root_editable"); let black_path = current_dir()?.join("../../scripts/packages/black_editable"); - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in") .env("ROOT_PATH", root_path.as_os_str()) .env("BLACK_PATH", black_path.as_os_str()), @r###" @@ -7545,7 +7508,7 @@ fn requirement_wheel_name_mismatch() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("dateutil @ https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: false exit_code: 2 @@ -7579,7 +7542,7 @@ fn preserve_hashes_no_upgrade() -> Result<()> { "})?; // Avoid adding any additional hashes to the "lockfile". - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--output-file") .arg("requirements.txt") @@ -7623,7 +7586,7 @@ fn preserve_hashes_upgrade() -> Result<()> { "})?; // Requesting an upgrade should update the hashes, even if the version didn't change. 
- uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--output-file") .arg("requirements.txt") @@ -7712,7 +7675,7 @@ fn preserve_hashes_no_existing_hashes() -> Result<()> { "})?; // Add additional hashes to the "lockfile". - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--output-file") .arg("requirements.txt") @@ -7803,7 +7766,7 @@ fn preserve_hashes_newer_version() -> Result<()> { "})?; // Requesting a more specific version should update the hashes. - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--output-file") .arg("requirements.txt") @@ -7897,7 +7860,7 @@ fn unnamed_path_requirement() -> Result<()> { " })?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg(requirements_in.path()) .current_dir(current_dir()?), @r###" success: true @@ -7958,7 +7921,7 @@ fn unnamed_git_requirement() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("git+https://github.com/pallets/flask.git@3.0.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -7997,7 +7960,7 @@ fn unnamed_https_requirement() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("https://github.com/pallets/flask/archive/refs/tags/3.0.2.tar.gz")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -8035,7 +7998,7 @@ fn dynamic_dependencies() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("hatchling-dynamic @ ../../scripts/packages/hatchling_dynamic")?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg(requirements_in.path()) .current_dir(current_dir()?), @r###" success: true @@ -8075,7 +8038,7 @@ fn emit_marker_expression_exciting_linux() -> Result<()> { requirements_in.write_str("anyio")?; uv_snapshot!(context - .compile() + .pip_compile() .arg("requirements.in") .arg("--emit-marker-expression"), @r###" success: true @@ -8112,7 +8075,7 @@ fn emit_marker_expression_direct() -> Result<()> { requirements_in.write_str("anyio ; sys_platform == 'linux'")?; uv_snapshot!(context - .compile() + .pip_compile() .arg("requirements.in") .arg("--emit-marker-expression"), @r###" success: true @@ -8155,7 +8118,7 @@ fn emit_marker_expression_conditional() -> Result<()> { requirements_in.write_str("anyio ; sys_platform == 'macos'")?; uv_snapshot!(context - .compile() + .pip_compile() .arg("requirements.in") .arg("--emit-marker-expression"), @r###" success: true @@ -8192,7 +8155,7 @@ fn emit_marker_expression_pypy() -> Result<()> { requirements_in.write_str("pendulum")?; uv_snapshot!(context - .compile() + .pip_compile() .arg("requirements.in") .arg("--emit-marker-expression"), @r###" success: true @@ -8231,7 +8194,7 @@ fn local_version_of_remote_package() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio")?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg(requirements_in.canonicalize()?), @r###" success: true exit_code: 0 @@ -8250,7 +8213,7 @@ fn local_version_of_remote_package() -> Result<()> { "###); // Actually install the 
local dependency - let mut command = context.install(); + let mut command = context.pip_install(); command.arg(root_path.join("anyio_local")); uv_snapshot!( context.filters(), @@ -8268,7 +8231,7 @@ fn local_version_of_remote_package() -> Result<()> { ); // The local version should _not_ be included in the resolution - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg(requirements_in.canonicalize()?), @r###" success: true exit_code: 0 @@ -8298,7 +8261,7 @@ fn local_version_of_remote_package() -> Result<()> { // `uv pip compile` does not have access to an environment and cannot consider installed packages // We may want to allow the lock file to be preserved in this case in the future, but right now // we require the URL to always be in the input file. - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg(requirements_in.canonicalize()?) .arg("--output-file") .arg(requirements_txt.canonicalize()?), @r###" @@ -8330,7 +8293,7 @@ fn pendulum_no_tzdata_on_windows() -> Result<()> { uv_snapshot!( context.filters(), windows_filters=false, - context.compile().arg("requirements.in"), @r###" + context.pip_compile().arg("requirements.in"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -8412,7 +8375,7 @@ requires-python = ">3.8" let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("./app")?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -8496,7 +8459,7 @@ requires-python = ">3.8" let overrides_txt = context.temp_dir.child("overrides.txt"); overrides_txt.write_str("anyio==3.7.0")?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in") .arg("--override") .arg("overrides.txt"), @r###" @@ -8588,7 +8551,7 @@ requires-python = ">3.8" overrides_txt.write_str("anyio==0.0.0")?; // Ensure that resolution fails, since `0.0.0` does not exist on PyPI. 
- uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in") .arg("--override") .arg("overrides.txt"), @r###" @@ -8608,7 +8571,7 @@ requires-python = ">3.8" let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("anyio @ ./anyio")?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in") .arg("--override") .arg("overrides.txt") @@ -8661,7 +8624,7 @@ requires-python = ">3.8" let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str(".")?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -8715,7 +8678,7 @@ requires-python = ">3.8" let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("-e .")?; - uv_snapshot!( context.compile() + uv_snapshot!( context.pip_compile() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -8758,7 +8721,7 @@ fn compile_index_url_first_match() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("jinja2==3.1.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("--index-url") .arg("https://pypi.org/simple") .arg("--extra-index-url") @@ -8790,7 +8753,7 @@ fn compile_index_url_fallback() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("jinja2==3.1.0")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("--index-strategy") .arg("unsafe-any-match") .arg("--index-url") @@ -8830,7 +8793,7 @@ fn compile_index_url_fallback_prefer_primary() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("--index-strategy") .arg("unsafe-any-match") .arg("--index-url") @@ -8870,7 +8833,7 @@ fn compile_index_url_unsafe_highest() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("--index-strategy") .arg("unsafe-best-match") .arg("--index-url") @@ -8910,7 +8873,7 @@ fn compile_index_url_unsafe_lowest() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("--resolution") .arg("lowest") .arg("--index-strategy") @@ -8946,7 +8909,7 @@ fn emit_index_annotation_hide_password() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("requests")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--emit-index-annotation") .env("UV_INDEX_URL", "https://test-user:test-password@pypi.org/simple"), @r###" @@ -8987,7 +8950,7 @@ fn emit_index_annotation_pypi_org_simple() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("requests")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--emit-index-annotation"), @r###" success: true @@ -9030,7 +8993,7 @@ fn emit_index_annotation_no_annotate() -> Result<()> { let requirements_in = 
context.temp_dir.child("requirements.in"); requirements_in.write_str("requests")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--emit-index-annotation") .arg("--no-annotate"), @r###" @@ -9066,7 +9029,7 @@ fn emit_index_annotation_line() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("requests")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--emit-index-annotation") .arg("--annotation-style") @@ -9103,7 +9066,7 @@ fn emit_index_annotation_multiple_indexes() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("uv\nrequests")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") .arg("--extra-index-url") .arg("https://test.pypi.org/simple") @@ -9136,7 +9099,7 @@ fn no_version_for_direct_dependency() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("pypyp==1,>=1.2")?; - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("requirements.in") // Must error before we make any network requests .arg("--offline"), @r###" @@ -9163,7 +9126,7 @@ fn python_platform() -> Result<()> { uv_snapshot!(context.filters(), windows_filters=false, - context.compile() + context.pip_compile() .arg("requirements.in") .arg("--python-platform") .arg("aarch64-unknown-linux-gnu"), @r###" @@ -9192,7 +9155,7 @@ fn python_platform() -> Result<()> { uv_snapshot!(context.filters(), windows_filters=false, - context.compile() + context.pip_compile() .arg("requirements.in") .arg("--python-platform") .arg("x86_64-pc-windows-msvc"), @r###" @@ -9249,7 +9212,7 @@ fn git_source_default_branch() -> Result<()> { .chain(context.filters()) .collect(); - uv_snapshot!(filters, context.compile() + uv_snapshot!(filters, context.pip_compile() .arg("--preview") .arg("pyproject.toml"), @r###" success: true @@ -9286,7 +9249,7 @@ fn git_source_branch() -> Result<()> { uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", branch = "test-branch" } "#})?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("--preview") .arg("pyproject.toml"), @r###" success: true @@ -9323,7 +9286,7 @@ fn git_source_tag() -> Result<()> { uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", tag = "test-tag" } "#})?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("--preview") .arg("pyproject.toml"), @r###" success: true @@ -9360,7 +9323,7 @@ fn git_source_long_commit() -> Result<()> { uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", rev = "0dacfd662c64cb4ceb16e6cf65a157a8b715b979" } "#})?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("--preview") .arg("pyproject.toml"), @r###" success: true @@ -9397,7 +9360,7 @@ fn git_source_short_commit() -> Result<()> { uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", rev = "0dacfd6" } "#})?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("--preview") .arg("pyproject.toml"), @r###" success: true @@ -9434,7 +9397,7 @@ fn git_source_refs() -> Result<()> { uv-public-pypackage = { git = 
"https://github.com/astral-test/uv-public-pypackage", rev = "refs/pull/4/head" } "#})?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("--preview") .arg("pyproject.toml"), @r###" success: true @@ -9472,7 +9435,7 @@ fn git_source_missing_tag() -> Result<()> { uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", tag = "missing" } "#})?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("--preview") .arg("pyproject.toml"), @r###" success: false @@ -9511,7 +9474,7 @@ fn warn_missing_constraint() -> Result<()> { anyio = { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl" } "#})?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("--preview") .arg("pyproject.toml"), @r###" success: true @@ -9552,7 +9515,7 @@ fn dont_warn_missing_constraint_without_sources() -> Result<()> { ] "#})?; - uv_snapshot!(context.filters(), context.compile() + uv_snapshot!(context.filters(), context.pip_compile() .arg("--preview") .arg("pyproject.toml"), @r###" success: true @@ -9621,7 +9584,7 @@ fn tool_uv_sources() -> Result<()> { )?; // Install the editable packages. - uv_snapshot!(context.compile() + uv_snapshot!(context.pip_compile() .arg("--preview") .arg(require_path) .arg("--extra") @@ -9674,7 +9637,7 @@ fn dynamic_pyproject_toml() -> Result<()> { let bird_feeder = context.temp_dir.child("bird_feeder/__init__.py"); bird_feeder.write_str("__all__= []")?; - uv_snapshot!(context.compile().arg("pyproject.toml"), @r###" + uv_snapshot!(context.pip_compile().arg("pyproject.toml"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -9698,7 +9661,7 @@ fn file_url() -> Result<()> { let url = Url::from_file_path(requirements_txt.simple_canonicalize()?).expect("valid file URL"); - uv_snapshot!(context.filters(), context.compile().arg(url.to_string()), @r###" + uv_snapshot!(context.filters(), context.pip_compile().arg(url.to_string()), @r###" success: true exit_code: 0 ----- stdout ----- @@ -9721,7 +9684,8 @@ fn no_binary_only_binary() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("source-distribution")?; - uv_snapshot!(context.compile_without_exclude_newer() + uv_snapshot!(context + .pip_compile_without_exclude_newer() .arg("requirements.in") .arg("--only-binary") .arg(":all:"), @r###" @@ -9736,7 +9700,8 @@ fn no_binary_only_binary() -> Result<()> { "### ); - uv_snapshot!(context.compile_without_exclude_newer() + uv_snapshot!(context + .pip_compile_without_exclude_newer() .arg("requirements.in") .arg("--only-binary") .arg(":all:") diff --git a/crates/uv/tests/pip_compile_scenarios.rs b/crates/uv/tests/pip_compile_scenarios.rs index a830a58fe00e..6df0e7233e48 100644 --- a/crates/uv/tests/pip_compile_scenarios.rs +++ b/crates/uv/tests/pip_compile_scenarios.rs @@ -29,19 +29,9 @@ fn command(context: &TestContext, python_versions: &[&str]) -> Command { .arg("--index-url") .arg("https://astral-sh.github.io/packse/0.3.29/simple-html/") .arg("--find-links") - .arg("https://raw.githubusercontent.com/astral-sh/packse/0.3.29/vendor/links.html") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .env("UV_TEST_PYTHON_PATH", python_path) - .current_dir(&context.temp_dir); - - if 
cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (8 * 1024 * 1024).to_string()); - } + .arg("https://raw.githubusercontent.com/astral-sh/packse/0.3.29/vendor/links.html"); + context.add_shared_args(&mut command); + command.env("UV_TEST_PYTHON_PATH", python_path); command } diff --git a/crates/uv/tests/pip_freeze.rs b/crates/uv/tests/pip_freeze.rs index d4e340e7b2c1..5d963977d5d7 100644 --- a/crates/uv/tests/pip_freeze.rs +++ b/crates/uv/tests/pip_freeze.rs @@ -7,44 +7,15 @@ use assert_cmd::prelude::*; use assert_fs::fixture::ChildPath; use assert_fs::prelude::*; -use crate::common::{get_bin, uv_snapshot, TestContext, EXCLUDE_NEWER}; +use crate::common::{get_bin, uv_snapshot, TestContext}; mod common; /// Create a `pip freeze` command with options shared across scenarios. fn command(context: &TestContext) -> Command { let mut command = Command::new(get_bin()); - command - .arg("pip") - .arg("freeze") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir); - command -} - -/// Create a `pip install` command with options shared across scenarios. -fn sync_command(context: &TestContext) -> Command { - let mut command = Command::new(get_bin()); - command - .arg("pip") - .arg("sync") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string()); - } - + command.arg("pip").arg("freeze"); + context.add_shared_args(&mut command); command } @@ -57,7 +28,8 @@ fn freeze_many() -> Result<()> { requirements_txt.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?; // Run `pip sync`. - sync_command(&context) + context + .pip_sync() .arg(requirements_txt.path()) .assert() .success(); @@ -90,7 +62,8 @@ fn freeze_duplicate() -> Result<()> { requirements_txt.write_str("pip==21.3.1")?; // Run `pip sync`. - sync_command(&context1) + context1 + .pip_sync() .arg(requirements_txt.path()) .assert() .success(); @@ -101,7 +74,8 @@ fn freeze_duplicate() -> Result<()> { requirements_txt.write_str("pip==22.1.1")?; // Run `pip sync`. - sync_command(&context2) + context2 + .pip_sync() .arg(requirements_txt.path()) .assert() .success(); @@ -139,7 +113,8 @@ fn freeze_url() -> Result<()> { requirements_txt.write_str("anyio\niniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl")?; // Run `pip sync`. - sync_command(&context) + context + .pip_sync() .arg(requirements_txt.path()) .assert() .success(); @@ -176,7 +151,8 @@ fn freeze_with_editable() -> Result<()> { ))?; // Run `pip sync`. - sync_command(&context) + context + .pip_sync() .arg(requirements_txt.path()) .assert() .success(); diff --git a/crates/uv/tests/pip_install.rs b/crates/uv/tests/pip_install.rs index 5d69ec048880..8d03703219aa 100644 --- a/crates/uv/tests/pip_install.rs +++ b/crates/uv/tests/pip_install.rs @@ -48,32 +48,12 @@ fn decode_token(content: &[&str]) -> String { token } -/// Create a `pip uninstall` command with options shared across scenarios. 
-fn uninstall_command(context: &TestContext) -> Command { - let mut command = Command::new(get_bin()); - command - .arg("pip") - .arg("uninstall") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .current_dir(&context.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string()); - } - - command -} - #[test] fn missing_requirements_txt() { let context = TestContext::new("3.12"); let requirements_txt = context.temp_dir.child("requirements.txt"); - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -95,7 +75,7 @@ fn empty_requirements_txt() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.touch()?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -116,7 +96,7 @@ fn missing_pyproject_toml() { let context = TestContext::new("3.12"); - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("pyproject.toml"), @r###" success: false @@ -135,7 +115,7 @@ fn invalid_pyproject_toml_syntax() -> Result<()> { let pyproject_toml = context.temp_dir.child("pyproject.toml"); pyproject_toml.write_str("123 - 456")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("pyproject.toml"), @r###" success: false @@ -163,7 +143,7 @@ fn invalid_pyproject_toml_schema() -> Result<()> { let pyproject_toml = context.temp_dir.child("pyproject.toml"); pyproject_toml.write_str("[project]")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("pyproject.toml"), @r###" success: false @@ -212,7 +192,7 @@ dependencies = ["flask==1.0.x"] .chain(context.filters()) .collect::<Vec<_>>(); - uv_snapshot!(filters, context.install() + uv_snapshot!(filters, context.pip_install() .arg("-r") .arg("requirements.txt"), @r###" success: false @@ -301,7 +281,7 @@ fn missing_pip() { fn no_solution() { let context = TestContext::new("3.12"); - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("flask>=3.0.2") .arg("WerkZeug<1.0.0") .arg("--strict"), @r###" @@ -322,7 +302,7 @@ fn install_package() { let context = TestContext::new("3.12"); // Install Flask.
- uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("Flask") .arg("--strict"), @r###" success: true @@ -355,7 +335,7 @@ fn install_requirements_txt() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("Flask")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -383,7 +363,7 @@ fn install_requirements_txt() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("Jinja2")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -423,7 +403,7 @@ werkzeug==3.0.1 ", )?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -467,7 +447,7 @@ build-backend = "poetry.core.masonry.api" "#, )?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("pyproject.toml") .arg("--extra") @@ -499,7 +479,7 @@ fn respect_installed_and_reinstall() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("Flask==2.3.2")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -527,7 +507,7 @@ fn respect_installed_and_reinstall() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("Flask")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -556,7 +536,7 @@ fn respect_installed_and_reinstall() -> Result<()> { } else { context.filters() }; - uv_snapshot!(filters, context.install() + uv_snapshot!(filters, context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -578,7 +558,7 @@ fn respect_installed_and_reinstall() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("Flask")?; - uv_snapshot!(filters, context.install() + uv_snapshot!(filters, context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--reinstall-package") @@ -602,7 +582,7 @@ fn respect_installed_and_reinstall() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("Flask")?; - uv_snapshot!(filters, context.install() + uv_snapshot!(filters, context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--reinstall-package") @@ -633,7 +613,7 @@ fn reinstall_extras() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("httpx")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -661,7 +641,7 @@ fn reinstall_extras() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("httpx[http2]")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -693,7 +673,7 @@ fn reinstall_incomplete() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("anyio==3.7.0")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt"), @r###" success: true @@ -717,7 +697,7 @@ fn 
reinstall_incomplete() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("anyio==4.0.0")?; - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-r") .arg("requirements.txt"), @r###" success: true @@ -747,7 +727,7 @@ fn allow_incompatibilities() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("Flask")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -775,7 +755,7 @@ fn allow_incompatibilities() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("jinja2==2.11.3")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -805,7 +785,7 @@ fn install_editable() { let context = TestContext::new("3.12"); // Install the editable package. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r###" success: true @@ -824,7 +804,7 @@ fn install_editable() { ); // Install it again (no-op). - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r###" success: true @@ -837,7 +817,7 @@ fn install_editable() { ); // Add another, non-editable dependency. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/poetry_editable")) .arg("black"), @r###" @@ -864,7 +844,7 @@ fn install_editable_and_registry() { let context = TestContext::new("3.12"); // Install the registry-based version of Black. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("black"), @r###" success: true exit_code: 0 @@ -884,7 +864,7 @@ fn install_editable_and_registry() { ); // Install the editable version of Black. This should remove the registry-based version. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/black_editable")), @r###" success: true @@ -903,7 +883,7 @@ fn install_editable_and_registry() { // Re-install the registry-based version of Black. This should be a no-op, since we have a // version of Black installed (the editable version) that satisfies the requirements. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("black") .arg("--strict"), @r###" success: true @@ -925,7 +905,7 @@ fn install_editable_and_registry() { .collect(); // Re-install Black at a specific version. This should replace the editable version. 
- uv_snapshot!(filters, context.install() + uv_snapshot!(filters, context.pip_install() .arg("black==23.10.0"), @r###" success: true exit_code: 0 @@ -947,7 +927,7 @@ fn install_editable_no_binary() { let context = TestContext::new("3.12"); // Install the editable package with no-binary enabled - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/black_editable")) .arg("--no-binary") @@ -973,7 +953,7 @@ fn install_editable_compatible_constraint() -> Result<()> { constraints_txt.write_str("black==0.1.0")?; // Install the editable package with a compatible constraint. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/black_editable")) .arg("--constraint") @@ -1001,7 +981,7 @@ fn install_editable_incompatible_constraint_version() -> Result<()> { constraints_txt.write_str("black>0.1.0")?; // Install the editable package with an incompatible constraint. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/black_editable")) .arg("--constraint") @@ -1027,7 +1007,7 @@ fn install_editable_incompatible_constraint_url() -> Result<()> { constraints_txt.write_str("black @ https://files.pythonhosted.org/packages/0f/89/294c9a6b6c75a08da55e9d05321d0707e9418735e3062b12ef0f54c33474/black-24.4.2-py3-none-any.whl")?; // Install the editable package with an incompatible constraint. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/black_editable")) .arg("--constraint") @@ -1057,7 +1037,7 @@ fn install_editable_pep_508_requirements_txt() -> Result<()> { workspace_root = context.workspace_root.simplified_display(), })?; - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-r") .arg("requirements.txt"), @r###" success: true @@ -1086,7 +1066,7 @@ fn install_editable_pep_508_requirements_txt() -> Result<()> { fn install_editable_pep_508_cli() { let context = TestContext::new("3.12"); - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(format!("black[d] @ file://{workspace_root}/scripts/packages/black_editable", workspace_root = context.workspace_root.simplified_display())), @r###" success: true @@ -1115,7 +1095,7 @@ fn install_editable_bare_cli() { let packages_dir = context.workspace_root.join("scripts/packages"); - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg("black_editable") .current_dir(&packages_dir), @r###" @@ -1141,7 +1121,7 @@ fn install_editable_bare_requirements_txt() -> Result<()> { let packages_dir = context.workspace_root.join("scripts/packages"); - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-r") .arg(requirements_txt.path()) .current_dir(&packages_dir), @r###" @@ -1167,7 +1147,7 @@ fn invalid_editable_no_url() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("-e black==0.1.0")?; - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-r") 
.arg("requirements.txt"), @r###" success: false @@ -1190,7 +1170,7 @@ fn invalid_editable_unnamed_https_url() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("-e https://files.pythonhosted.org/packages/0f/89/294c9a6b6c75a08da55e9d05321d0707e9418735e3062b12ef0f54c33474/black-24.4.2-py3-none-any.whl")?; - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-r") .arg("requirements.txt"), @r###" success: false @@ -1213,7 +1193,7 @@ fn invalid_editable_named_https_url() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("-e black @ https://files.pythonhosted.org/packages/0f/89/294c9a6b6c75a08da55e9d05321d0707e9418735e3062b12ef0f54c33474/black-24.4.2-py3-none-any.whl")?; - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-r") .arg("requirements.txt"), @r###" success: false @@ -1243,7 +1223,7 @@ fn reinstall_build_system() -> Result<()> { " })?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("--reinstall") .arg("-r") .arg("requirements.txt") @@ -1275,7 +1255,7 @@ fn reinstall_build_system() -> Result<()> { fn install_no_index() { let context = TestContext::new("3.12"); - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("Flask") .arg("--no-index"), @r###" success: false @@ -1299,7 +1279,7 @@ fn install_no_index() { fn install_no_index_version() { let context = TestContext::new("3.12"); - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("Flask==3.0.0") .arg("--no-index"), @r###" success: false @@ -1334,7 +1314,7 @@ fn install_no_index_version() { fn install_extra_index_url_has_priority() { let context = TestContext::new("3.12"); - uv_snapshot!(context.install_without_exclude_newer() + uv_snapshot!(context.pip_install_without_exclude_newer() .arg("--index-url") .arg("https://test.pypi.org/simple") .arg("--extra-index-url") @@ -1373,7 +1353,7 @@ fn install_git_public_https() { uv_snapshot!( context - .install() + .pip_install() .arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage"), @r###" success: true @@ -1401,7 +1381,7 @@ fn install_git_public_https_missing_branch_or_tag() { filters.push(("`git fetch .*`", "`git fetch [...]`")); filters.push(("exit status", "exit code")); - uv_snapshot!(filters, context.install() + uv_snapshot!(filters, context.pip_install() // 2.0.0 does not exist .arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@2.0.0"), @r###" success: false @@ -1437,7 +1417,7 @@ fn install_git_public_https_missing_commit() { "", )); - uv_snapshot!(filters, context.install() + uv_snapshot!(filters, context.pip_install() // 2.0.0 does not exist .arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@79a935a7a1a0ad6d0bdf72dce0e16cb0a24a1b3b") , @r###" @@ -1473,7 +1453,7 @@ fn install_git_private_https_pat() { "uv-private-pypackage@ git+https://{token}@github.com/astral-test/uv-private-pypackage" ); - uv_snapshot!(filters, context.install().arg(package) + uv_snapshot!(filters, context.pip_install().arg(package) , @r###" success: true exit_code: 0 @@ -1506,7 +1486,7 @@ fn install_git_private_https_pat_mixed_with_public() { "uv-private-pypackage @ git+https://{token}@github.com/astral-test/uv-private-pypackage" ); - uv_snapshot!(filters, context.install().arg(package).arg("uv-public-pypackage @ 
git+https://github.com/astral-test/uv-public-pypackage"), + uv_snapshot!(filters, context.pip_install().arg(package).arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage"), @r###" success: true exit_code: 0 @@ -1543,7 +1523,7 @@ fn install_git_private_https_multiple_pat() { "uv-private-pypackage-2 @ git+https://{token_2}@github.com/astral-test/uv-private-pypackage-2" ); - uv_snapshot!(filters, context.install().arg(package_1).arg(package_2) + uv_snapshot!(filters, context.pip_install().arg(package_1).arg(package_2) , @r###" success: true exit_code: 0 @@ -1583,7 +1563,7 @@ fn install_git_private_https_pat_at_ref() { }; let package = format!("uv-private-pypackage @ git+https://{user}{token}@github.com/astral-test/uv-private-pypackage@6c09ce9ae81f50670a60abd7d95f30dd416d00ac"); - uv_snapshot!(filters, context.install() + uv_snapshot!(filters, context.pip_install() .arg(package), @r###" success: true exit_code: 0 @@ -1617,7 +1597,7 @@ fn install_git_private_https_pat_and_username() { .chain(context.filters()) .collect(); - uv_snapshot!(filters, context.install().arg(format!("uv-private-pypackage @ git+https://{user}:{token}@github.com/astral-test/uv-private-pypackage")) + uv_snapshot!(filters, context.pip_install().arg(format!("uv-private-pypackage @ git+https://{user}:{token}@github.com/astral-test/uv-private-pypackage")) , @r###" success: true exit_code: 0 @@ -1647,7 +1627,7 @@ fn install_git_private_https_pat_not_authorized() { // We provide a username otherwise (since the token is invalid), the git cli will prompt for a password // and hang the test - uv_snapshot!(filters, context.install() + uv_snapshot!(filters, context.pip_install() .arg(format!("uv-private-pypackage @ git+https://git:{token}@github.com/astral-test/uv-private-pypackage")) , @r###" success: false @@ -1686,7 +1666,7 @@ fn install_github_artifact_private_https_pat_mixed_with_public() { ); let public_package = "uv-public-pypackage @ https://raw.githubusercontent.com/astral-test/uv-public-pypackage/main/dist/uv_public_pypackage-0.1.0-py3-none-any.whl"; - uv_snapshot!(filters, context.install().arg(private_package).arg(public_package), + uv_snapshot!(filters, context.pip_install().arg(private_package).arg(public_package), @r###" success: true exit_code: 0 @@ -1724,7 +1704,7 @@ fn install_github_artifact_private_https_multiple_pat() { "uv-private-pypackage-2 @ https://astral-test-bot:{token_2}@raw.githubusercontent.com/astral-test/uv-private-pypackage-2/main/dist/uv_private_pypackage_2-0.1.0-py3-none-any.whl" ); - uv_snapshot!(filters, context.install().arg(package_1).arg(package_2) + uv_snapshot!(filters, context.pip_install().arg(package_1).arg(package_2) , @r###" success: true exit_code: 0 @@ -1747,7 +1727,7 @@ fn reinstall_no_binary() { let context = TestContext::new("3.12"); // The first installation should use a pre-built wheel - let mut command = context.install(); + let mut command = context.pip_install(); command.arg("anyio").arg("--strict"); uv_snapshot!( command, @@ -1770,7 +1750,7 @@ fn reinstall_no_binary() { // Running installation again with `--no-binary` should be a no-op // The first installation should use a pre-built wheel - let mut command = context.install(); + let mut command = context.pip_install(); command .arg("anyio") .arg("--no-binary") @@ -1800,7 +1780,7 @@ fn reinstall_no_binary() { context.filters() }; - let mut command = context.install(); + let mut command = context.pip_install(); command .arg("anyio") .arg("--no-binary") @@ -1831,7 +1811,7 @@ fn 
install_no_binary_overrides_only_binary_all() { let context = TestContext::new("3.12"); // The specific `--no-binary` should override the less specific `--only-binary` - let mut command = context.install(); + let mut command = context.pip_install(); command .arg("anyio") .arg("--only-binary") @@ -1865,7 +1845,7 @@ fn install_only_binary_overrides_no_binary_all() { let context = TestContext::new("3.12"); // The specific `--only-binary` should override the less specific `--no-binary` - let mut command = context.install(); + let mut command = context.pip_install(); command .arg("anyio") .arg("--no-binary") @@ -1900,7 +1880,7 @@ fn install_only_binary_all_and_no_binary_all() { let context = TestContext::new("3.12"); // With both as `:all:` we can't install - let mut command = context.install(); + let mut command = context.pip_install(); command .arg("anyio") .arg("--no-binary") @@ -2034,7 +2014,7 @@ fn only_binary_requirements_txt() { }) .unwrap(); - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -2055,7 +2035,7 @@ fn only_binary_editable() { let context = TestContext::new("3.12"); // Install the editable package. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("--only-binary") .arg(":all:") .arg("-e") @@ -2082,7 +2062,7 @@ fn only_binary_dependent_editables() { .join("scripts/packages/dependent_locals"); // Install the editable package. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("--only-binary") .arg(":all:") .arg("-e") @@ -2109,7 +2089,7 @@ fn only_binary_editable_setup_py() { let context = TestContext::new("3.12"); // Install the editable package. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("--only-binary") .arg(":all:") .arg("-e") @@ -2142,7 +2122,7 @@ fn only_binary_editable_setup_py() { fn install_executable() { let context = TestContext::new("3.12"); - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("pylint==3.0.0"), @r###" success: true exit_code: 0 @@ -2176,7 +2156,7 @@ fn install_executable() { fn install_executable_copy() { let context = TestContext::new("3.12"); - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("pylint==3.0.0") .arg("--link-mode") .arg("copy"), @r###" @@ -2212,7 +2192,7 @@ fn install_executable_copy() { fn install_executable_hardlink() { let context = TestContext::new("3.12"); - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("pylint==3.0.0") .arg("--link-mode") .arg("hardlink"), @r###" @@ -2248,7 +2228,7 @@ fn no_deps() { let context = TestContext::new("3.12"); // Install Flask. - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("Flask") .arg("--no-deps") .arg("--strict"), @r###" @@ -2279,7 +2259,7 @@ fn no_deps_editable() { let context = TestContext::new("3.12"); // Install the editable version of Black. This should remove the registry-based version. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("--no-deps") .arg("-e") .arg(context.workspace_root.join("scripts/packages/black_editable[dev]")), @r###" @@ -2305,7 +2285,7 @@ fn install_upgrade() { let context = TestContext::new("3.12"); // Install an old version of anyio and httpcore. 
- uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("anyio==3.6.2") .arg("httpcore==0.16.3") .arg("--strict"), @r###" @@ -2329,7 +2309,7 @@ fn install_upgrade() { context.assert_command("import anyio").success(); // Upgrade anyio. - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("anyio") .arg("--upgrade-package") .arg("anyio"), @r###" @@ -2348,7 +2328,7 @@ fn install_upgrade() { ); // Upgrade anyio again, should not reinstall. - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("anyio") .arg("--upgrade-package") .arg("anyio"), @r###" @@ -2363,7 +2343,7 @@ fn install_upgrade() { ); // Install httpcore, request anyio upgrade should not reinstall - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("httpcore") .arg("--upgrade-package") .arg("anyio"), @r###" @@ -2378,7 +2358,7 @@ fn install_upgrade() { ); // Upgrade httpcore with global flag - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("httpcore") .arg("--upgrade"), @r###" success: true @@ -2406,7 +2386,7 @@ fn install_constraints_txt() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("idna<3.4")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--constraint") @@ -2438,7 +2418,7 @@ fn install_constraints_inline() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("idna<3.4")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt"), @r###" success: true @@ -2463,7 +2443,7 @@ fn install_constraints_inline() -> Result<()> { fn install_constraints_remote() { let context = TestContext::new("3.12"); - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-c") .arg("https://raw.githubusercontent.com/apache/airflow/constraints-2-6/constraints-3.11.txt") .arg("typing_extensions>=4.0"), @r###" @@ -2488,7 +2468,7 @@ fn install_constraints_inline_remote() -> Result<()> { let requirementstxt = context.temp_dir.child("requirements.txt"); requirementstxt.write_str("typing-extensions>=4.0\n-c https://raw.githubusercontent.com/apache/airflow/constraints-2-6/constraints-3.11.txt")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt"), @r###" success: true @@ -2517,7 +2497,7 @@ fn install_constraints_extra() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("python-dotenv==1.0.0")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("-c") @@ -2548,7 +2528,7 @@ fn install_constraints_extra() -> Result<()> { fn install_constraints_respects_offline_mode() { let context = TestContext::new("3.12"); - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("--offline") .arg("-r") .arg("http://example.com/requirements.txt"), @r###" @@ -2576,7 +2556,7 @@ fn install_pinned_polars_invalid_metadata() { let context = TestContext::new("3.12"); // Install Flask. 
- uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("polars==0.14.0"), @r###" success: true @@ -2602,7 +2582,7 @@ fn install_sdist_resolution_lowest() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.in") .arg("--resolution=lowest-direct"), @r###" @@ -2635,7 +2615,7 @@ fn direct_url_zip_file_bunk_permissions() -> Result<()> { "opensafely-pipeline @ https://github.com/opensafely-core/pipeline/archive/refs/tags/v2023.11.06.145820.zip", )?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -2674,7 +2654,7 @@ fn launcher() -> Result<()> { uv_snapshot!( filters, - context.install() + context.pip_install() .arg(format!("simple_launcher@{}", project_root.join("scripts/links/simple_launcher-0.1.0-py3-none-any.whl").display())) .arg("--strict"), @r###" success: true @@ -2719,7 +2699,7 @@ fn launcher_with_symlink() -> Result<()> { ]; uv_snapshot!(filters, - context.install() + context.pip_install() .arg(format!("simple_launcher@{}", project_root.join("scripts/links/simple_launcher-0.1.0-py3-none-any.whl").display())) .arg("--strict"), @r###" @@ -2777,7 +2757,7 @@ fn config_settings() { let context = TestContext::new("3.12"); // Install the editable package. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/setuptools_editable")), @r###" success: true @@ -2802,7 +2782,7 @@ fn config_settings() { // Install the editable package with `--editable_mode=compat`. let context = TestContext::new("3.12"); - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/setuptools_editable")) .arg("-C") @@ -2840,7 +2820,7 @@ fn reinstall_duplicate() -> Result<()> { // Run `pip sync`. context1 - .install() + .pip_install() .arg("-r") .arg(requirements_txt.path()) .assert() @@ -2853,7 +2833,7 @@ fn reinstall_duplicate() -> Result<()> { // Run `pip sync`. context2 - .install() + .pip_install() .arg("-r") .arg(requirements_txt.path()) .assert() @@ -2866,7 +2846,7 @@ fn reinstall_duplicate() -> Result<()> { )?; // Run `pip install`. - uv_snapshot!(context1.install() + uv_snapshot!(context1.pip_install() .arg("pip") .arg("--reinstall"), @r###" @@ -2893,7 +2873,7 @@ fn reinstall_duplicate() -> Result<()> { fn install_symlink() { let context = TestContext::new("3.12"); - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("pgpdump==1.5") .arg("--strict"), @r###" success: true @@ -2910,7 +2890,8 @@ fn install_symlink() { context.assert_command("import pgpdump").success(); - uv_snapshot!(uninstall_command(&context) + uv_snapshot!(context + .pip_uninstall() .arg("pgpdump"), @r###" success: true exit_code: 0 @@ -2942,7 +2923,7 @@ requires-python = ">=3.8" "#, )?; - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("--editable") .arg(editable_dir.path()), @r###" success: true @@ -2961,7 +2942,7 @@ requires-python = ">=3.8" ); // Re-installing should be a no-op. 
- uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("--editable") .arg(editable_dir.path()), @r###" success: true @@ -2986,7 +2967,7 @@ requires-python = ">=3.8" )?; // Re-installing should update the package. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("--editable") .arg(editable_dir.path()), @r###" success: true @@ -3032,7 +3013,7 @@ dependencies = {file = ["requirements.txt"]} let requirements_txt = editable_dir.child("requirements.txt"); requirements_txt.write_str("anyio==4.0.0")?; - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("--editable") .arg(editable_dir.path()), @r###" success: true @@ -3051,7 +3032,7 @@ dependencies = {file = ["requirements.txt"]} ); // Re-installing should re-install. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("--editable") .arg(editable_dir.path()), @r###" success: true @@ -3072,7 +3053,7 @@ dependencies = {file = ["requirements.txt"]} requirements_txt.write_str("anyio==3.7.1")?; // Re-installing should update the package. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("--editable") .arg(editable_dir.path()), @r###" success: true @@ -3113,7 +3094,7 @@ requires-python = ">=3.8" "#, )?; - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("example @ .") .current_dir(editable_dir.path()), @r###" success: true @@ -3132,7 +3113,7 @@ requires-python = ">=3.8" ); // Re-installing should be a no-op. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("example @ .") .current_dir(editable_dir.path()), @r###" success: true @@ -3157,7 +3138,7 @@ requires-python = ">=3.8" )?; // Re-installing should update the package. 
- uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("example @ .") .current_dir(editable_dir.path()), @r###" success: true @@ -3184,7 +3165,7 @@ requires-python = ">=3.8" fn path_name_version_change() { let context = TestContext::new("3.12"); - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(context.workspace_root.join("scripts/links/ok-1.0.0-py3-none-any.whl")), @r###" success: true exit_code: 0 @@ -3199,7 +3180,7 @@ fn path_name_version_change() { ); // Installing the same path again should be a no-op - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(context.workspace_root.join("scripts/links/ok-1.0.0-py3-none-any.whl")), @r###" success: true exit_code: 0 @@ -3211,7 +3192,7 @@ fn path_name_version_change() { ); // Installing a new path should succeed - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(context.workspace_root.join("scripts/links/ok-2.0.0-py3-none-any.whl")), @r###" success: true exit_code: 0 @@ -3228,7 +3209,7 @@ fn path_name_version_change() { ); // Installing a new path should succeed regardless of which version is "newer" - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(context.workspace_root.join("scripts/links/ok-1.0.0-py3-none-any.whl")), @r###" success: true exit_code: 0 @@ -3264,7 +3245,7 @@ fn path_changes_with_same_name() -> Result<()> { fs_err::copy(&wheel, &one_wheel)?; fs_err::copy(&wheel, &two_wheel)?; - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(one_wheel.as_os_str()), @r###" success: true exit_code: 0 @@ -3278,7 +3259,7 @@ fn path_changes_with_same_name() -> Result<()> { "### ); - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(two_wheel.as_os_str()), @r###" success: true exit_code: 0 @@ -3318,7 +3299,7 @@ requires-python = ">=3.11,<3.13" "#, )?; - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("--editable") .arg(editable_dir.path()), @r###" success: true @@ -3359,7 +3340,7 @@ requires-python = "<=3.8" "#, )?; - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("--editable") .arg(editable_dir.path()), @r###" success: false @@ -3387,7 +3368,7 @@ fn no_build_isolation() -> Result<()> { let filters = std::iter::once((r"exit code: 1", "exit status: 1")) .chain(context.filters()) .collect::>(); - uv_snapshot!(filters, context.install() + uv_snapshot!(filters, context.pip_install() .arg("-r") .arg("requirements.in") .arg("--no-build-isolation"), @r###" @@ -3410,7 +3391,7 @@ fn no_build_isolation() -> Result<()> { ); // Install `setuptools` and `wheel`. - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("setuptools") .arg("wheel"), @r###" success: true @@ -3426,7 +3407,7 @@ fn no_build_isolation() -> Result<()> { "###); // We expect the build to succeed, since `setuptools` is now installed. 
- uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.in") .arg("--no-build-isolation"), @r###" @@ -3458,7 +3439,7 @@ fn respect_no_build_isolation_env_var() -> Result<()> { let filters = std::iter::once((r"exit code: 1", "exit status: 1")) .chain(context.filters()) .collect::>(); - uv_snapshot!(filters, context.install() + uv_snapshot!(filters, context.pip_install() .arg("-r") .arg("requirements.in") .env("UV_NO_BUILD_ISOLATION", "yes"), @r###" @@ -3481,7 +3462,7 @@ fn respect_no_build_isolation_env_var() -> Result<()> { ); // Install `setuptools` and `wheel`. - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("setuptools") .arg("wheel"), @r###" success: true @@ -3497,7 +3478,7 @@ fn respect_no_build_isolation_env_var() -> Result<()> { "###); // We expect the build to succeed, since `setuptools` is now installed. - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.in") .env("UV_NO_BUILD_ISOLATION", "yes"), @r###" @@ -3527,7 +3508,7 @@ fn install_utf16le_requirements() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_binary(&utf8_to_utf16_with_bom_le("tomli"))?; - uv_snapshot!(context.install_without_exclude_newer() + uv_snapshot!(context.pip_install_without_exclude_newer() .arg("-r") .arg("requirements.txt"), @r###" success: true @@ -3553,7 +3534,7 @@ fn install_utf16be_requirements() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_binary(&utf8_to_utf16_with_bom_be("tomli"))?; - uv_snapshot!(context.install_without_exclude_newer() + uv_snapshot!(context.pip_install_without_exclude_newer() .arg("-r") .arg("requirements.txt"), @r###" success: true @@ -3596,7 +3577,7 @@ fn dry_run_install() -> std::result::Result<(), Box> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("httpx==0.25.1")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--dry-run") @@ -3628,7 +3609,7 @@ fn dry_run_install_url_dependency() -> std::result::Result<(), Box std::result::Result<(), Box std::result::Result<(), Box std::result::Result<(), Box std::result::Result<(), Box std::result::Result<(), Box std::result::Result<(), Box Result<()> { let netrc = context.temp_dir.child(".netrc"); netrc.write_str("default login public password heron")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("anyio") .arg("--index-url") .arg("https://pypi-proxy.fly.dev/basic-auth/simple") @@ -3955,7 +3936,7 @@ fn install_package_basic_auth_from_netrc() -> Result<()> { let netrc = context.temp_dir.child(".netrc"); netrc.write_str("machine pypi-proxy.fly.dev login public password heron")?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("anyio") .arg("--index-url") .arg("https://pypi-proxy.fly.dev/basic-auth/simple") @@ -3996,7 +3977,7 @@ anyio ", )?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .env("NETRC", netrc.to_str().unwrap()) @@ -4025,7 +4006,7 @@ anyio fn install_index_with_relative_links() { let context = TestContext::new("3.12"); - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("anyio") .arg("--index-url") .arg("https://pypi-proxy.fly.dev/relative/simple") @@ -4054,7 +4035,7 @@ fn install_package_basic_auth_from_keyring() { // Install our keyring plugin 
context - .install() + .pip_install() .arg( context .workspace_root @@ -4065,7 +4046,7 @@ fn install_package_basic_auth_from_keyring() { .assert() .success(); - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("anyio") .arg("--index-url") .arg("https://public@pypi-proxy.fly.dev/basic-auth/simple") @@ -4101,7 +4082,7 @@ fn install_package_basic_auth_from_keyring_wrong_password() { // Install our keyring plugin context - .install() + .pip_install() .arg( context .workspace_root @@ -4112,7 +4093,7 @@ fn install_package_basic_auth_from_keyring_wrong_password() { .assert() .success(); - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("anyio") .arg("--index-url") .arg("https://public@pypi-proxy.fly.dev/basic-auth/simple") @@ -4142,7 +4123,7 @@ fn install_package_basic_auth_from_keyring_wrong_username() { // Install our keyring plugin context - .install() + .pip_install() .arg( context .workspace_root @@ -4153,7 +4134,7 @@ fn install_package_basic_auth_from_keyring_wrong_username() { .assert() .success(); - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("anyio") .arg("--index-url") .arg("https://public@pypi-proxy.fly.dev/basic-auth/simple") @@ -4180,7 +4161,7 @@ fn install_package_basic_auth_from_keyring_wrong_username() { fn install_index_with_relative_links_authenticated() { let context = TestContext::new("3.12"); - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("anyio") .arg("--index-url") .arg("https://public:heron@pypi-proxy.fly.dev/basic-auth/relative/simple") @@ -4218,7 +4199,7 @@ fn install_site_packages_mtime_updated() -> Result<()> { let pre_mtime_ns = metadata.mtime_nsec(); // Install a package. - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("anyio") .arg("--strict"), @r###" success: true @@ -4258,7 +4239,7 @@ fn deptry_gitignore() { .workspace_root .join("scripts/packages/deptry_reproducer"); - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(format!("deptry_reproducer @ {}", source_dist_dir.join("deptry_reproducer-0.1.0.tar.gz").simplified_display())) .arg("--strict") .current_dir(source_dist_dir), @r###" @@ -4288,7 +4269,7 @@ fn reinstall_no_index() { let context = TestContext::new("3.12"); // Install anyio - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("anyio") .arg("--strict"), @r###" success: true @@ -4306,7 +4287,7 @@ fn reinstall_no_index() { ); // Install anyio again - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("anyio") .arg("--no-index") .arg("--strict"), @r###" @@ -4322,7 +4303,7 @@ fn reinstall_no_index() { // Reinstall // We should not consider the already installed package as a source and // should attempt to pull from the index - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("anyio") .arg("--no-index") .arg("--reinstall") @@ -4345,7 +4326,7 @@ fn already_installed_remote_dependencies() { let context = TestContext::new("3.12"); // Install anyio's dependencies. - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("idna") .arg("sniffio") .arg("--strict"), @r###" @@ -4363,7 +4344,7 @@ fn already_installed_remote_dependencies() { ); // Install anyio. 
- uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("anyio") .arg("--strict"), @r###" success: true @@ -4388,7 +4369,7 @@ fn already_installed_dependent_editable() { .join("scripts/packages/dependent_locals"); // Install the first editable - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(root_path.join("first_local")), @r###" success: true @@ -4405,7 +4386,7 @@ fn already_installed_dependent_editable() { // Install the second editable which depends on the first editable // The already installed first editable package should satisfy the requirement - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(root_path.join("second_local")) // Disable the index to guard this test against dependency confusion attacks @@ -4426,7 +4407,7 @@ fn already_installed_dependent_editable() { // Request install of the first editable by full path again // We should audit the installed package - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(root_path.join("first_local")), @r###" success: true @@ -4440,7 +4421,7 @@ fn already_installed_dependent_editable() { // Request reinstallation of the first package during install of the second // It's not available on an index and the user has not specified the path so we fail. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(root_path.join("second_local")) .arg("--reinstall-package") @@ -4462,7 +4443,7 @@ fn already_installed_dependent_editable() { // Request reinstallation of the first package // We include it in the install command with a full path so we should succeed - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(root_path.join("first_local")) .arg("--reinstall-package") @@ -4491,7 +4472,7 @@ fn already_installed_local_path_dependent() { .join("scripts/packages/dependent_locals"); // Install the first local - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(root_path.join("first_local")), @r###" success: true exit_code: 0 @@ -4507,7 +4488,7 @@ fn already_installed_local_path_dependent() { // Install the second local which depends on the first local // The already installed first local package should satisfy the requirement - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(root_path.join("second_local")) // Disable the index to guard this test against dependency confusion attacks .arg("--no-index") @@ -4527,7 +4508,7 @@ fn already_installed_local_path_dependent() { // Request install of the first local by full path again // We should audit the installed package - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(root_path.join("first_local")), @r###" success: true exit_code: 0 @@ -4540,7 +4521,7 @@ fn already_installed_local_path_dependent() { // Request reinstallation of the first package during install of the second // It's not available on an index and the user has not specified the path so we fail - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(root_path.join("second_local")) .arg("--reinstall-package") .arg("first-local") @@ 
-4561,7 +4542,7 @@ fn already_installed_local_path_dependent() { // Request reinstallation of the first package // We include it in the install command with a full path so we succeed - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(root_path.join("second_local")) .arg(root_path.join("first_local")) .arg("--reinstall-package") @@ -4582,7 +4563,7 @@ fn already_installed_local_path_dependent() { // Request upgrade of the first package // It's not available on an index and the user has not specified the path so we fail - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(root_path.join("second_local")) .arg("--upgrade-package") .arg("first-local") @@ -4603,7 +4584,7 @@ fn already_installed_local_path_dependent() { // Request upgrade of the first package // A full path is specified and there's nothing to upgrade to so we should just audit - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(root_path.join("first_local")) .arg(root_path.join("second_local")) .arg("--upgrade-package") @@ -4630,7 +4611,7 @@ fn already_installed_local_version_of_remote_package() { let root_path = context.workspace_root.join("scripts/packages"); // Install the local anyio first - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(root_path.join("anyio_local")), @r###" success: true exit_code: 0 @@ -4645,7 +4626,7 @@ fn already_installed_local_version_of_remote_package() { ); // Install again without specifying a local path — this should not pull from the index - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("anyio"), @r###" success: true exit_code: 0 @@ -4659,7 +4640,7 @@ fn already_installed_local_version_of_remote_package() { // Request install with a different version // We should attempt to pull from the index since the installed version does not match // but we disable it here to preserve this dependency for future tests - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("anyio==4.2.0") .arg("--no-index"), @r###" success: false @@ -4676,7 +4657,7 @@ fn already_installed_local_version_of_remote_package() { // Request reinstallation with the local version segment — this should fail since it is not available // in the index and the path was not provided - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("anyio==4.3.0+foo") .arg("--reinstall"), @r###" success: false @@ -4691,7 +4672,7 @@ fn already_installed_local_version_of_remote_package() { // Request reinstall with the full path, this should reinstall from the path // and not pull from the index - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(root_path.join("anyio_local")) .arg("--reinstall") .arg("anyio"), @r###" @@ -4711,7 +4692,7 @@ fn already_installed_local_version_of_remote_package() { // Request reinstallation with just the name, this should pull from the index // and replace the path dependency - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("anyio") .arg("--reinstall"), @r###" success: true @@ -4731,7 +4712,7 @@ fn already_installed_local_version_of_remote_package() { ); // Install the local anyio 
again so we can test upgrades - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(root_path.join("anyio_local")), @r###" success: true exit_code: 0 @@ -4749,7 +4730,7 @@ fn already_installed_local_version_of_remote_package() { // Request upgrade with just the name // We shouldn't pull from the index because the local version is "newer" - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("anyio") .arg("--upgrade"), @r###" success: true @@ -4764,7 +4745,7 @@ fn already_installed_local_version_of_remote_package() { // Install something that depends on anyio // We shouldn't overwrite our local version with the remote anyio here - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("httpx"), @r###" success: true exit_code: 0 @@ -4790,12 +4771,12 @@ fn already_installed_multiple_versions() -> Result<()> { use crate::common::copy_dir_all; // Install into the base environment - context.install().arg("anyio==3.7.0").assert().success(); + context.pip_install().arg("anyio==3.7.0").assert().success(); // Install another version into another environment let context_duplicate = TestContext::new("3.12"); context_duplicate - .install() + .pip_install() .arg("anyio==4.0.0") .assert() .success(); @@ -4817,7 +4798,7 @@ fn already_installed_multiple_versions() -> Result<()> { // Request the second anyio version again // Should remove both previous versions and reinstall the second one - uv_snapshot!(context.filters(), context.install().arg("anyio==4.0.0"), @r###" + uv_snapshot!(context.filters(), context.pip_install().arg("anyio==4.0.0"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -4839,7 +4820,7 @@ fn already_installed_multiple_versions() -> Result<()> { // Request the anyio without a version specifier // This is loosely a regression test for the ordering of the installation preferences // from existing site-packages - uv_snapshot!(context.filters(), context.install().arg("anyio"), @r###" + uv_snapshot!(context.filters(), context.pip_install().arg("anyio"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -4865,7 +4846,7 @@ fn already_installed_remote_url() { let context = TestContext::new("3.8"); // First, install from the remote URL - uv_snapshot!(context.filters(), context.install().arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage"), @r###" + uv_snapshot!(context.filters(), context.pip_install().arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -4882,7 +4863,7 @@ fn already_installed_remote_url() { // Request installation again with a different URL, but the same _canonical_ URL. We should // resolve the package (since we installed a specific commit, but are now requesting the default // branch), but not reinstall the package. - uv_snapshot!(context.filters(), context.install().arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git"), @r###" + uv_snapshot!(context.filters(), context.pip_install().arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -4895,7 +4876,7 @@ fn already_installed_remote_url() { // Request installation again with a different URL, but the same _canonical_ URL and the same // commit. 
We should neither resolve nor reinstall the package, since it's already installed // at this precise commit. - uv_snapshot!(context.filters(), context.install().arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389"), @r###" + uv_snapshot!(context.filters(), context.pip_install().arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -4907,7 +4888,7 @@ fn already_installed_remote_url() { // Request installation again with just the name // We should just audit the URL package since it fulfills this requirement uv_snapshot!( - context.install().arg("uv-public-pypackage"), @r###" + context.pip_install().arg("uv-public-pypackage"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -4919,7 +4900,7 @@ fn already_installed_remote_url() { // Request reinstallation // We should fail since the URL was not provided uv_snapshot!( - context.install() + context.pip_install() .arg("uv-public-pypackage") .arg("--no-index") .arg("--reinstall"), @r###" @@ -4937,7 +4918,7 @@ fn already_installed_remote_url() { // Request installation again with just the full URL // We should just audit the existing package uv_snapshot!( - context.install().arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage"), @r###" + context.pip_install().arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -4950,7 +4931,7 @@ fn already_installed_remote_url() { // Request reinstallation with the full URL // We should reinstall successfully uv_snapshot!( - context.install() + context.pip_install() .arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage") .arg("--reinstall"), @r###" success: true @@ -4968,7 +4949,7 @@ fn already_installed_remote_url() { // Request installation again with a different version // We should attempt to pull from the index since the local version does not match uv_snapshot!( - context.install().arg("uv-public-pypackage==0.2.0").arg("--no-index"), @r###" + context.pip_install().arg("uv-public-pypackage==0.2.0").arg("--no-index"), @r###" success: false exit_code: 1 ----- stdout ----- @@ -4991,7 +4972,7 @@ fn find_links() -> Result<()> { tqdm "})?; - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("tqdm") .arg("--find-links") .arg(context.workspace_root.join("scripts/links/")), @r###" @@ -5020,7 +5001,7 @@ fn find_links_no_binary() -> Result<()> { tqdm "})?; - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("tqdm") .arg("--no-binary") .arg(":all:") @@ -5062,7 +5043,7 @@ fn require_hashes() -> Result<()> { # via anyio "})?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--require-hashes"), @r###" @@ -5096,7 +5077,7 @@ fn require_hashes_no_deps() -> Result<()> { --hash=sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a "})?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--no-deps") @@ -5128,7 +5109,7 @@ fn require_hashes_mismatch() -> Result<()> { )?; // Raise an error. 
- uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--require-hashes"), @r###" @@ -5156,7 +5137,7 @@ fn require_hashes_missing_dependency() -> Result<()> { )?; // Install without error when `--require-hashes` is omitted. - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--require-hashes"), @r###" @@ -5185,7 +5166,7 @@ fn require_hashes_editable() -> Result<()> { })?; // Install the editable packages. - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-r") .arg(requirements_txt.path()) .arg("--require-hashes"), @r###" @@ -5214,7 +5195,7 @@ fn require_hashes_constraint() -> Result<()> { constraints_txt.write_str("anyio==4.0.0 --hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f")?; // Install the editable packages. - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg(requirements_txt.path()) .arg("--require-hashes") @@ -5239,7 +5220,7 @@ fn require_hashes_constraint() -> Result<()> { constraints_txt.write_str("anyio==4.0.0")?; // Install the editable packages. - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg(requirements_txt.path()) .arg("--require-hashes") @@ -5276,7 +5257,7 @@ fn require_hashes_unnamed() -> Result<()> { # via anyio "})?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--require-hashes"), @r###" @@ -5321,7 +5302,7 @@ fn require_hashes_unnamed_repeated() -> Result<()> { # via anyio "})?; - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--require-hashes"), @r###" @@ -5357,7 +5338,7 @@ fn require_hashes_override() -> Result<()> { overrides_txt.write_str("anyio==4.0.0 --hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f")?; // Install the editable packages. - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg(requirements_txt.path()) .arg("--require-hashes") @@ -5382,7 +5363,7 @@ fn require_hashes_override() -> Result<()> { overrides_txt.write_str("anyio==4.0.0")?; // Install the editable packages. - uv_snapshot!(context.install() + uv_snapshot!(context.pip_install() .arg("-r") .arg(requirements_txt.path()) .arg("--require-hashes") @@ -5447,7 +5428,7 @@ fn tool_uv_sources() -> Result<()> { )?; // Install the editable packages. - uv_snapshot!(context.filters(), windows_filters=false, context.install() + uv_snapshot!(context.filters(), windows_filters=false, context.pip_install() .arg("--preview") .arg("-r") .arg(require_path) @@ -5474,7 +5455,7 @@ fn tool_uv_sources() -> Result<()> { ); // Re-install the editable packages. - uv_snapshot!(context.filters(), windows_filters=false, context.install() + uv_snapshot!(context.filters(), windows_filters=false, context.pip_install() .arg("--preview") .arg("-r") .arg(require_path) @@ -5509,7 +5490,7 @@ fn tool_uv_sources_is_in_preview() -> Result<()> { "#})?; // Install the editable packages. 
- uv_snapshot!(context.filters(), windows_filters=false, context.install() + uv_snapshot!(context.filters(), windows_filters=false, context.pip_install() .arg("-r") .arg("pyproject.toml") .arg("--extra") @@ -5548,7 +5529,7 @@ fn recursive_extra_transitive_url() -> Result<()> { ] "#})?; - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg(".[all]"), @r###" success: true exit_code: 0 @@ -5570,7 +5551,7 @@ fn recursive_extra_transitive_url() -> Result<()> { fn prefer_editable() -> Result<()> { let context = TestContext::new("3.12"); - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/black_editable")) .arg(context.workspace_root.join("scripts/packages/black_editable")), @r###" @@ -5598,7 +5579,7 @@ fn prefer_editable() -> Result<()> { context.workspace_root.simplified_display() ))?; - uv_snapshot!(context.filters(), context.install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/black_editable")) .arg("-r") @@ -5652,7 +5633,7 @@ fn local_index_absolute() -> Result<()> { "#, Url::from_directory_path(context.workspace_root.join("scripts/links/")).unwrap().as_str()})?; - uv_snapshot!(context.filters(), context.install_without_exclude_newer() + uv_snapshot!(context.filters(), context.pip_install_without_exclude_newer() .arg("tqdm") .arg("--index-url") .arg(Url::from_directory_path(root).unwrap().as_str()), @r###" @@ -5702,7 +5683,7 @@ fn local_index_relative() -> Result<()> { "#, Url::from_directory_path(context.workspace_root.join("scripts/links/")).unwrap().as_str()})?; - uv_snapshot!(context.filters(), context.install_without_exclude_newer() + uv_snapshot!(context.filters(), context.pip_install_without_exclude_newer() .arg("tqdm") .arg("--index-url") .arg("./simple-html"), @r###" @@ -5758,7 +5739,7 @@ fn local_index_requirements_txt_absolute() -> Result<()> { tqdm "#, Url::from_directory_path(root).unwrap().as_str()})?; - uv_snapshot!(context.filters(), context.install_without_exclude_newer() + uv_snapshot!(context.filters(), context.pip_install_without_exclude_newer() .arg("-r") .arg("requirements.txt"), @r###" success: true @@ -5815,7 +5796,7 @@ fn local_index_requirements_txt_relative() -> Result<()> { ", )?; - uv_snapshot!(context.filters(), context.install_without_exclude_newer() + uv_snapshot!(context.filters(), context.pip_install_without_exclude_newer() .arg("-r") .arg("requirements.txt"), @r###" success: true diff --git a/crates/uv/tests/pip_install_scenarios.rs b/crates/uv/tests/pip_install_scenarios.rs index 1bd6585d6fa3..ed1f8b63d77f 100644 --- a/crates/uv/tests/pip_install_scenarios.rs +++ b/crates/uv/tests/pip_install_scenarios.rs @@ -48,19 +48,8 @@ fn command(context: &TestContext) -> Command { .arg("--index-url") .arg("https://astral-sh.github.io/packse/0.3.29/simple-html/") .arg("--find-links") - .arg("https://raw.githubusercontent.com/astral-sh/packse/0.3.29/vendor/links.html") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (8 * 1024 * 1024).to_string()); - } - + 
.arg("https://raw.githubusercontent.com/astral-sh/packse/0.3.29/vendor/links.html"); + context.add_shared_args(&mut command); command } diff --git a/crates/uv/tests/pip_list.rs b/crates/uv/tests/pip_list.rs index 84d217b04273..ebe06b670f3c 100644 --- a/crates/uv/tests/pip_list.rs +++ b/crates/uv/tests/pip_list.rs @@ -8,30 +8,14 @@ use assert_fs::prelude::*; use common::uv_snapshot; -use crate::common::{get_bin, TestContext, EXCLUDE_NEWER}; +use crate::common::{get_bin, TestContext}; mod common; -/// Create a `pip install` command with options shared across scenarios. -fn install_command(context: &TestContext) -> Command { +fn list_command(context: &TestContext) -> Command { let mut command = Command::new(get_bin()); - command - .arg("pip") - .arg("install") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string()); - } - + command.arg("pip").arg("list"); + context.add_shared_args(&mut command); command } @@ -39,16 +23,9 @@ fn install_command(context: &TestContext) -> Command { fn list_empty_columns() { let context = TestContext::new("3.12"); - uv_snapshot!(Command::new(get_bin()) - .arg("pip") - .arg("list") + uv_snapshot!(list_command(&context) .arg("--format") - .arg("columns") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + .arg("columns"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -62,16 +39,9 @@ fn list_empty_columns() { fn list_empty_freeze() { let context = TestContext::new("3.12"); - uv_snapshot!(Command::new(get_bin()) - .arg("pip") - .arg("list") + uv_snapshot!(list_command(&context) .arg("--format") - .arg("freeze") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + .arg("freeze"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -85,16 +55,9 @@ fn list_empty_freeze() { fn list_empty_json() { let context = TestContext::new("3.12"); - uv_snapshot!(Command::new(get_bin()) - .arg("pip") - .arg("list") + uv_snapshot!(list_command(&context) .arg("--format") - .arg("json") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + .arg("json"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -112,7 +75,7 @@ fn list_single_no_editable() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("MarkupSafe==2.1.3")?; - uv_snapshot!(install_command(&context) + uv_snapshot!(context.pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -130,14 +93,7 @@ fn list_single_no_editable() -> Result<()> { context.assert_command("import markupsafe").success(); - uv_snapshot!(Command::new(get_bin()) - .arg("pip") - .arg("list") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(list_command(&context), @r###" success: true exit_code: 
0 ----- stdout ----- @@ -157,7 +113,7 @@ fn list_editable() { let context = TestContext::new("3.12"); // Install the editable package. - uv_snapshot!(context.filters(), install_command(&context) + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r###" success: true @@ -181,14 +137,7 @@ fn list_editable() { .chain(vec![(r"\-\-\-\-\-\-+.*", "[UNDERLINE]"), (" +", " ")]) .collect::>(); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("list") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(filters, list_command(&context), @r###" success: true exit_code: 0 ----- stdout ----- @@ -209,7 +158,7 @@ fn list_editable_only() { let context = TestContext::new("3.12"); // Install the editable package. - uv_snapshot!(context.filters(), install_command(&context) + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r###" success: true @@ -233,15 +182,8 @@ fn list_editable_only() { .chain(vec![(r"\-\-\-\-\-\-+.*", "[UNDERLINE]"), (" +", " ")]) .collect::>(); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("list") - .arg("--editable") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(filters, list_command(&context) + .arg("--editable"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -253,15 +195,8 @@ fn list_editable_only() { "### ); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("list") - .arg("--exclude-editable") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(filters, list_command(&context) + .arg("--exclude-editable"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -275,16 +210,9 @@ fn list_editable_only() { "### ); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("list") + uv_snapshot!(filters, list_command(&context) .arg("--editable") - .arg("--exclude-editable") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + .arg("--exclude-editable"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -299,7 +227,7 @@ fn list_exclude() { let context = TestContext::new("3.12"); // Install the editable package. 
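// The `filters` built for the `pip list` snapshots in these tests follow this idiom, shown
// here with the explicit `collect::<Vec<_>>()` turbofish: extend the context's default
// filters with test-specific regex replacements that normalize the table underline and
// collapse column padding, so the snapshot output is stable across environments.
let filters = context
    .filters()
    .into_iter()
    .chain(vec![
        // Replace the `------` underline row of the `pip list` table.
        (r"\-\-\-\-\-\-+.*", "[UNDERLINE]"),
        // Collapse runs of spaces used for column alignment.
        (" +", " "),
    ])
    .collect::<Vec<_>>();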
- uv_snapshot!(context.filters(), install_command(&context) + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r###" success: true @@ -323,16 +251,9 @@ fn list_exclude() { .chain(vec![(r"\-\-\-\-\-\-+.*", "[UNDERLINE]"), (" +", " ")]) .collect::>(); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("list") + uv_snapshot!(filters, list_command(&context) .arg("--exclude") - .arg("numpy") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + .arg("numpy"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -347,16 +268,9 @@ fn list_exclude() { "### ); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("list") + uv_snapshot!(filters, list_command(&context) .arg("--exclude") - .arg("poetry-editable") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + .arg("poetry-editable"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -370,18 +284,11 @@ fn list_exclude() { "### ); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("list") + uv_snapshot!(filters, list_command(&context) .arg("--exclude") .arg("numpy") .arg("--exclude") - .arg("poetry-editable") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + .arg("poetry-editable"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -402,7 +309,7 @@ fn list_format_json() { let context = TestContext::new("3.12"); // Install the editable package. 
- uv_snapshot!(context.filters(), install_command(&context) + uv_snapshot!(context.filters(), context.pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r###" success: true @@ -426,15 +333,8 @@ fn list_format_json() { .chain(vec![(r"\-\-\-\-\-\-+.*", "[UNDERLINE]"), (" +", " ")]) .collect(); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("list") - .arg("--format=json") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(filters, list_command(&context) + .arg("--format=json"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -444,16 +344,9 @@ fn list_format_json() { "### ); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("list") + uv_snapshot!(filters, list_command(&context) .arg("--format=json") - .arg("--editable") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + .arg("--editable"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -463,16 +356,9 @@ fn list_format_json() { "### ); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("list") + uv_snapshot!(filters, list_command(&context) .arg("--format=json") - .arg("--exclude-editable") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + .arg("--exclude-editable"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -482,17 +368,10 @@ fn list_format_json() { "### ); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("list") + uv_snapshot!(filters, list_command(&context) .arg("--format=json") .arg("--editable") - .arg("--exclude-editable") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + .arg("--exclude-editable"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -508,7 +387,8 @@ fn list_format_freeze() { let context = TestContext::new("3.12"); // Install the editable package. 
- uv_snapshot!(context.filters(), install_command(&context) + uv_snapshot!(context.filters(), context + .pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r###" success: true @@ -532,15 +412,8 @@ fn list_format_freeze() { .chain(vec![(r"\-\-\-\-\-\-+.*", "[UNDERLINE]"), (" +", " ")]) .collect::>(); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("list") - .arg("--format=freeze") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(filters, list_command(&context) + .arg("--format=freeze"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -553,16 +426,9 @@ fn list_format_freeze() { "### ); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("list") + uv_snapshot!(filters, list_command(&context) .arg("--format=freeze") - .arg("--editable") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + .arg("--editable"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -572,16 +438,9 @@ fn list_format_freeze() { "### ); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("list") + uv_snapshot!(filters, list_command(&context) .arg("--format=freeze") - .arg("--exclude-editable") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + .arg("--exclude-editable"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -593,17 +452,10 @@ fn list_format_freeze() { "### ); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("list") + uv_snapshot!(filters, list_command(&context) .arg("--format=freeze") .arg("--editable") - .arg("--exclude-editable") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + .arg("--exclude-editable"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -649,15 +501,8 @@ Version: 0.22.0 .chain(vec![(r"\-\-\-\-\-\-+.*", "[UNDERLINE]"), (" +", " ")]) .collect::>(); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("list") - .arg("--editable") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(filters, list_command(&context) + .arg("--editable"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -699,15 +544,8 @@ Version: 0.1-bulbasaur .chain(vec![(r"\-\-\-\-\-\-+.*", "[UNDERLINE]"), (" +", " ")]) .collect::>(); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("list") - .arg("--editable") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(filters, list_command(&context) + .arg("--editable"), @r###" success: false exit_code: 2 ----- stdout ----- diff --git a/crates/uv/tests/pip_show.rs b/crates/uv/tests/pip_show.rs index f61687de1c53..f49067af7fd3 100644 --- a/crates/uv/tests/pip_show.rs +++ b/crates/uv/tests/pip_show.rs @@ -9,30 +9,14 @@ use indoc::indoc; use common::uv_snapshot; -use crate::common::{get_bin, TestContext, EXCLUDE_NEWER}; +use 
crate::common::{get_bin, TestContext}; mod common; -/// Create a `pip install` command with options shared across scenarios. -fn install_command(context: &TestContext) -> Command { +fn show_command(context: &TestContext) -> Command { let mut command = Command::new(get_bin()); - command - .arg("pip") - .arg("install") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string()); - } - + command.arg("pip").arg("show"); + context.add_shared_args(&mut command); command } @@ -40,14 +24,7 @@ fn install_command(context: &TestContext) -> Command { fn show_empty() { let context = TestContext::new("3.12"); - uv_snapshot!(Command::new(get_bin()) - .arg("pip") - .arg("show") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(show_command(&context), @r###" success: false exit_code: 1 ----- stdout ----- @@ -65,7 +42,8 @@ fn show_requires_multiple() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("requests==2.31.0")?; - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -86,15 +64,8 @@ fn show_requires_multiple() -> Result<()> { ); context.assert_command("import requests").success(); - uv_snapshot!(context.filters(), Command::new(get_bin()) - .arg("pip") - .arg("show") - .arg("requests") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(context.filters(), show_command(&context) + .arg("requests"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -120,7 +91,8 @@ fn show_python_version_marker() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("click==8.1.7")?; - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -143,15 +115,8 @@ fn show_python_version_marker() -> Result<()> { filters.push(("Requires: colorama", "Requires:")); } - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("show") - .arg("click") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(filters, show_command(&context) + .arg("click"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -175,7 +140,8 @@ fn show_found_single_package() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("MarkupSafe==2.1.3")?; - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -193,15 +159,8 @@ fn show_found_single_package() -> Result<()> { context.assert_command("import markupsafe").success(); - uv_snapshot!(context.filters(), Command::new(get_bin()) - .arg("pip") - .arg("show") - .arg("markupsafe") - 
.arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(context.filters(), show_command(&context) + .arg("markupsafe"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -229,7 +188,8 @@ fn show_found_multiple_packages() -> Result<()> { " })?; - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -248,16 +208,9 @@ fn show_found_multiple_packages() -> Result<()> { context.assert_command("import markupsafe").success(); - uv_snapshot!(context.filters(), Command::new(get_bin()) - .arg("pip") - .arg("show") + uv_snapshot!(context.filters(), show_command(&context) .arg("markupsafe") - .arg("pip") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + .arg("pip"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -291,7 +244,8 @@ fn show_found_one_out_of_three() -> Result<()> { " })?; - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -310,17 +264,10 @@ fn show_found_one_out_of_three() -> Result<()> { context.assert_command("import markupsafe").success(); - uv_snapshot!(context.filters(), Command::new(get_bin()) - .arg("pip") - .arg("show") + uv_snapshot!(context.filters(), show_command(&context) .arg("markupsafe") .arg("flask") - .arg("django") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + .arg("django"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -349,7 +296,8 @@ fn show_found_one_out_of_two_quiet() -> Result<()> { " })?; - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -369,17 +317,10 @@ fn show_found_one_out_of_two_quiet() -> Result<()> { context.assert_command("import markupsafe").success(); // Flask isn't installed, but markupsafe is, so the command should succeed. - uv_snapshot!(Command::new(get_bin()) - .arg("pip") - .arg("show") + uv_snapshot!(show_command(&context) .arg("markupsafe") .arg("flask") - .arg("--quiet") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + .arg("--quiet"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -402,7 +343,8 @@ fn show_empty_quiet() -> Result<()> { " })?; - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -422,16 +364,9 @@ fn show_empty_quiet() -> Result<()> { context.assert_command("import markupsafe").success(); // Flask isn't installed, so the command should fail. - uv_snapshot!(Command::new(get_bin()) - .arg("pip") - .arg("show") + uv_snapshot!(show_command(&context) .arg("flask") - .arg("--quiet") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + .arg("--quiet"), @r###" success: false exit_code: 1 ----- stdout ----- @@ -448,7 +383,8 @@ fn show_editable() -> Result<()> { let context = TestContext::new("3.12"); // Install the editable package. 
- install_command(&context) + context + .pip_install() .arg("-e") .arg("../../scripts/packages/poetry_editable") .current_dir(current_dir()?) @@ -459,15 +395,8 @@ fn show_editable() -> Result<()> { .assert() .success(); - uv_snapshot!(context.filters(), Command::new(get_bin()) - .arg("pip") - .arg("show") - .arg("poetry-editable") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(context.filters(), show_command(&context) + .arg("poetry-editable"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -496,7 +425,8 @@ fn show_required_by_multiple() -> Result<()> { " })?; - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -521,15 +451,8 @@ fn show_required_by_multiple() -> Result<()> { context.assert_command("import requests").success(); // idna is required by anyio and requests - uv_snapshot!(context.filters(), Command::new(get_bin()) - .arg("pip") - .arg("show") - .arg("idna") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(context.filters(), show_command(&context) + .arg("idna"), @r###" success: true exit_code: 0 ----- stdout ----- diff --git a/crates/uv/tests/pip_sync.rs b/crates/uv/tests/pip_sync.rs index af0adb7c5427..7e99e984c653 100644 --- a/crates/uv/tests/pip_sync.rs +++ b/crates/uv/tests/pip_sync.rs @@ -1,6 +1,5 @@ #![cfg(all(feature = "python", feature = "pypi"))] -use fs_err as fs; use std::env::consts::EXE_SUFFIX; use std::path::Path; use std::process::Command; @@ -9,16 +8,16 @@ use anyhow::Result; use assert_cmd::prelude::*; use assert_fs::fixture::ChildPath; use assert_fs::prelude::*; +use fs_err as fs; use indoc::indoc; +use insta::assert_snapshot; use predicates::Predicate; use url::Url; use common::{uv_snapshot, venv_to_interpreter}; use uv_fs::Simplified; -use crate::common::{ - copy_dir_all, get_bin, run_and_format, site_packages_path, TestContext, EXCLUDE_NEWER, -}; +use crate::common::{copy_dir_all, run_and_format, site_packages_path, TestContext}; mod common; @@ -34,67 +33,13 @@ fn check_command(venv: &Path, command: &str, temp_dir: &Path) { .success(); } -// TODO(zanieb): This belongs in the `TestContext` -/// Create a `pip sync` command with options shared across scenarios. -fn sync_without_exclude_newer(context: &TestContext) -> Command { - let mut command = Command::new(get_bin()); - command - .arg("pip") - .arg("sync") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .env("UV_TEST_PYTHON_PATH", &context.python_path()) - .env("UV_TOOLCHAIN_DIR", "") - .current_dir(&context.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (8 * 1024 * 1024).to_string()); - } - - command -} - -/// Create a `pip sync` command with options shared across scenarios. -pub fn sync(context: &TestContext) -> Command { - let mut command = sync_without_exclude_newer(context); - command.arg("--exclude-newer").arg(EXCLUDE_NEWER); - command -} - -/// Create a `pip uninstall` command with options shared across scenarios. 
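// A minimal sketch of the `TestContext` methods that replace the free `sync`,
// `sync_without_exclude_newer`, and `uninstall_command` helpers removed in this file
// (the real definitions live in crates/uv/tests/common/mod.rs and may differ): each one
// names only the subcommand and defers the shared options (cache dir, virtual environment,
// `UV_NO_WRAP`, toolchain isolation, Windows stack size) to `add_shared_args`.
use std::process::Command;

impl TestContext {
    /// `pip sync` pinned to the shared `--exclude-newer` cutoff.
    pub fn pip_sync(&self) -> Command {
        let mut command = self.pip_sync_without_exclude_newer();
        command.arg("--exclude-newer").arg(EXCLUDE_NEWER);
        command
    }

    /// `pip sync` without pinning the index state (callers pin versions instead).
    pub fn pip_sync_without_exclude_newer(&self) -> Command {
        let mut command = Command::new(get_bin());
        command.arg("pip").arg("sync");
        self.add_shared_args(&mut command);
        command
    }

    /// `pip uninstall` with the shared test options.
    pub fn pip_uninstall(&self) -> Command {
        let mut command = Command::new(get_bin());
        command.arg("pip").arg("uninstall");
        self.add_shared_args(&mut command);
        command
    }
}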
-fn uninstall_command(context: &TestContext) -> Command { - let mut command = Command::new(get_bin()); - command - .arg("pip") - .arg("uninstall") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_TEST_PYTHON_PATH", &context.python_path()) - .env("UV_TOOLCHAIN_DIR", "") - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (8 * 1024 * 1024).to_string()); - } - - command -} - #[test] fn missing_requirements_txt() { let context = TestContext::new("3.12"); let requirements_txt = context.temp_dir.child("requirements.txt"); - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: false @@ -115,7 +60,7 @@ fn missing_venv() -> Result<()> { requirements.write_str("anyio")?; fs::remove_dir_all(&context.venv)?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context).arg("requirements.txt"), @r###" + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer().arg("requirements.txt"), @r###" success: false exit_code: 2 ----- stdout ----- @@ -138,7 +83,7 @@ fn install() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("MarkupSafe==2.1.3")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -179,7 +124,7 @@ fn install_copy() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("MarkupSafe==2.1.3")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--link-mode") .arg("copy") @@ -214,7 +159,7 @@ fn install_hardlink() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("MarkupSafe==2.1.3")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--link-mode") .arg("hardlink") @@ -249,7 +194,7 @@ fn install_many() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -280,13 +225,14 @@ fn noop() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("MarkupSafe==2.1.3")?; - sync_without_exclude_newer(&context) + context + .pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") .assert() .success(); - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -314,7 +260,8 @@ fn link() -> Result<()> { let requirements_txt = context1.temp_dir.child("requirements.txt"); requirements_txt.write_str("iniconfig==2.0.0")?; - sync_without_exclude_newer(&context1) + context1 + .pip_sync_without_exclude_newer() .arg(requirements_txt.path()) .arg("--strict") .assert() @@ -322,22 +269,10 @@ fn link() -> 
Result<()> { // Create a separate virtual environment, but reuse the same cache. let context2 = TestContext::new("3.12"); - let mut cmd = Command::new(get_bin()); - cmd.arg("pip") - .arg("sync") - .arg("--cache-dir") - .arg(context1.cache_dir.path()) - .env("VIRTUAL_ENV", context2.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string()) + let mut cmd = context1.pip_sync_without_exclude_newer(); + cmd.env("VIRTUAL_ENV", context2.venv.as_os_str()) .current_dir(&context2.temp_dir); - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - cmd.env("UV_STACK_SIZE", (8 * 1024 * 1024).to_string()); - } - uv_snapshot!(cmd .arg(requirements_txt.path()) .arg("--strict"), @r###" @@ -366,7 +301,8 @@ fn add_remove() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("iniconfig==2.0.0")?; - sync_without_exclude_newer(&context) + context + .pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") .assert() @@ -375,7 +311,7 @@ fn add_remove() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("tomli==2.0.1")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -407,7 +343,8 @@ fn install_sequential() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("iniconfig==2.0.0")?; - sync_without_exclude_newer(&context) + context + .pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") .assert() @@ -416,7 +353,7 @@ fn install_sequential() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("iniconfig==2.0.0\ntomli==2.0.1")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -447,7 +384,8 @@ fn upgrade() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("tomli==2.0.0")?; - sync_without_exclude_newer(&context) + context + .pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") .assert() @@ -456,7 +394,7 @@ fn upgrade() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("tomli==2.0.1")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -486,7 +424,7 @@ fn install_url() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -515,7 +453,7 @@ fn install_git_commit() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389")?; - 
uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -548,7 +486,7 @@ fn install_git_tag() -> Result<()> { "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@test-tag", )?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -579,7 +517,7 @@ fn install_git_subdirectories() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a\nexample-pkg-b @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -610,7 +548,7 @@ fn install_sdist() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("source-distribution==0.0.1")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -640,7 +578,7 @@ fn install_sdist_url() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("source-distribution @ https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -676,7 +614,7 @@ fn install_sdist_archive_type_bz2() -> Result<()> { .display() ))?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -703,13 +641,14 @@ fn install_url_then_install_url() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - sync_without_exclude_newer(&context) + context + .pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") .assert() .success(); - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -736,7 +675,8 @@ fn install_url_then_install_version() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - sync_without_exclude_newer(&context) + context + .pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") .assert() @@ -745,7 +685,7 @@ fn install_url_then_install_version() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); 
requirements_txt.write_str("werkzeug==2.0.0")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -772,7 +712,8 @@ fn install_version_then_install_url() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("werkzeug==2.0.0")?; - sync_without_exclude_newer(&context) + context + .pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") .assert() @@ -781,7 +722,7 @@ fn install_version_then_install_url() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -812,7 +753,7 @@ fn install_numpy_py38() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("numpy")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -840,7 +781,7 @@ fn install_no_index() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("iniconfig==2.0.0")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--no-index") .arg("--strict"), @r###" @@ -870,7 +811,7 @@ fn install_no_index_cached() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("iniconfig==2.0.0")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -887,12 +828,9 @@ fn install_no_index_cached() -> Result<()> { context.assert_command("import iniconfig").success(); - uninstall_command(&context) - .arg("iniconfig") - .assert() - .success(); + context.pip_uninstall().arg("iniconfig").assert().success(); - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--no-index") .arg("--strict"), @r###" @@ -921,7 +859,7 @@ fn warn_on_yanked() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.txt"); requirements_in.write_str("colorama==0.4.2")?; - uv_snapshot!(context.filters(), windows_filters=false, sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), windows_filters=false, context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -948,7 +886,7 @@ fn warn_on_yanked_dry_run() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.txt"); requirements_in.write_str("colorama==0.4.2")?; - uv_snapshot!(context.filters(), windows_filters=false, sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), windows_filters=false, context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--dry-run") .arg("--strict"), @r###" @@ -985,7 +923,7 @@ fn install_local_wheel() -> Result<()> { Url::from_file_path(archive.path()).unwrap() ))?; - uv_snapshot!(context.filters(), 
sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -1006,7 +944,7 @@ fn install_local_wheel() -> Result<()> { context.reset_venv(); // Reinstall. The wheel should come from the cache, so there shouldn't be a "download". - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") , @r###" @@ -1031,7 +969,7 @@ fn install_local_wheel() -> Result<()> { filetime::set_file_mtime(&archive, filetime::FileTime::now()).unwrap(); // Reinstall. The wheel should be "downloaded" again. - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") , @r###" @@ -1053,7 +991,7 @@ fn install_local_wheel() -> Result<()> { filetime::set_file_mtime(&archive, filetime::FileTime::now()).unwrap(); // Reinstall into the same virtual environment. The wheel should be reinstalled. - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -1071,7 +1009,7 @@ fn install_local_wheel() -> Result<()> { ); // Reinstall into the same virtual environment. The wheel should _not_ be reinstalled. - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -1090,7 +1028,7 @@ fn install_local_wheel() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str(&format!("{}", Url::from_file_path(archive.path()).unwrap()))?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -1125,7 +1063,7 @@ fn mismatched_version() -> Result<()> { Url::from_file_path(archive.path()).unwrap() ))?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: false @@ -1160,7 +1098,7 @@ fn mismatched_name() -> Result<()> { Url::from_file_path(archive.path()).unwrap() ))?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: false @@ -1196,7 +1134,7 @@ fn install_local_source_distribution() -> Result<()> { Url::from_file_path(archive.path()).unwrap() ))?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -1235,7 +1173,7 @@ fn install_build_system_no_backend() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("build-system-no-backend @ https://files.pythonhosted.org/packages/ec/25/1e531108ca027dc3a3b37d351f4b86d811df4884c6a81cd99e73b8b589f5/build-system-no-backend-0.1.0.tar.gz")?; - uv_snapshot!(sync_without_exclude_newer(&context) + 
uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -1265,7 +1203,7 @@ fn install_url_source_dist_cached() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("source_distribution @ https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -1287,7 +1225,7 @@ fn install_url_source_dist_cached() -> Result<()> { // Re-run the installation in a new virtual environment. context.reset_venv(); - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") , @r###" @@ -1309,13 +1247,8 @@ fn install_url_source_dist_cached() -> Result<()> { // Clear the cache, then re-run the installation in a new virtual environment. context.reset_venv(); - uv_snapshot!(Command::new(get_bin()) - .arg("clean") - .arg("source_distribution") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(context.clean() + .arg("source_distribution"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -1325,7 +1258,7 @@ fn install_url_source_dist_cached() -> Result<()> { "### ); - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") , @r###" @@ -1357,7 +1290,7 @@ fn install_git_source_dist_cached() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -1379,7 +1312,7 @@ fn install_git_source_dist_cached() -> Result<()> { // Re-run the installation in a new virtual environment. 
context.reset_venv(); - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") , @r###" @@ -1409,13 +1342,8 @@ fn install_git_source_dist_cached() -> Result<()> { } else { context.filters() }; - uv_snapshot!(filters, Command::new(get_bin()) - .arg("clean") - .arg("werkzeug") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(filters, context.clean() + .arg("werkzeug"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -1425,7 +1353,7 @@ fn install_git_source_dist_cached() -> Result<()> { "### ); - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") , @r###" @@ -1455,7 +1383,7 @@ fn install_registry_source_dist_cached() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("source_distribution==0.0.1")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -1477,7 +1405,7 @@ fn install_registry_source_dist_cached() -> Result<()> { // Re-run the installation in a new virtual environment. context.reset_venv(); - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") , @r###" @@ -1512,13 +1440,8 @@ fn install_registry_source_dist_cached() -> Result<()> { .chain(context.filters()) .collect() }; - uv_snapshot!(filters, Command::new(get_bin()) - .arg("clean") - .arg("source_distribution") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(filters, context.clean() + .arg("source_distribution"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -1528,7 +1451,7 @@ fn install_registry_source_dist_cached() -> Result<()> { "### ); - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") , @r###" @@ -1568,7 +1491,7 @@ fn install_path_source_dist_cached() -> Result<()> { Url::from_file_path(archive.path()).unwrap() ))?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -1590,7 +1513,7 @@ fn install_path_source_dist_cached() -> Result<()> { // Re-run the installation in a new virtual environment. context.reset_venv(); - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") , @r###" @@ -1612,13 +1535,8 @@ fn install_path_source_dist_cached() -> Result<()> { // Clear the cache, then re-run the installation in a new virtual environment. 
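// A minimal sketch of the `clean` helper used in the cache-invalidation steps of these
// tests, assumed to mirror the other `TestContext` builders (the real definition is in
// crates/uv/tests/common/mod.rs): it wraps `uv clean <package>` with the shared cache and
// virtualenv isolation from `add_shared_args`, so the right cache directory is pruned.
// Callers then chain the package name, e.g. `context.clean().arg("tomli")`, before
// snapshotting the output.
use std::process::Command;

impl TestContext {
    pub fn clean(&self) -> Command {
        let mut command = Command::new(get_bin());
        command.arg("clean");
        self.add_shared_args(&mut command);
        command
    }
}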
context.reset_venv(); - uv_snapshot!(Command::new(get_bin()) - .arg("clean") - .arg("source-distribution") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(context.clean() + .arg("source-distribution"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -1628,7 +1546,7 @@ fn install_path_source_dist_cached() -> Result<()> { "### ); - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") , @r###" @@ -1666,7 +1584,7 @@ fn install_path_built_dist_cached() -> Result<()> { let url = Url::from_file_path(archive.path()).unwrap(); requirements_txt.write_str(&format!("tomli @ {url}"))?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -1686,7 +1604,7 @@ fn install_path_built_dist_cached() -> Result<()> { // Re-run the installation in a new virtual environment. context.reset_venv(); - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") , @r###" @@ -1718,13 +1636,8 @@ fn install_path_built_dist_cached() -> Result<()> { } else { context.filters() }; - uv_snapshot!(filters, Command::new(get_bin()) - .arg("clean") - .arg("tomli") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(filters, context.clean() + .arg("tomli"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -1734,7 +1647,7 @@ fn install_path_built_dist_cached() -> Result<()> { "### ); - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") , @r###" @@ -1771,7 +1684,7 @@ fn install_url_built_dist_cached() -> Result<()> { } else { context.filters() }; - uv_snapshot!(filters, sync_without_exclude_newer(&context) + uv_snapshot!(filters, context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -1791,7 +1704,7 @@ fn install_url_built_dist_cached() -> Result<()> { // Re-run the installation in a new virtual environment. context.reset_venv(); - uv_snapshot!(filters, sync_without_exclude_newer(&context) + uv_snapshot!(filters, context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") , @r###" @@ -1811,13 +1724,8 @@ fn install_url_built_dist_cached() -> Result<()> { // Clear the cache, then re-run the installation in a new virtual environment. 
context.reset_venv(); - uv_snapshot!(Command::new(get_bin()) - .arg("clean") - .arg("tqdm") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(context.clean() + .arg("tqdm"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -1827,7 +1735,7 @@ fn install_url_built_dist_cached() -> Result<()> { "### ); - uv_snapshot!(filters, sync_without_exclude_newer(&context) + uv_snapshot!(filters, context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") , @r###" @@ -1856,7 +1764,7 @@ fn duplicate_package_overlap() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("MarkupSafe==2.1.3\nMarkupSafe==2.1.2")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: false @@ -1880,7 +1788,7 @@ fn duplicate_package_disjoint() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("MarkupSafe==2.1.3\nMarkupSafe==2.1.2 ; python_version < '3.6'")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -1906,7 +1814,7 @@ fn reinstall() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -1926,7 +1834,7 @@ fn reinstall() -> Result<()> { context.assert_command("import tomli").success(); // Re-run the installation with `--reinstall`. - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--strict"), @r###" @@ -1959,7 +1867,7 @@ fn reinstall_package() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -1979,7 +1887,7 @@ fn reinstall_package() -> Result<()> { context.assert_command("import tomli").success(); // Re-run the installation with `--reinstall`. - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall-package") .arg("tomli") @@ -2012,7 +1920,7 @@ fn reinstall_git() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -2032,7 +1940,7 @@ fn reinstall_git() -> Result<()> { .success(); // Re-run the installation with `--reinstall`. 
- uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall-package") .arg("uv-public-pypackage") @@ -2065,7 +1973,7 @@ fn refresh() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -2088,7 +1996,7 @@ fn refresh() -> Result<()> { // latest versions of the packages. context.reset_venv(); - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--refresh") .arg("--strict") @@ -2120,7 +2028,7 @@ fn refresh_package() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: true @@ -2143,7 +2051,7 @@ fn refresh_package() -> Result<()> { // latest versions of the packages. context.reset_venv(); - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--refresh-package") .arg("tomli") @@ -2191,7 +2099,7 @@ fn sync_editable() -> Result<()> { })?; // Install the editable packages. - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg(requirements_txt.path()), @r###" success: true exit_code: 0 @@ -2208,7 +2116,7 @@ fn sync_editable() -> Result<()> { ); // Reinstall the editable packages. 
- uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg(requirements_txt.path()) .arg("--reinstall-package") .arg("poetry-editable"), @r###" @@ -2259,7 +2167,7 @@ fn sync_editable() -> Result<()> { "}; context.assert_command(check_installed).success(); - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg(requirements_txt.path()), @r###" success: true exit_code: 0 @@ -2293,7 +2201,7 @@ fn sync_editable_and_registry() -> Result<()> { " })?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg(requirements_txt.path()) .arg("--strict"), @r###" success: true @@ -2321,7 +2229,7 @@ fn sync_editable_and_registry() -> Result<()> { " })?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg(requirements_txt.path()), @r###" success: true exit_code: 0 @@ -2345,7 +2253,7 @@ fn sync_editable_and_registry() -> Result<()> { " })?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg(requirements_txt.path()), @r###" success: true exit_code: 0 @@ -2364,7 +2272,7 @@ fn sync_editable_and_registry() -> Result<()> { " })?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg(requirements_txt.path()) .arg("--strict"), @r###" success: true @@ -2408,7 +2316,7 @@ fn sync_editable_and_local() -> Result<()> { " })?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg(requirements_txt.path()), @r###" success: true exit_code: 0 @@ -2429,7 +2337,7 @@ fn sync_editable_and_local() -> Result<()> { " })?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg(requirements_txt.path()), @r###" success: true exit_code: 0 @@ -2452,7 +2360,7 @@ fn sync_editable_and_local() -> Result<()> { " })?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg(requirements_txt.path()), @r###" success: true exit_code: 0 @@ -2480,7 +2388,7 @@ fn incompatible_wheel() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str(&format!("foo @ {}", wheel.path().simplified_display()))?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict"), @r###" success: false @@ -2505,7 +2413,7 @@ fn sync_legacy_sdist_pep_517() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -2530,7 +2438,7 @@ fn sync_legacy_sdist_setuptools() -> Result<()> { let 
requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.in") .arg("--legacy-setup-py"), @r###" success: true @@ -2561,7 +2469,7 @@ fn find_links() -> Result<()> { werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl "})?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--find-links") .arg(context.workspace_root.join("scripts/links/")), @r###" @@ -2593,7 +2501,7 @@ fn find_links_no_index_match() -> Result<()> { tqdm==1000.0.0 "})?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--no-index") .arg("--find-links") @@ -2623,7 +2531,7 @@ fn find_links_offline_match() -> Result<()> { tqdm==1000.0.0 "})?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--offline") .arg("--find-links") @@ -2654,7 +2562,7 @@ fn find_links_offline_no_match() -> Result<()> { tqdm==1000.0.0 "})?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--offline") .arg("--find-links") @@ -2685,7 +2593,7 @@ fn find_links_wheel_cache() -> Result<()> { "})?; // Install `tqdm`. - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--find-links") .arg(context.workspace_root.join("scripts/links/")), @r###" @@ -2702,7 +2610,7 @@ fn find_links_wheel_cache() -> Result<()> { ); // Reinstall `tqdm` with `--reinstall`. Ensure that the wheel is reused. - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--find-links") @@ -2735,7 +2643,7 @@ fn find_links_source_cache() -> Result<()> { "})?; // Install `tqdm`. - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--find-links") .arg(context.workspace_root.join("scripts/links/")), @r###" @@ -2752,7 +2660,7 @@ fn find_links_source_cache() -> Result<()> { ); // Reinstall `tqdm` with `--reinstall`. Ensure that the wheel is reused. - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--find-links") @@ -2781,7 +2689,7 @@ fn offline() -> Result<()> { requirements_in.write_str("black==23.10.1")?; // Install with `--offline` with an empty cache. 
- uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.in") .arg("--offline"), @r###" success: false @@ -2797,7 +2705,7 @@ fn offline() -> Result<()> { ); // Populate the cache. - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -2814,7 +2722,7 @@ fn offline() -> Result<()> { // Install with `--offline` with a populated cache. context.reset_venv(); - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.in") .arg("--offline") , @r###" @@ -2842,7 +2750,7 @@ fn compatible_constraint() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("anyio==3.7.0")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--constraint") .arg("constraints.txt"), @r###" @@ -2871,7 +2779,7 @@ fn incompatible_constraint() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("anyio==3.6.0")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--constraint") .arg("constraints.txt"), @r###" @@ -2898,7 +2806,7 @@ fn irrelevant_constraint() -> Result<()> { let constraints_txt = context.temp_dir.child("constraints.txt"); constraints_txt.write_str("black==23.10.1")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--constraint") .arg("constraints.txt"), @r###" @@ -2924,7 +2832,7 @@ fn repeat_requirement_identical() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio\nanyio")?; - uv_snapshot!(sync(&context) + uv_snapshot!(context.pip_sync() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -2947,7 +2855,7 @@ fn repeat_requirement_compatible() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio\nanyio==4.0.0")?; - uv_snapshot!(sync(&context) + uv_snapshot!(context.pip_sync() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -2970,7 +2878,7 @@ fn repeat_requirement_incompatible() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("anyio<4.0.0\nanyio==4.0.0")?; - uv_snapshot!(sync(&context) + uv_snapshot!(context.pip_sync() .arg("requirements.in"), @r###" success: false exit_code: 1 @@ -2993,7 +2901,7 @@ fn tar_dont_preserve_mtime() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("tomli @ https://files.pythonhosted.org/packages/c0/3f/d7af728f075fb08564c5949a9c95e44352e23dee646869fa104a3b2060a3/tomli-2.0.1.tar.gz")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt"), @r###" success: true exit_code: 0 @@ -3016,7 +2924,7 @@ fn set_read_permissions() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("databricks==0.2")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -3050,7 +2958,8 @@ 
fn pip_entrypoints() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str(pip_requirement)?; - sync_without_exclude_newer(&context) + context + .pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--strict") .output() @@ -3095,7 +3004,7 @@ requires-python = ">=3.8" let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str(&format!("-e {}", editable_dir.path().display()))?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -3110,7 +3019,7 @@ requires-python = ">=3.8" ); // Re-installing should be a no-op. - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -3135,7 +3044,7 @@ requires-python = ">=3.8" )?; // Re-installing should update the package. - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -3162,7 +3071,7 @@ fn compile() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("MarkupSafe==2.1.3")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--compile") .arg("--strict"), @r###" @@ -3215,7 +3124,7 @@ fn compile_invalid_pyc_invalidation_mode() -> Result<()> { // Retry test if we run into a broken pipe (https://github.com/astral-sh/uv/issues/2672). // TODO(konsti): Why is this happening in the first place? 
let run_test = || { - let mut command = sync_without_exclude_newer(&context); + let mut command = context.pip_sync_without_exclude_newer(); command .arg("requirements.txt") .arg("--compile") @@ -3229,7 +3138,7 @@ fn compile_invalid_pyc_invalidation_mode() -> Result<()> { snapshot = run_test(); } - ::insta::assert_snapshot!(snapshot, @r###" + assert_snapshot!(snapshot, @r###" success: false exit_code: 2 ----- stdout ----- @@ -3272,7 +3181,7 @@ requires-python = "<=3.5" let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str(&format!("-e {}", editable_dir.path().display()))?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.in"), @r###" success: false exit_code: 1 @@ -3299,7 +3208,7 @@ fn no_stream() -> Result<()> { requirements_txt .write_str("hashb_foxglove_protocolbuffers_python==25.3.0.1.20240226043130+465630478360")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--index-url") .arg("https://buf.build/gen/python"), @r###" @@ -3342,7 +3251,7 @@ requires-python = "<=3.5" let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str(&format!("example @ {}", editable_dir.path().display()))?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.in"), @r###" success: false exit_code: 1 @@ -3368,7 +3277,7 @@ fn require_hashes_unknown_algorithm() -> Result<()> { "anyio==4.0.0 --hash=foo:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f", )?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false @@ -3392,7 +3301,7 @@ fn require_hashes_missing_hash() -> Result<()> { requirements_txt.write_str("anyio==4.0.0")?; // Install without error when `--require-hashes` is omitted. - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt"), @r###" success: true exit_code: 0 @@ -3407,7 +3316,7 @@ fn require_hashes_missing_hash() -> Result<()> { ); // Error when `--require-hashes` is provided. - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false @@ -3433,7 +3342,7 @@ fn require_hashes_missing_version() -> Result<()> { )?; // Install without error when `--require-hashes` is omitted. - uv_snapshot!(sync(&context) + uv_snapshot!(context.pip_sync() .arg("requirements.txt"), @r###" success: true exit_code: 0 @@ -3448,7 +3357,7 @@ fn require_hashes_missing_version() -> Result<()> { ); // Error when `--require-hashes` is provided. - uv_snapshot!(sync(&context) + uv_snapshot!(context.pip_sync() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false @@ -3474,7 +3383,7 @@ fn require_hashes_invalid_operator() -> Result<()> { )?; // Install without error when `--require-hashes` is omitted. - uv_snapshot!(sync(&context) + uv_snapshot!(context.pip_sync() .arg("requirements.txt"), @r###" success: true exit_code: 0 @@ -3489,7 +3398,7 @@ fn require_hashes_invalid_operator() -> Result<()> { ); // Error when `--require-hashes` is provided. 
- uv_snapshot!(sync(&context) + uv_snapshot!(context.pip_sync() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false @@ -3513,7 +3422,7 @@ fn require_hashes_wheel_no_binary() -> Result<()> { requirements_txt .write_str("anyio==4.0.0 --hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--no-binary") .arg(":all:") @@ -3548,7 +3457,7 @@ fn require_hashes_wheel_only_binary() -> Result<()> { requirements_txt .write_str("anyio==4.0.0 --hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--only-binary") .arg(":all:") @@ -3577,7 +3486,7 @@ fn require_hashes_source_no_binary() -> Result<()> { requirements_txt .write_str("source-distribution==0.0.1 --hash=sha256:1f83ed7498336c7f2ab9b002cf22583d91115ebc624053dc4eb3a45694490106")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--no-binary") .arg(":all:") @@ -3606,7 +3515,7 @@ fn require_hashes_source_only_binary() -> Result<()> { requirements_txt .write_str("anyio==4.0.0 --hash=sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--only-binary") .arg(":all:") @@ -3641,7 +3550,7 @@ fn require_hashes_wrong_digest() -> Result<()> { requirements_txt .write_str("anyio==4.0.0 --hash=sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false @@ -3674,7 +3583,7 @@ fn require_hashes_wrong_algorithm() -> Result<()> { requirements_txt .write_str("anyio==4.0.0 --hash=sha512:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false @@ -3707,7 +3616,7 @@ fn require_hashes_source_url() -> Result<()> { requirements_txt .write_str("source-distribution @ https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz --hash=sha256:1f83ed7498336c7f2ab9b002cf22583d91115ebc624053dc4eb3a45694490106")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: true @@ -3723,7 +3632,7 @@ fn require_hashes_source_url() -> Result<()> { ); // Reinstall with the right hash, and verify that it's reused. 
- uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--require-hashes"), @r###" @@ -3745,7 +3654,7 @@ fn require_hashes_source_url() -> Result<()> { requirements_txt .write_str("source-distribution @ https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz --hash=sha256:a7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--require-hashes"), @r###" @@ -3777,7 +3686,7 @@ fn require_hashes_source_url_mismatch() -> Result<()> { requirements_txt .write_str("source-distribution @ https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz --hash=sha256:a7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false @@ -3808,7 +3717,7 @@ fn require_hashes_wheel_url() -> Result<()> { requirements_txt .write_str("anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl --hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: true @@ -3824,7 +3733,7 @@ fn require_hashes_wheel_url() -> Result<()> { ); // Reinstall with the right hash, and verify that it's reused. 
- uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--require-hashes"), @r###" @@ -3846,7 +3755,7 @@ fn require_hashes_wheel_url() -> Result<()> { requirements_txt .write_str("anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl --hash=sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--require-hashes"), @r###" @@ -3875,7 +3784,7 @@ fn require_hashes_wheel_url() -> Result<()> { requirements_txt .write_str("anyio==4.0.0 --hash=sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f\niniconfig==2.0.0 --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: true @@ -3902,7 +3811,7 @@ fn require_hashes_wheel_url_mismatch() -> Result<()> { requirements_txt .write_str("anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl --hash=sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false @@ -3935,7 +3844,7 @@ fn require_hashes_git() -> Result<()> { requirements_txt .write_str("anyio @ git+https://github.com/agronholm/anyio@4a23745badf5bf5ef7928f1e346e9986bd696d82 --hash=sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false @@ -3965,7 +3874,7 @@ fn require_hashes_source_tree() -> Result<()> { .display() ))?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false @@ -3990,7 +3899,7 @@ fn require_hashes_re_download() -> Result<()> { requirements_txt.write_str("anyio==4.0.0")?; // Install without `--require-hashes`. 
- uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt"), @r###" success: true exit_code: 0 @@ -4009,7 +3918,7 @@ fn require_hashes_re_download() -> Result<()> { requirements_txt .write_str("anyio==4.0.0 --hash=sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--require-hashes"), @r###" @@ -4036,7 +3945,7 @@ fn require_hashes_re_download() -> Result<()> { requirements_txt .write_str("anyio==4.0.0 --hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--require-hashes"), @r###" @@ -4070,7 +3979,7 @@ fn require_hashes_wheel_path() -> Result<()> { .display() ))?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: true @@ -4102,7 +4011,7 @@ fn require_hashes_wheel_path_mismatch() -> Result<()> { .display() ))?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false @@ -4140,7 +4049,7 @@ fn require_hashes_source_path() -> Result<()> { .display() ))?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: true @@ -4172,7 +4081,7 @@ fn require_hashes_source_path_mismatch() -> Result<()> { .display() ))?; - uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false @@ -4205,7 +4114,7 @@ fn require_hashes_unnamed() -> Result<()> { https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl --hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f "} )?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: true @@ -4236,7 +4145,7 @@ fn require_hashes_editable() -> Result<()> { })?; // Install the editable packages. 
- uv_snapshot!(context.filters(), sync_without_exclude_newer(&context) + uv_snapshot!(context.filters(), context.pip_sync_without_exclude_newer() .arg(requirements_txt.path()) .arg("--require-hashes"), @r###" success: false @@ -4260,7 +4169,7 @@ fn require_hashes_repeated_dependency() -> Result<()> { requirements_txt .write_str("anyio==4.0.0 --hash=sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a\nanyio")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false @@ -4277,7 +4186,7 @@ fn require_hashes_repeated_dependency() -> Result<()> { requirements_txt .write_str("anyio\nanyio==4.0.0 --hash=sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false @@ -4305,7 +4214,7 @@ fn require_hashes_repeated_hash() -> Result<()> { anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl --hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f " })?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: true @@ -4328,7 +4237,7 @@ fn require_hashes_repeated_hash() -> Result<()> { anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl --hash=sha512:f30761c1e8725b49c498273b90dba4b05c0fd157811994c806183062cb6647e773364ce45f0e1ff0b10e32fe6d0232ea5ad39476ccf37109d6b49603a09c11c2 " })?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes") .arg("--reinstall"), @r###" @@ -4354,7 +4263,7 @@ fn require_hashes_repeated_hash() -> Result<()> { anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl --hash=md5:420d85e19168705cdf0223621b18831a " })?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes") .arg("--reinstall"), @r###" @@ -4380,7 +4289,7 @@ fn require_hashes_repeated_hash() -> Result<()> { anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl --hash=md5:520d85e19168705cdf0223621b18831a " })?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes") .arg("--reinstall"), @r###" @@ -4415,7 +4324,7 @@ fn require_hashes_at_least_one() -> Result<()> { requirements_txt .write_str("anyio==4.0.0 --hash=sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes"), @r###" success: true @@ -4436,7 +4345,7 @@ fn require_hashes_at_least_one() -> Result<()> { requirements_txt .write_str("anyio==4.0.0 --hash=sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a 
--hash=md5:420d85e19168705cdf0223621b18831a")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--require-hashes"), @r###" @@ -4458,7 +4367,7 @@ fn require_hashes_at_least_one() -> Result<()> { requirements_txt .write_str("anyio==4.0.0 --hash=sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a --hash=md5:1234")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--require-hashes"), @r###" @@ -4488,7 +4397,7 @@ fn require_hashes_find_links_no_hash() -> Result<()> { requirements_txt .write_str("example-a-961b4c22==1.0.0 --hash=sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--require-hashes") @@ -4510,7 +4419,7 @@ fn require_hashes_find_links_no_hash() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("example-a-961b4c22==1.0.0 --hash=sha256:123")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--require-hashes") @@ -4540,7 +4449,7 @@ fn require_hashes_find_links_no_hash() -> Result<()> { requirements_txt .write_str("example-a-961b4c22==1.0.0 --hash=sha256:294e788dbe500fdc39e8b88e82652ab67409a1dc9dd06543d0fe0ae31b713eb3")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--require-hashes") @@ -4569,7 +4478,7 @@ fn require_hashes_find_links_no_hash() -> Result<()> { requirements_txt .write_str("example-a-961b4c22==1.0.0 --hash=sha256:294e788dbe500fdc39e8b88e82652ab67409a1dc9dd06543d0fe0ae31b713eb3")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--no-binary") .arg(":all:") @@ -4603,7 +4512,7 @@ fn require_hashes_find_links_valid_hash() -> Result<()> { requirements_txt .write_str("example-a-961b4c22==1.0.0 --hash=sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes") .arg("--find-links") @@ -4632,7 +4541,7 @@ fn require_hashes_find_links_invalid_hash() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("example-a-961b4c22==1.0.0 --hash=sha256:123")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--require-hashes") @@ -4661,7 +4570,7 @@ fn require_hashes_find_links_invalid_hash() -> Result<()> { requirements_txt .write_str("example-a-961b4c22==1.0.0 --hash=sha256:8838f9d005ff0432b258ba648d9cabb1cbdf06ac29d14f788b02edae544032ea")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--require-hashes") @@ -4691,7 +4600,7 @@ fn require_hashes_find_links_invalid_hash() -> Result<()> { requirements_txt .write_str("example-a-961b4c22==1.0.0 
--hash=sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--require-hashes") @@ -4714,7 +4623,7 @@ fn require_hashes_find_links_invalid_hash() -> Result<()> { requirements_txt .write_str("example-a-961b4c22==1.0.0 --hash=sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--refresh") .arg("--reinstall") @@ -4741,7 +4650,7 @@ fn require_hashes_find_links_invalid_hash() -> Result<()> { requirements_txt .write_str("example-a-961b4c22==1.0.0 --hash=sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e --hash=sha256:a3cf07a05aac526131a2e8b6e4375ee6c6eaac8add05b88035e960ac6cd999ee")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--refresh") .arg("--reinstall") @@ -4777,7 +4686,7 @@ fn require_hashes_registry_no_hash() -> Result<()> { requirements_txt .write_str("example-a-961b4c22==1.0.0 --hash=sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes") .arg("--index-url") @@ -4806,7 +4715,7 @@ fn require_hashes_registry_valid_hash() -> Result<()> { requirements_txt .write_str("example-a-961b4c22==1.0.0 --hash=sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--require-hashes") .arg("--find-links") @@ -4833,7 +4742,7 @@ fn require_hashes_registry_invalid_hash() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("example-a-961b4c22==1.0.0 --hash=sha256:123")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--require-hashes") @@ -4862,7 +4771,7 @@ fn require_hashes_registry_invalid_hash() -> Result<()> { requirements_txt .write_str("example-a-961b4c22==1.0.0 --hash=sha256:8838f9d005ff0432b258ba648d9cabb1cbdf06ac29d14f788b02edae544032ea")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--require-hashes") @@ -4892,7 +4801,7 @@ fn require_hashes_registry_invalid_hash() -> Result<()> { requirements_txt .write_str("example-a-961b4c22==1.0.0 --hash=sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--reinstall") .arg("--require-hashes") @@ -4915,7 +4824,7 @@ fn require_hashes_registry_invalid_hash() -> Result<()> { requirements_txt .write_str("example-a-961b4c22==1.0.0 --hash=sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--refresh") .arg("--reinstall") @@ -4942,7 +4851,7 @@ fn require_hashes_registry_invalid_hash() 
-> Result<()> { requirements_txt .write_str("example-a-961b4c22==1.0.0 --hash=sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e --hash=sha256:a3cf07a05aac526131a2e8b6e4375ee6c6eaac8add05b88035e960ac6cd999ee")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt") .arg("--refresh") .arg("--reinstall") @@ -4978,7 +4887,7 @@ fn target_built_distribution() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("iniconfig==2.0.0")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.in") .arg("--target") .arg("target"), @r###" @@ -5013,7 +4922,7 @@ fn target_built_distribution() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("iniconfig==1.1.1")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.in") .arg("--target") .arg("target"), @r###" @@ -5034,7 +4943,7 @@ fn target_built_distribution() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flask")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.in") .arg("--target") .arg("target"), @r###" @@ -5070,7 +4979,7 @@ fn target_source_distribution() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("iniconfig==2.0.0")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.in") .arg("--no-binary") .arg("iniconfig") @@ -5119,7 +5028,7 @@ fn target_no_build_isolation() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("flit_core")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.in"), @r###" success: true exit_code: 0 @@ -5136,7 +5045,7 @@ fn target_no_build_isolation() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("wheel")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.in") .arg("--no-build-isolation") .arg("--no-binary") @@ -5185,7 +5094,7 @@ fn prefix() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("iniconfig==2.0.0")?; - uv_snapshot!(sync(&context) + uv_snapshot!(context.pip_sync() .arg("requirements.in") .arg("--prefix") .arg("prefix"), @r###" @@ -5220,7 +5129,7 @@ fn prefix() -> Result<()> { let requirements_in = context.temp_dir.child("requirements.in"); requirements_in.write_str("iniconfig==1.1.1")?; - uv_snapshot!(sync(&context) + uv_snapshot!(context.pip_sync() .arg("requirements.in") .arg("--prefix") .arg("prefix"), @r###" @@ -5248,7 +5157,7 @@ fn preserve_markers() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("anyio ; python_version > '3.7'")?; - uv_snapshot!(sync_without_exclude_newer(&context) + uv_snapshot!(context.pip_sync_without_exclude_newer() .arg("requirements.txt"), @r###" success: true exit_code: 0 diff --git a/crates/uv/tests/pip_tree.rs b/crates/uv/tests/pip_tree.rs index fba2eb5719c8..6d0da83aabfb 100644 
--- a/crates/uv/tests/pip_tree.rs +++ b/crates/uv/tests/pip_tree.rs @@ -5,30 +5,14 @@ use assert_fs::fixture::PathChild; use common::uv_snapshot; -use crate::common::{get_bin, TestContext, EXCLUDE_NEWER}; +use crate::common::{get_bin, TestContext}; mod common; -/// Create a `pip install` command with options shared across scenarios. -fn install_command(context: &TestContext) -> Command { +fn tree_command(context: &TestContext) -> Command { let mut command = Command::new(get_bin()); - command - .arg("pip") - .arg("install") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string()); - } - + command.arg("pip").arg("tree"); + context.add_shared_args(&mut command); command } @@ -36,14 +20,7 @@ fn install_command(context: &TestContext) -> Command { fn no_package() { let context = TestContext::new("3.12"); - uv_snapshot!(context.filters(), Command::new(get_bin()) - .arg("pip") - .arg("tree") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(context.filters(), tree_command(&context), @r###" success: true exit_code: 0 ----- stdout ----- @@ -61,7 +38,8 @@ fn single_package() { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("requests==2.31.0").unwrap(); - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -82,14 +60,7 @@ fn single_package() { ); context.assert_command("import requests").success(); - uv_snapshot!(context.filters(), Command::new(get_bin()) - .arg("pip") - .arg("tree") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(context.filters(), tree_command(&context), @r###" success: true exit_code: 0 ----- stdout ----- @@ -113,7 +84,8 @@ fn nested_dependencies() { .write_str("scikit-learn==1.4.1.post1") .unwrap(); - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -133,14 +105,7 @@ fn nested_dependencies() { "### ); - uv_snapshot!(context.filters(), Command::new(get_bin()) - .arg("pip") - .arg("tree") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(context.filters(), tree_command(&context), @r###" success: true exit_code: 0 ----- stdout ----- @@ -165,7 +130,8 @@ fn nested_dependencies_more_complex() { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("packse").unwrap(); - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -212,14 +178,7 @@ fn nested_dependencies_more_complex() { "### ); - uv_snapshot!(context.filters(), Command::new(get_bin()) - .arg("pip") - .arg("tree") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", 
context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(context.filters(), tree_command(&context), @r###" success: true exit_code: 0 ----- stdout ----- @@ -280,30 +239,18 @@ fn cyclic_dependency() { .write_str("uv-cyclic-dependencies-c") .unwrap(); - let mut command = Command::new(get_bin()); + let mut command = context.pip_install_without_exclude_newer(); command - .arg("pip") - .arg("install") .arg("-r") .arg("requirements.txt") - .arg("--cache-dir") - .arg(context.cache_dir.path()) .arg("--index-url") - .arg("https://test.pypi.org/simple/") - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir); - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string()); - } + .arg("https://test.pypi.org/simple/"); uv_snapshot!(context.filters(), command, @r###" success: true exit_code: 0 ----- stdout ----- - + ----- stderr ----- Resolved 3 packages in [TIME] Prepared 3 packages in [TIME] @@ -314,14 +261,7 @@ fn cyclic_dependency() { "### ); - uv_snapshot!(context.filters(), Command::new(get_bin()) - .arg("pip") - .arg("tree") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(context.filters(), tree_command(&context), @r###" success: true exit_code: 0 ----- stdout ----- @@ -344,7 +284,8 @@ fn removed_dependency() { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("requests==2.31.0").unwrap(); - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -364,15 +305,9 @@ fn removed_dependency() { "### ); - uv_snapshot!(context.filters(), Command::new(get_bin()) - .arg("pip") - .arg("uninstall") - .arg("requests") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(context.filters(), context + .pip_uninstall() + .arg("requests"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -383,14 +318,7 @@ fn removed_dependency() { "### ); - uv_snapshot!(context.filters(), Command::new(get_bin()) - .arg("pip") - .arg("tree") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(context.filters(), tree_command(&context), @r###" success: true exit_code: 0 ----- stdout ----- @@ -418,7 +346,8 @@ fn multiple_packages() { ) .unwrap(); - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -444,14 +373,7 @@ fn multiple_packages() { filters.push(("colorama v0.4.6\n", "")); } context.assert_command("import requests").success(); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("tree") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(filters, tree_command(&context), @r###" success: true exit_code: 0 ----- stdout ----- @@ -483,7 +405,8 @@ fn multiple_packages_shared_descendant() { ) 
.unwrap(); - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -509,14 +432,7 @@ fn multiple_packages_shared_descendant() { "### ); - uv_snapshot!(context.filters(), Command::new(get_bin()) - .arg("pip") - .arg("tree") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(context.filters(), tree_command(&context), @r###" success: true exit_code: 0 ----- stdout ----- @@ -558,7 +474,8 @@ fn no_dedupe_and_cycle() { ) .unwrap(); - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -584,23 +501,11 @@ fn no_dedupe_and_cycle() { "### ); - let mut command = Command::new(get_bin()); + let mut command = context.pip_install_without_exclude_newer(); command - .arg("pip") - .arg("install") .arg("uv-cyclic-dependencies-c==0.1.0") - .arg("--cache-dir") - .arg(context.cache_dir.path()) .arg("--index-url") - .arg("https://test.pypi.org/simple/") - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir); - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string()); - } + .arg("https://test.pypi.org/simple/"); uv_snapshot!(context.filters(), command, @r###" success: true @@ -617,15 +522,8 @@ fn no_dedupe_and_cycle() { "### ); - uv_snapshot!(context.filters(), Command::new(get_bin()) - .arg("pip") - .arg("tree") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .arg("--no-dedupe") - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(context.filters(), tree_command(&context) + .arg("--no-dedupe"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -674,7 +572,8 @@ fn no_dedupe() { ) .unwrap(); - uv_snapshot!(install_command(&context) + uv_snapshot!(context + .pip_install() .arg("-r") .arg("requirements.txt") .arg("--strict"), @r###" @@ -700,15 +599,8 @@ fn no_dedupe() { "### ); - uv_snapshot!(context.filters(), Command::new(get_bin()) - .arg("pip") - .arg("tree") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .arg("--no-dedupe") - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(context.filters(), tree_command(&context) + .arg("--no-dedupe"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -742,7 +634,8 @@ fn with_editable() { let context = TestContext::new("3.12"); // Install the editable package. 
- uv_snapshot!(context.filters(), install_command(&context) + uv_snapshot!(context.filters(), context + .pip_install() .arg("-e") .arg(context.workspace_root.join("scripts/packages/hatchling_editable")), @r###" success: true @@ -764,14 +657,7 @@ fn with_editable() { .chain(vec![(r"\-\-\-\-\-\-+.*", "[UNDERLINE]"), (" +", " ")]) .collect::<Vec<_>>(); - uv_snapshot!(filters, Command::new(get_bin()) - .arg("pip") - .arg("tree") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir), @r###" + uv_snapshot!(filters, tree_command(&context), @r###" success: true exit_code: 0 ----- stdout ----- diff --git a/crates/uv/tests/pip_uninstall.rs b/crates/uv/tests/pip_uninstall.rs index 23762dd3dd6f..94d164b6f317 100644 --- a/crates/uv/tests/pip_uninstall.rs +++ b/crates/uv/tests/pip_uninstall.rs @@ -11,48 +11,6 @@ use crate::common::{get_bin, venv_to_interpreter, TestContext}; mod common; -/// Create a `pip uninstall` command with options shared across scenarios. -fn uninstall_command(context: &TestContext) -> Command { - let mut command = Command::new(get_bin()); - command - .arg("pip") - .arg("uninstall") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string()); - } - - command -} - -/// Create a `pip sync` command with options shared across scenarios. -fn sync_command(context: &TestContext) -> Command { - let mut command = Command::new(get_bin()); - command - .arg("pip") - .arg("sync") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (8 * 1024 * 1024).to_string()); - } - - command -} - #[test] fn no_arguments() -> Result<()> { let temp_dir = assert_fs::TempDir::new()?; @@ -156,7 +114,8 @@ fn uninstall() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("MarkupSafe==2.1.3")?; - sync_command(&context) + context + .pip_sync() .arg("requirements.txt") .assert() .success(); @@ -168,7 +127,7 @@ fn uninstall() -> Result<()> { .assert() .success(); - uv_snapshot!(uninstall_command(&context) + uv_snapshot!(context.pip_uninstall() .arg("MarkupSafe"), @r###" success: true exit_code: 0 @@ -197,7 +156,8 @@ fn missing_record() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("MarkupSafe==2.1.3")?; - sync_command(&context) + context + .pip_sync() .arg("requirements.txt") .assert() .success(); @@ -213,7 +173,7 @@ fn missing_record() -> Result<()> { let dist_info = context.site_packages().join("MarkupSafe-2.1.3.dist-info"); fs_err::remove_file(dist_info.join("RECORD"))?; - uv_snapshot!(context.filters(), uninstall_command(&context) + uv_snapshot!(context.filters(), context.pip_uninstall() .arg("MarkupSafe"), @r###" success: false exit_code: 2 @@ -241,7 +201,8 @@ fn uninstall_editable_by_name() -> Result<()> { .to_str() .expect("Path is valid unicode") ))?; -
sync_command(&context) + context + .pip_sync() .arg(requirements_txt.path()) .assert() .success(); @@ -253,7 +214,7 @@ fn uninstall_editable_by_name() -> Result<()> { .success(); // Uninstall the editable by name. - uv_snapshot!(context.filters(), uninstall_command(&context) + uv_snapshot!(context.filters(), context.pip_uninstall() .arg("poetry-editable"), @r###" success: true exit_code: 0 @@ -288,7 +249,8 @@ fn uninstall_by_path() -> Result<()> { .expect("Path is valid unicode"), )?; - sync_command(&context) + context + .pip_sync() .arg(requirements_txt.path()) .assert() .success(); @@ -300,7 +262,7 @@ fn uninstall_by_path() -> Result<()> { .success(); // Uninstall the editable by path. - uv_snapshot!(context.filters(), uninstall_command(&context) + uv_snapshot!(context.filters(), context.pip_uninstall() .arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r###" success: true exit_code: 0 @@ -335,7 +297,8 @@ fn uninstall_duplicate_by_path() -> Result<()> { .expect("Path is valid unicode"), )?; - sync_command(&context) + context + .pip_sync() .arg(requirements_txt.path()) .assert() .success(); @@ -347,7 +310,7 @@ fn uninstall_duplicate_by_path() -> Result<()> { .success(); // Uninstall the editable by both path and name. - uv_snapshot!(context.filters(), uninstall_command(&context) + uv_snapshot!(context.filters(), context.pip_uninstall() .arg("poetry-editable") .arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r###" success: true @@ -380,7 +343,8 @@ fn uninstall_duplicate() -> Result<()> { requirements_txt.write_str("pip==21.3.1")?; // Run `pip sync`. - sync_command(&context1) + context1 + .pip_sync() .arg(requirements_txt.path()) .assert() .success(); @@ -391,7 +355,8 @@ fn uninstall_duplicate() -> Result<()> { requirements_txt.write_str("pip==22.1.1")?; // Run `pip sync`. - sync_command(&context2) + context2 + .pip_sync() .arg(requirements_txt.path()) .assert() .success(); @@ -403,7 +368,7 @@ fn uninstall_duplicate() -> Result<()> { )?; // Run `pip uninstall`. - uv_snapshot!(uninstall_command(&context1) + uv_snapshot!(context1.pip_uninstall() .arg("pip"), @r###" success: true exit_code: 0 @@ -459,7 +424,7 @@ fn uninstall_egg_info() -> Result<()> { .write_str("")?; // Run `pip uninstall`. - uv_snapshot!(uninstall_command(&context) + uv_snapshot!(context.pip_uninstall() .arg("zstandard"), @r###" success: true exit_code: 0 @@ -513,7 +478,7 @@ Version: 0.22.0 ))?; // Run `pip uninstall`. - uv_snapshot!(uninstall_command(&context) + uv_snapshot!(context.pip_uninstall() .arg("zstandard"), @r###" success: true exit_code: 0 diff --git a/crates/uv/tests/self_update.rs b/crates/uv/tests/self_update.rs index 277f987999af..ddb2536b1977 100644 --- a/crates/uv/tests/self_update.rs +++ b/crates/uv/tests/self_update.rs @@ -1,11 +1,13 @@ #![cfg(feature = "self-update")] -use crate::common::get_bin; +use std::process::Command; + use axoupdater::{ test::helpers::{perform_runtest, RuntestArgs}, ReleaseSourceType, }; -use std::process::Command; + +use crate::common::get_bin; mod common; diff --git a/crates/uv/tests/show_settings.rs b/crates/uv/tests/show_settings.rs index 15e889d80331..323b38c83af1 100644 --- a/crates/uv/tests/show_settings.rs +++ b/crates/uv/tests/show_settings.rs @@ -11,7 +11,7 @@ mod common; /// Create a `pip compile` command, overwriting defaults for any settings that vary based on machine /// and operating system. 
fn command(context: &TestContext) -> Command { - let mut command = context.compile(); + let mut command = context.pip_compile(); command .env("UV_LINK_MODE", "clone") .env("UV_CONCURRENT_DOWNLOADS", "50") diff --git a/crates/uv/tests/venv.rs b/crates/uv/tests/venv.rs index 4adacf612a2c..4bfa6119c866 100644 --- a/crates/uv/tests/venv.rs +++ b/crates/uv/tests/venv.rs @@ -637,8 +637,10 @@ fn path_with_trailing_space_gives_proper_error() { let context = TestContext::new_with_versions(&["3.12"]); // Set a custom cache directory with a trailing space - uv_snapshot!(context.filters(), context.venv() - .env("UV_CACHE_DIR", format!("{} ", context.cache_dir.path().display())), @r###" + let path_with_trailing_slash = format!("{} ", context.cache_dir.path().display()); + uv_snapshot!(context.filters(), std::process::Command::new(crate::common::get_bin()) + .arg("venv") + .env("UV_CACHE_DIR", path_with_trailing_slash), @r###" success: false exit_code: 2 ----- stdout ----- diff --git a/crates/uv/tests/workspace.rs b/crates/uv/tests/workspace.rs index b57d18ce07a4..f1f25a0ad6d4 100644 --- a/crates/uv/tests/workspace.rs +++ b/crates/uv/tests/workspace.rs @@ -1,84 +1,18 @@ use std::env; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use std::process::Command; use anyhow::Result; use assert_cmd::assert::OutputAssertExt; -use crate::common::{copy_dir_ignore, get_bin, uv_snapshot, TestContext, EXCLUDE_NEWER}; +use crate::common::{copy_dir_ignore, uv_snapshot, TestContext}; mod common; -/// A `pip install` command for workspaces. -/// -/// The goal of the workspace tests is to resolve local workspace packages correctly. We add some -/// non-workspace dependencies to ensure that transitive non-workspace dependencies are also -/// correctly resolved. -fn install_workspace(context: &TestContext) -> Command { - let mut command = Command::new(get_bin()); - command - .arg("pip") - .arg("install") - .arg("--preview") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .arg("-e") - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (4 * 1024 * 1024).to_string()); - } - - command -} - -/// A `uv run` command. -fn run_workspace(context: &TestContext) -> Command { - let mut command = Command::new(get_bin()); - command - .arg("run") - .arg("--preview") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .arg("--python") - .arg(context.interpreter()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("UV_NO_WRAP", "1"); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (4 * 1024 * 1024).to_string()); - } - command -} - -/// A `uv lock` command. 
-fn lock_workspace(context: &TestContext) -> Command { - let mut command = Command::new(get_bin()); - command - .arg("lock") - .arg("--preview") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .arg("--python") - .arg(context.interpreter()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("UV_NO_WRAP", "1"); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (4 * 1024 * 1024).to_string()); - } +/// `pip install --preview -e <current_dir>` +fn install_workspace(context: &TestContext, current_dir: &Path) -> Command { + let mut command = context.pip_install(); + command.arg("--preview").arg("-e").arg(current_dir); command } @@ -101,7 +35,7 @@ fn test_albatross_in_examples_bird_feeder() { .join("examples") .join("bird-feeder"); - uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###" + uv_snapshot!(context.filters(), install_workspace(&context, &current_dir), @r###" success: true exit_code: 0 ----- stdout ----- @@ -118,7 +52,7 @@ fn test_albatross_in_examples_bird_feeder() { ); context.assert_file(current_dir.join("check_installed_bird_feeder.py")); - uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###" + uv_snapshot!(context.filters(), install_workspace(&context, &current_dir), @r###" success: true exit_code: 0 ----- stdout ----- @@ -136,7 +70,7 @@ fn test_albatross_in_examples() { let context = TestContext::new("3.12"); let current_dir = workspaces_dir().join("albatross-in-example"); - uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###" + uv_snapshot!(context.filters(), install_workspace(&context, &current_dir), @r###" success: true exit_code: 0 ----- stdout ----- @@ -151,7 +85,7 @@ fn test_albatross_in_examples() { ); context.assert_file(current_dir.join("check_installed_albatross.py")); - uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###" + uv_snapshot!(context.filters(), install_workspace(&context, &current_dir), @r###" success: true exit_code: 0 ----- stdout ----- @@ -169,7 +103,7 @@ fn test_albatross_just_project() { let context = TestContext::new("3.12"); let current_dir = workspaces_dir().join("albatross-just-project"); - uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###" + uv_snapshot!(context.filters(), install_workspace(&context, &current_dir), @r###" success: true exit_code: 0 ----- stdout ----- @@ -184,7 +118,7 @@ fn test_albatross_just_project() { ); context.assert_file(current_dir.join("check_installed_albatross.py")); - uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###" + uv_snapshot!(context.filters(), install_workspace(&context, &current_dir), @r###" success: true exit_code: 0 ----- stdout ----- @@ -205,7 +139,7 @@ fn test_albatross_project_in_excluded() { .join("excluded") .join("bird-feeder"); - uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###" + uv_snapshot!(context.filters(), install_workspace(&context, &current_dir), @r###" success: true exit_code: 0 ----- stdout ----- @@ -222,7 +156,7 @@ fn test_albatross_project_in_excluded() { ); context.assert_file(current_dir.join("check_installed_bird_feeder.py")); - uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###" + uv_snapshot!(context.filters(), install_workspace(&context, &current_dir), @r###" success: true exit_code: 0 ----- stdout ----- @@ -240,7 +174,7 @@ fn test_albatross_root_workspace() { let
context = TestContext::new("3.12"); let current_dir = workspaces_dir().join("albatross-root-workspace"); - uv_snapshot!(context.filters(), install_workspace(&context).arg(¤t_dir), @r###" + uv_snapshot!(context.filters(), install_workspace(&context, ¤t_dir), @r###" success: true exit_code: 0 ----- stdout ----- @@ -260,7 +194,7 @@ fn test_albatross_root_workspace() { ); context.assert_file(current_dir.join("check_installed_albatross.py")); - uv_snapshot!(context.filters(), install_workspace(&context).arg(¤t_dir), @r###" + uv_snapshot!(context.filters(), install_workspace(&context, ¤t_dir), @r###" success: true exit_code: 0 ----- stdout ----- @@ -281,7 +215,7 @@ fn test_albatross_root_workspace_bird_feeder() { .join("packages") .join("bird-feeder"); - uv_snapshot!(context.filters(), install_workspace(&context).arg(¤t_dir), @r###" + uv_snapshot!(context.filters(), install_workspace(&context, ¤t_dir), @r###" success: true exit_code: 0 ----- stdout ----- @@ -299,7 +233,7 @@ fn test_albatross_root_workspace_bird_feeder() { ); context.assert_file(current_dir.join("check_installed_bird_feeder.py")); - uv_snapshot!(context.filters(), install_workspace(&context).arg(¤t_dir), @r###" + uv_snapshot!(context.filters(), install_workspace(&context, ¤t_dir), @r###" success: true exit_code: 0 ----- stdout ----- @@ -320,7 +254,7 @@ fn test_albatross_root_workspace_albatross() { .join("packages") .join("bird-feeder"); - uv_snapshot!(context.filters(), install_workspace(&context).arg(¤t_dir), @r###" + uv_snapshot!(context.filters(), install_workspace(&context, ¤t_dir), @r###" success: true exit_code: 0 ----- stdout ----- @@ -338,7 +272,7 @@ fn test_albatross_root_workspace_albatross() { ); context.assert_file(current_dir.join("check_installed_albatross.py")); - uv_snapshot!(context.filters(), install_workspace(&context).arg(¤t_dir), @r###" + uv_snapshot!(context.filters(), install_workspace(&context, ¤t_dir), @r###" success: true exit_code: 0 ----- stdout ----- @@ -359,7 +293,7 @@ fn test_albatross_virtual_workspace() { .join("packages") .join("bird-feeder"); - uv_snapshot!(context.filters(), install_workspace(&context).arg(¤t_dir), @r###" + uv_snapshot!(context.filters(), install_workspace(&context, ¤t_dir), @r###" success: true exit_code: 0 ----- stdout ----- @@ -377,7 +311,7 @@ fn test_albatross_virtual_workspace() { ); context.assert_file(current_dir.join("check_installed_bird_feeder.py")); - uv_snapshot!(context.filters(), install_workspace(&context).arg(¤t_dir), @r###" + uv_snapshot!(context.filters(), install_workspace(&context, ¤t_dir), @r###" success: true exit_code: 0 ----- stdout ----- @@ -407,7 +341,9 @@ fn test_uv_run_with_package_virtual_workspace() -> Result<()> { "Using Python 3.12.[X] interpreter at: [PYTHON]", )); - uv_snapshot!(filters, run_workspace(&context) + uv_snapshot!(filters, context + .run() + .arg("--preview") .arg("--package") .arg("bird-feeder") .arg("packages/bird-feeder/check_installed_bird_feeder.py") @@ -431,7 +367,9 @@ fn test_uv_run_with_package_virtual_workspace() -> Result<()> { "### ); - uv_snapshot!(context.filters(), universal_windows_filters=true, run_workspace(&context) + uv_snapshot!(context.filters(), universal_windows_filters=true, context + .run() + .arg("--preview") .arg("--package") .arg("albatross") .arg("packages/albatross/check_installed_albatross.py") @@ -467,7 +405,9 @@ fn test_uv_run_with_package_root_workspace() -> Result<()> { "Using Python 3.12.[X] interpreter at: [PYTHON]", )); - uv_snapshot!(filters, run_workspace(&context) + uv_snapshot!(filters, 
context + .run() + .arg("--preview") .arg("--package") .arg("bird-feeder") .arg("packages/bird-feeder/check_installed_bird_feeder.py") @@ -491,7 +431,9 @@ fn test_uv_run_with_package_root_workspace() -> Result<()> { "### ); - uv_snapshot!(context.filters(), universal_windows_filters=true, run_workspace(&context) + uv_snapshot!(context.filters(), universal_windows_filters=true, context + .run() + .arg("--preview") .arg("--package") .arg("albatross") .arg("check_installed_albatross.py") @@ -523,7 +465,9 @@ fn workspace_lock_idempotence(workspace: &str, subdirectories: &[&str]) -> Resul copy_dir_ignore(workspaces_dir().join(workspace), &work_dir)?; - lock_workspace(&context) + context + .lock() + .arg("--preview") .current_dir(&work_dir.join(dir)) .assert() .success(); diff --git a/scripts/scenarios/templates/compile.mustache b/scripts/scenarios/templates/compile.mustache index 7084da6bef67..2842d4523c40 100644 --- a/scripts/scenarios/templates/compile.mustache +++ b/scripts/scenarios/templates/compile.mustache @@ -29,19 +29,9 @@ fn command(context: &TestContext, python_versions: &[&str]) -> Command { .arg("--index-url") .arg("{{index_url}}") .arg("--find-links") - .arg("{{vendor_links}}") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .env("UV_TEST_PYTHON_PATH", python_path) - .current_dir(&context.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (8 * 1024 * 1024).to_string()); - } + .arg("{{vendor_links}}"); + context.add_shared_args(&mut command); + command.env("UV_TEST_PYTHON_PATH", python_path); command } diff --git a/scripts/scenarios/templates/install.mustache b/scripts/scenarios/templates/install.mustache index 51e309ea064e..9b075f37dbe7 100644 --- a/scripts/scenarios/templates/install.mustache +++ b/scripts/scenarios/templates/install.mustache @@ -49,19 +49,8 @@ fn command(context: &TestContext) -> Command { .arg("--index-url") .arg("{{index_url}}") .arg("--find-links") - .arg("{{vendor_links}}") - .arg("--cache-dir") - .arg(context.cache_dir.path()) - .env("VIRTUAL_ENV", context.venv.as_os_str()) - .env("UV_NO_WRAP", "1") - .current_dir(&context.temp_dir); - - if cfg!(all(windows, debug_assertions)) { - // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the - // default windows stack of 1MB - command.env("UV_STACK_SIZE", (8 * 1024 * 1024).to_string()); - } - + .arg("{{vendor_links}}"); + context.add_shared_args(&mut command); command }
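
For orientation, this is the shape the refactor converges on, written out as a hypothetical test one might add to crates/uv/tests/workspace.rs after this change. The test name and assertions below are illustrative only and not part of the diff; the sketch assumes the file's existing imports plus the install_workspace helper and the TestContext methods introduced above.

// Sketch only: builds every command through the shared TestContext helpers
// instead of repeating the cache-dir/VIRTUAL_ENV/UV_NO_WRAP/stack-size setup.
#[test]
fn test_shared_args_sketch() {
    let context = TestContext::new("3.12");
    let current_dir = workspaces_dir().join("albatross-just-project");

    // `install_workspace` now expands to `pip install --preview -e <dir>` on top
    // of `TestContext::pip_install`, which already applies the shared arguments.
    install_workspace(&context, &current_dir)
        .assert()
        .success();

    // Commands without a dedicated helper can opt into the same environment
    // explicitly, mirroring the venv.rs change above.
    let mut command = Command::new(crate::common::get_bin());
    command.arg("pip").arg("freeze");
    context.add_shared_args(&mut command);
    command.assert().success();
}

The same add_shared_args escape hatch is what the two mustache templates above now rely on for the generated scenario tests.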