diff -Nru cargo-0.52.0/Cargo.toml cargo-0.54.0/Cargo.toml --- cargo-0.52.0/Cargo.toml 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/Cargo.toml 2021-04-27 14:35:53.000000000 +0000 @@ -1,6 +1,6 @@ [package] name = "cargo" -version = "0.52.0" +version = "0.54.0" edition = "2018" authors = ["Yehuda Katz ", "Carl Lerche ", @@ -22,9 +22,9 @@ atty = "0.2" bytesize = "1.0" cargo-platform = { path = "crates/cargo-platform", version = "0.1.1" } +cargo-util = { path = "crates/cargo-util", version = "0.1.0" } crates-io = { path = "crates/crates-io", version = "0.33.0" } crossbeam-utils = "0.8" -crypto-hash = "0.3.1" curl = { version = "0.4.23", features = ["http2"] } curl-sys = "0.4.22" env_logger = "0.8.1" @@ -50,9 +50,8 @@ opener = "0.4" percent-encoding = "2.0" rustfix = "0.5.0" -same-file = "1" semver = { version = "0.10", features = ["serde"] } -serde = { version = "1.0.82", features = ["derive"] } +serde = { version = "1.0.123", features = ["derive"] } serde_ignored = "0.1.0" serde_json = { version = "1.0.30", features = ["raw_value"] } shell-escape = "0.1.4" @@ -73,12 +72,9 @@ # See the `src/tools/rustc-workspace-hack/README.md` file in `rust-lang/rust` # for more information. rustc-workspace-hack = "1.0.0" - -[target.'cfg(target_os = "macos")'.dependencies] -core-foundation = { version = "0.9.0", features = ["mac_os_10_7_support"] } +rand = "0.8.3" [target.'cfg(windows)'.dependencies] -miow = "0.3.6" fwdansi = "1.1.0" [target.'cfg(windows)'.dependencies.winapi] diff -Nru cargo-0.52.0/CHANGELOG.md cargo-0.54.0/CHANGELOG.md --- cargo-0.52.0/CHANGELOG.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/CHANGELOG.md 2021-04-27 14:35:53.000000000 +0000 @@ -1,15 +1,203 @@ # Changelog +## Cargo 1.53 (2021-06-17) +[90691f2b...HEAD](https://github.com/rust-lang/cargo/compare/90691f2b...HEAD) + +### Added + +### Changed + +- 🔥 Cargo now supports git repositories where the default `HEAD` branch is not + "master". 
This also includes a switch to the version 3 `Cargo.lock` format + which can handle default branches correctly. + [#9133](https://github.com/rust-lang/cargo/pull/9133) +- 🔥 macOS targets now default to `unpacked` debuginfo. + [#9298](https://github.com/rust-lang/cargo/pull/9298) +- ❗ The `authors` field is no longer included in `Cargo.toml` for new + projects. + [#9282](https://github.com/rust-lang/cargo/pull/9282) +- `cargo update` may now work with the `--offline` flag. + [#9279](https://github.com/rust-lang/cargo/pull/9279) +- `cargo doc` will now erase the `doc` directory when switching between + different toolchain versions. There are shared, unversioned files (such as + the search index) that can become broken when using different versions. + [#8640](https://github.com/rust-lang/cargo/pull/8640) + +### Fixed + +### Nightly only +- Fixed config includes not working. + [#9299](https://github.com/rust-lang/cargo/pull/9299) +- Emit note when `--future-incompat-report` had nothing to report. + [#9263](https://github.com/rust-lang/cargo/pull/9263) +- Error messages for nightly features flags (like `-Z` and `cargo-features`) + should now provide more information. + [#9290](https://github.com/rust-lang/cargo/pull/9290) + +## Cargo 1.52 (2021-05-06) +[34170fcd...rust-1.52.0](https://github.com/rust-lang/cargo/compare/34170fcd...rust-1.52.0) + +### Added +- Added the `"manifest_path"` field to JSON messages for a package. + [#9022](https://github.com/rust-lang/cargo/pull/9022) + [#9247](https://github.com/rust-lang/cargo/pull/9247) + +### Changed +- Build scripts are now forbidden from setting `RUSTC_BOOTSTRAP` on stable. + [#9181](https://github.com/rust-lang/cargo/pull/9181) + [#9385](https://github.com/rust-lang/cargo/pull/9385) +- crates.io now supports SPDX 3.11 licenses. + [#9209](https://github.com/rust-lang/cargo/pull/9209) +- An error is now reported if `CARGO_TARGET_DIR` is an empty string. 
+ [#8939](https://github.com/rust-lang/cargo/pull/8939) +- Doc tests now pass the `--message-format` flag into the test so that the + "short" format can now be used for doc tests. + [#9128](https://github.com/rust-lang/cargo/pull/9128) +- `cargo test` now prints a clearer indicator of which target is currently running. + [#9195](https://github.com/rust-lang/cargo/pull/9195) +- The `CARGO_TARGET_` environment variable will now issue a warning if + it is using lowercase letters. + [#9169](https://github.com/rust-lang/cargo/pull/9169) + +### Fixed +- Fixed publication of packages with metadata and resolver fields in `Cargo.toml`. + [#9300](https://github.com/rust-lang/cargo/pull/9300) + [#9304](https://github.com/rust-lang/cargo/pull/9304) +- Fixed logic for determining prefer-dynamic for a dylib which differed in a + workspace vs a single package. + [#9252](https://github.com/rust-lang/cargo/pull/9252) +- Fixed an issue where exclusive target-specific dependencies that overlapped + across dependency kinds (like regular and build-dependencies) would + incorrectly include the dependencies in both. + [#9255](https://github.com/rust-lang/cargo/pull/9255) +- Fixed panic with certain styles of Package IDs when passed to the `-p` flag. + [#9188](https://github.com/rust-lang/cargo/pull/9188) +- When running cargo with output not going to a TTY, and with the progress bar + and color force-enabled, the output will now correctly clear the progress + line. + [#9231](https://github.com/rust-lang/cargo/pull/9231) +- Error instead of panic when JSON may contain non-utf8 paths. + [#9226](https://github.com/rust-lang/cargo/pull/9226) +- Fixed a hang that can happen on broken stderr. + [#9201](https://github.com/rust-lang/cargo/pull/9201) +- Fixed thin-local LTO not being disabled correctly when `lto=off` is set. + [#9182](https://github.com/rust-lang/cargo/pull/9182) + +### Nightly only +- The `strip` profile option now supports `true` and `false` values. 
+ [#9153](https://github.com/rust-lang/cargo/pull/9153) +- `cargo fix --edition` now displays a report when switching to 2021 if the + new resolver changes features. + [#9268](https://github.com/rust-lang/cargo/pull/9268) +- Added `[patch]` table support in `.cargo/config` files. + [#9204](https://github.com/rust-lang/cargo/pull/9204) +- Added `cargo describe-future-incompatibilities` for generating a report on + dependencies that contain future-incompatible warnings. + [#8825](https://github.com/rust-lang/cargo/pull/8825) +- Added easier support for testing the 2021 edition. + [#9184](https://github.com/rust-lang/cargo/pull/9184) +- Switch the default resolver to "2" in the 2021 edition. + [#9184](https://github.com/rust-lang/cargo/pull/9184) +- `cargo fix --edition` now supports 2021. + [#9184](https://github.com/rust-lang/cargo/pull/9184) +- Added `--print` flag to `cargo rustc` to pass along to `rustc` to display + information from rustc. + [#9002](https://github.com/rust-lang/cargo/pull/9002) +- Added `-Zdoctest-in-workspace` for changing the directory where doctests are + *run* versus where they are *compiled*. + [#9105](https://github.com/rust-lang/cargo/pull/9105) +- Added support for an `[env]` section in `.cargo/config.toml` to set + environment variables when running cargo. + [#9175](https://github.com/rust-lang/cargo/pull/9175) +- Added a schema field and `features2` field to the index. + [#9161](https://github.com/rust-lang/cargo/pull/9161) +- Changes to JSON spec targets will now trigger a rebuild. + [#9223](https://github.com/rust-lang/cargo/pull/9223) + ## Cargo 1.51 (2021-03-25) -[75d5d8cf...HEAD](https://github.com/rust-lang/cargo/compare/75d5d8cf...HEAD) +[75d5d8cf...rust-1.51.0](https://github.com/rust-lang/cargo/compare/75d5d8cf...rust-1.51.0) ### Added +- 🔥 Added the `split-debuginfo` profile option. 
+ [docs](https://doc.rust-lang.org/nightly/cargo/reference/profiles.html#split-debuginfo) + [#9112](https://github.com/rust-lang/cargo/pull/9112) +- Added the `path` field to `cargo metadata` for the package dependencies list + to show the path for "path" dependencies. + [#8994](https://github.com/rust-lang/cargo/pull/8994) +- 🔥 Added a new feature resolver, and new CLI feature flag behavior. See the + new [features](https://doc.rust-lang.org/nightly/cargo/reference/features.html#feature-resolver-version-2) + and [resolver](https://doc.rust-lang.org/nightly/cargo/reference/resolver.html#feature-resolver-version-2) + documentation for the `resolver = "2"` option. See the + [CLI](https://doc.rust-lang.org/nightly/cargo/reference/features.html#command-line-feature-options) + and [resolver 2 CLI](https://doc.rust-lang.org/nightly/cargo/reference/features.html#resolver-version-2-command-line-flags) + options for the new CLI behavior. And, finally, see + [RFC 2957](https://github.com/rust-lang/rfcs/blob/master/text/2957-cargo-features2.md) + for a detailed look at what has changed. + [#8997](https://github.com/rust-lang/cargo/pull/8997) ### Changed +- `cargo install --locked` now emits a warning if `Cargo.lock` is not found. + [#9108](https://github.com/rust-lang/cargo/pull/9108) +- Unknown or ambiguous package IDs passed on the command-line now display + suggestions for the correct package ID. + [#9095](https://github.com/rust-lang/cargo/pull/9095) +- Slightly optimize `cargo vendor` + [#8937](https://github.com/rust-lang/cargo/pull/8937) + [#9131](https://github.com/rust-lang/cargo/pull/9131) + [#9132](https://github.com/rust-lang/cargo/pull/9132) ### Fixed +- Fixed environment variables and cfg settings emitted by a build script that + are set for `cargo test` and `cargo run` when the build script runs multiple + times during the same build session. + [#9122](https://github.com/rust-lang/cargo/pull/9122) +- Fixed a panic with `cargo doc` and the new feature resolver. 
This also + introduces some heuristics to try to avoid path collisions with `rustdoc` by + only documenting one variant of a package if there are multiple (such as + multiple versions, or the same package shared for host and target + platforms). + [#9077](https://github.com/rust-lang/cargo/pull/9077) +- Fixed a bug in Cargo's cyclic dep graph detection that caused a stack + overflow. + [#9075](https://github.com/rust-lang/cargo/pull/9075) +- Fixed build script `links` environment variables (`DEP_*`) not showing up + for testing packages in some cases. + [#9065](https://github.com/rust-lang/cargo/pull/9065) +- Fixed features being selected in a nondeterministic way for a specific + scenario when building an entire workspace with all targets with a + proc-macro in the workspace with `resolver="2"`. + [#9059](https://github.com/rust-lang/cargo/pull/9059) +- Fixed to use `http.proxy` setting in `~/.gitconfig`. + [#8986](https://github.com/rust-lang/cargo/pull/8986) +- Fixed --feature pkg/feat for V1 resolver for non-member. + [#9275](https://github.com/rust-lang/cargo/pull/9275) + [#9277](https://github.com/rust-lang/cargo/pull/9277) +- Fixed panic in `cargo doc` when there are colliding output filenames in a workspace. + [#9276](https://github.com/rust-lang/cargo/pull/9276) + [#9277](https://github.com/rust-lang/cargo/pull/9277) +- Fixed `cargo install` from exiting with success if one of several packages + did not install successfully. + [#9185](https://github.com/rust-lang/cargo/pull/9185) + [#9196](https://github.com/rust-lang/cargo/pull/9196) +- Fix panic with doc collision orphan. + [#9142](https://github.com/rust-lang/cargo/pull/9142) + [#9196](https://github.com/rust-lang/cargo/pull/9196) ### Nightly only +- Removed the `publish-lockfile` unstable feature, it was stabilized without + the need for an explicit flag 1.5 years ago. 
+ [#9092](https://github.com/rust-lang/cargo/pull/9092) +- Added better diagnostics, help messages, and documentation for nightly + features (such as those passed with the `-Z` flag, or specified with + `cargo-features` in `Cargo.toml`). + [#9092](https://github.com/rust-lang/cargo/pull/9092) +- Added support for Rust edition 2021. + [#8922](https://github.com/rust-lang/cargo/pull/8922) +- Added support for the `rust-version` field in project metadata. + [#8037](https://github.com/rust-lang/cargo/pull/8037) +- Added a schema field to the index. + [#9161](https://github.com/rust-lang/cargo/pull/9161) + [#9196](https://github.com/rust-lang/cargo/pull/9196) ## Cargo 1.50 (2021-02-11) [8662ab42...rust-1.50.0](https://github.com/rust-lang/cargo/compare/8662ab42...rust-1.50.0) @@ -39,13 +227,9 @@ - The `rerun-if-changed` build script directive can now point to a directory, in which case Cargo will check if any file in that directory changes. [#8973](https://github.com/rust-lang/cargo/pull/8973) -- Slightly optimize `cargo vendor` - [#8937](https://github.com/rust-lang/cargo/pull/8937) - If Cargo cannot determine the username or email address, `cargo new` will no longer fail, and instead create an empty authors list. [#8912](https://github.com/rust-lang/cargo/pull/8912) -- Add period to allowed feature name characters. - [#8932](https://github.com/rust-lang/cargo/pull/8932) - The progress bar width has been reduced to provide more room to display the crates currently being built. [#8892](https://github.com/rust-lang/cargo/pull/8892) @@ -112,6 +296,9 @@ [#8814](https://github.com/rust-lang/cargo/pull/8814) - `-p` without a value will now print a list of workspace package names. [#8808](https://github.com/rust-lang/cargo/pull/8808) +- Add period to allowed feature name characters. 
+ [#8932](https://github.com/rust-lang/cargo/pull/8932) + [#8943](https://github.com/rust-lang/cargo/pull/8943) ### Fixed - Fixed building a library with both "dylib" and "rlib" crate types with LTO enabled. diff -Nru cargo-0.52.0/crates/cargo-test-support/build.rs cargo-0.54.0/crates/cargo-test-support/build.rs --- cargo-0.52.0/crates/cargo-test-support/build.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-test-support/build.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,6 @@ +fn main() { + println!( + "cargo:rustc-env=NATIVE_ARCH={}", + std::env::var("TARGET").unwrap() + ); +} diff -Nru cargo-0.52.0/crates/cargo-test-support/Cargo.toml cargo-0.54.0/crates/cargo-test-support/Cargo.toml --- cargo-0.52.0/crates/cargo-test-support/Cargo.toml 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/crates/cargo-test-support/Cargo.toml 2021-04-27 14:35:53.000000000 +0000 @@ -9,8 +9,9 @@ doctest = false [dependencies] -cargo = { path = "../.." } +anyhow = "1.0.34" cargo-test-macro = { path = "../cargo-test-macro" } +cargo-util = { path = "../cargo-util" } filetime = "0.2" flate2 = { version = "1.0", default-features = false, features = ["zlib"] } git2 = "0.13.16" diff -Nru cargo-0.52.0/crates/cargo-test-support/src/cross_compile.rs cargo-0.54.0/crates/cargo-test-support/src/cross_compile.rs --- cargo-0.52.0/crates/cargo-test-support/src/cross_compile.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/crates/cargo-test-support/src/cross_compile.rs 2021-04-27 14:35:53.000000000 +0000 @@ -10,8 +10,7 @@ //! These tests are all disabled on rust-lang/rust's CI, but run in Cargo's CI. 
use crate::{basic_manifest, main_file, project}; -use cargo::util::ProcessError; -use cargo::CargoResult; +use cargo_util::ProcessError; use std::env; use std::fmt::Write; use std::process::{Command, Output}; @@ -41,7 +40,7 @@ let cross_target = alternate(); - let run_cross_test = || -> CargoResult { + let run_cross_test = || -> anyhow::Result { let p = project() .at("cross_test") .file("Cargo.toml", &basic_manifest("cross_test", "1.0.0")) @@ -180,6 +179,23 @@ panic!("{}", message); } +/// The arch triple of the test-running host. +pub fn native() -> &'static str { + env!("NATIVE_ARCH") +} + +pub fn native_arch() -> &'static str { + match native() + .split("-") + .next() + .expect("Target triple has unexpected format") + { + "x86_64" => "x86_64", + "i686" => "x86", + _ => panic!("This test should be gated on cross_compile::disabled."), + } +} + /// The alternate target-triple to build with. /// /// Only use this function on tests that check `cross_compile::disabled`. @@ -205,6 +221,15 @@ } } +/// A target-triple that is neither the host nor the target. +/// +/// Rustc may not work with it and it's alright, apart from being a +/// valid target triple it is supposed to be used only as a +/// placeholder for targets that should not be considered. +pub fn unused() -> &'static str { + "wasm32-unknown-unknown" +} + /// Whether or not the host can run cross-compiled executables. pub fn can_run_on_host() -> bool { if disabled() { diff -Nru cargo-0.52.0/crates/cargo-test-support/src/install.rs cargo-0.54.0/crates/cargo-test-support/src/install.rs --- cargo-0.52.0/crates/cargo-test-support/src/install.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/crates/cargo-test-support/src/install.rs 2021-04-27 14:35:53.000000000 +0000 @@ -6,10 +6,12 @@ /// has been installed. 
Example usage: /// /// assert_has_installed_exe(cargo_home(), "foo"); +#[track_caller] pub fn assert_has_installed_exe>(path: P, name: &'static str) { assert!(check_has_installed_exe(path, name)); } +#[track_caller] pub fn assert_has_not_installed_exe>(path: P, name: &'static str) { assert!(!check_has_installed_exe(path, name)); } diff -Nru cargo-0.52.0/crates/cargo-test-support/src/lib.rs cargo-0.54.0/crates/cargo-test-support/src/lib.rs --- cargo-0.52.0/crates/cargo-test-support/src/lib.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/crates/cargo-test-support/src/lib.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2,8 +2,9 @@ //! //! See https://rust-lang.github.io/cargo/contrib/ for a guide on writing tests. -#![allow(clippy::needless_doctest_main)] // according to @ehuss this lint is fussy -#![allow(clippy::inefficient_to_string)] // this causes suggestions that result in `(*s).to_string()` +#![allow(clippy::all)] +#![warn(clippy::needless_borrow)] +#![warn(clippy::redundant_clone)] use std::env; use std::ffi::OsStr; @@ -15,7 +16,7 @@ use std::str; use std::time::{self, Duration}; -use cargo::util::{is_ci, CargoResult, ProcessBuilder, ProcessError, Rustc}; +use cargo_util::{is_ci, ProcessBuilder, ProcessError}; use serde_json::{self, Value}; use url::Url; @@ -701,7 +702,7 @@ self } - pub fn exec_with_output(&mut self) -> CargoResult { + pub fn exec_with_output(&mut self) -> anyhow::Result { self.ran = true; // TODO avoid unwrap let p = (&self.process_builder).clone().unwrap(); @@ -732,6 +733,7 @@ self } + #[track_caller] pub fn run(&mut self) { self.ran = true; let p = (&self.process_builder).clone().unwrap(); @@ -740,6 +742,7 @@ } } + #[track_caller] pub fn run_output(&mut self, output: &Output) { self.ran = true; if let Err(e) = self.match_output(output) { @@ -829,8 +832,8 @@ Some(_) => Err(format!( "exited with {:?}\n--- stdout\n{}\n--- stderr\n{}", code, - String::from_utf8_lossy(&stdout), - String::from_utf8_lossy(&stderr) + 
String::from_utf8_lossy(stdout), + String::from_utf8_lossy(stderr) )), } } @@ -1142,8 +1145,6 @@ } fn match_json(&self, expected: &str, line: &str) -> MatchResult { - let expected = self.normalize_matcher(expected); - let line = self.normalize_matcher(line); let actual = match line.parse() { Err(e) => return Err(format!("invalid json, {}:\n`{}`", e, line)), Ok(actual) => actual, @@ -1153,7 +1154,8 @@ Ok(expected) => expected, }; - find_json_mismatch(&expected, &actual) + let cwd = self.process_builder.as_ref().and_then(|p| p.get_cwd()); + find_json_mismatch(&expected, &actual, cwd) } fn diff_lines<'a>( @@ -1331,8 +1333,12 @@ /// as paths). You can use a `"{...}"` string literal as a wildcard for /// arbitrary nested JSON (useful for parts of object emitted by other programs /// (e.g., rustc) rather than Cargo itself). -pub fn find_json_mismatch(expected: &Value, actual: &Value) -> Result<(), String> { - match find_json_mismatch_r(expected, actual) { +pub fn find_json_mismatch( + expected: &Value, + actual: &Value, + cwd: Option<&Path>, +) -> Result<(), String> { + match find_json_mismatch_r(expected, actual, cwd) { Some((expected_part, actual_part)) => Err(format!( "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n", serde_json::to_string_pretty(expected).unwrap(), @@ -1347,12 +1353,21 @@ fn find_json_mismatch_r<'a>( expected: &'a Value, actual: &'a Value, + cwd: Option<&Path>, ) -> Option<(&'a Value, &'a Value)> { use serde_json::Value::*; match (expected, actual) { (&Number(ref l), &Number(ref r)) if l == r => None, (&Bool(l), &Bool(r)) if l == r => None, - (&String(ref l), &String(ref r)) if lines_match(l, r) => None, + (&String(ref l), _) if l == "{...}" => None, + (&String(ref l), &String(ref r)) => { + let normalized = normalize_matcher(r, cwd); + if lines_match(l, &normalized) { + None + } else { + Some((expected, actual)) + } + } (&Array(ref l), &Array(ref r)) => { if l.len() != r.len() { return Some((expected, actual)); @@ 
-1360,7 +1375,7 @@ l.iter() .zip(r.iter()) - .filter_map(|(l, r)| find_json_mismatch_r(l, r)) + .filter_map(|(l, r)| find_json_mismatch_r(l, r, cwd)) .next() } (&Object(ref l), &Object(ref r)) => { @@ -1371,12 +1386,11 @@ l.values() .zip(r.values()) - .filter_map(|(l, r)| find_json_mismatch_r(l, r)) + .filter_map(|(l, r)| find_json_mismatch_r(l, r, cwd)) .next() } (&Null, &Null) => None, // Magic string literal `"{...}"` acts as wildcard for any sub-JSON. - (&String(ref l), _) if l == "{...}" => None, _ => Some((expected, actual)), } } @@ -1524,6 +1538,7 @@ ("[REPLACING]", " Replacing"), ("[UNPACKING]", " Unpacking"), ("[SUMMARY]", " Summary"), + ("[FIXED]", " Fixed"), ("[FIXING]", " Fixing"), ("[EXE]", env::consts::EXE_SUFFIX), ("[IGNORED]", " Ignored"), @@ -1534,6 +1549,7 @@ ("[LOGOUT]", " Logout"), ("[YANK]", " Yank"), ("[OWNER]", " Owner"), + ("[MIGRATING]", " Migrating"), ]; let mut result = input.to_owned(); for &(pat, subst) in ¯os { @@ -1544,33 +1560,52 @@ pub mod install; -thread_local!( -pub static RUSTC: Rustc = Rustc::new( - PathBuf::from("rustc"), - None, - None, - Path::new("should be path to rustup rustc, but we don't care in tests"), - None, -).unwrap() -); +struct RustcInfo { + verbose_version: String, + host: String, +} + +impl RustcInfo { + fn new() -> RustcInfo { + let output = ProcessBuilder::new("rustc") + .arg("-vV") + .exec_with_output() + .expect("rustc should exec"); + let verbose_version = String::from_utf8(output.stdout).expect("utf8 output"); + let host = verbose_version + .lines() + .filter_map(|line| line.strip_prefix("host: ")) + .next() + .expect("verbose version has host: field") + .to_string(); + RustcInfo { + verbose_version, + host, + } + } +} + +lazy_static::lazy_static! { + static ref RUSTC_INFO: RustcInfo = RustcInfo::new(); +} /// The rustc host such as `x86_64-unknown-linux-gnu`. 
-pub fn rustc_host() -> String { - RUSTC.with(|r| r.host.to_string()) +pub fn rustc_host() -> &'static str { + &RUSTC_INFO.host } pub fn is_nightly() -> bool { + let vv = &RUSTC_INFO.verbose_version; env::var("CARGO_TEST_DISABLE_NIGHTLY").is_err() - && RUSTC - .with(|r| r.verbose_version.contains("-nightly") || r.verbose_version.contains("-dev")) + && (vv.contains("-nightly") || vv.contains("-dev")) } -pub fn process>(t: T) -> cargo::util::ProcessBuilder { +pub fn process>(t: T) -> ProcessBuilder { _process(t.as_ref()) } -fn _process(t: &OsStr) -> cargo::util::ProcessBuilder { - let mut p = cargo::util::process(t); +fn _process(t: &OsStr) -> ProcessBuilder { + let mut p = ProcessBuilder::new(t); // In general just clear out all cargo-specific configuration already in the // environment. Our tests all assume a "default configuration" unless @@ -1580,6 +1615,16 @@ p.env_remove(&k); } } + if env::var_os("RUSTUP_TOOLCHAIN").is_some() { + // Override the PATH to avoid executing the rustup wrapper thousands + // of times. This makes the testsuite run substantially faster. 
+ let path = env::var_os("PATH").unwrap_or_default(); + let paths = env::split_paths(&path); + let mut outer_cargo = PathBuf::from(env::var_os("CARGO").unwrap()); + outer_cargo.pop(); + let new_path = env::join_paths(std::iter::once(outer_cargo).chain(paths)).unwrap(); + p.env("PATH", new_path); + } p.cwd(&paths::root()) .env("HOME", paths::home()) @@ -1621,7 +1666,7 @@ fn masquerade_as_nightly_cargo(&mut self) -> &mut Self; } -impl ChannelChanger for cargo::util::ProcessBuilder { +impl ChannelChanger for ProcessBuilder { fn masquerade_as_nightly_cargo(&mut self) -> &mut Self { self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly") } diff -Nru cargo-0.52.0/crates/cargo-test-support/src/publish.rs cargo-0.54.0/crates/cargo-test-support/src/publish.rs --- cargo-0.52.0/crates/cargo-test-support/src/publish.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/crates/cargo-test-support/src/publish.rs 2021-04-27 14:35:53.000000000 +0000 @@ -76,7 +76,7 @@ let actual_json = serde_json::from_slice(&json_bytes).expect("uploaded JSON should be valid"); let expected_json = serde_json::from_str(expected_json).expect("expected JSON does not parse"); - if let Err(e) = find_json_mismatch(&expected_json, &actual_json) { + if let Err(e) = find_json_mismatch(&expected_json, &actual_json, None) { panic!("{}", e); } diff -Nru cargo-0.52.0/crates/cargo-test-support/src/registry.rs cargo-0.54.0/crates/cargo-test-support/src/registry.rs --- cargo-0.52.0/crates/cargo-test-support/src/registry.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/crates/cargo-test-support/src/registry.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,10 +1,9 @@ use crate::git::repo; use crate::paths; -use cargo::sources::CRATES_IO_INDEX; -use cargo::util::Sha256; +use cargo_util::Sha256; use flate2::write::GzEncoder; use flate2::Compression; -use std::collections::HashMap; +use std::collections::BTreeMap; use std::fmt::Write as _; use std::fs::{self, File}; use std::io::{BufRead, BufReader, 
Write}; @@ -193,7 +192,7 @@ alt_dl_url(), self.alt_api_url .as_ref() - .map_or_else(alt_api_url, |url| Url::parse(&url).expect("valid url")), + .map_or_else(alt_api_url, |url| Url::parse(url).expect("valid url")), alt_api_path(), ); } @@ -319,7 +318,7 @@ deps: Vec, files: Vec, yanked: bool, - features: HashMap>, + features: FeatureMap, local: bool, alternative: bool, invalid_json: bool, @@ -330,6 +329,8 @@ v: Option, } +type FeatureMap = BTreeMap>; + #[derive(Clone)] pub struct Dependency { name: String, @@ -394,7 +395,7 @@ deps: Vec::new(), files: Vec::new(), yanked: false, - features: HashMap::new(), + features: BTreeMap::new(), local: false, alternative: false, invalid_json: false, @@ -558,7 +559,7 @@ /// Sets the index schema version for this package. /// - /// See [`cargo::sources::registry::RegistryPackage`] for more information. + /// See `cargo::sources::registry::RegistryPackage` for more information. pub fn schema_version(&mut self, version: u32) -> &mut Package { self.v = Some(version); self @@ -583,7 +584,9 @@ let registry_url = match (self.alternative, dep.registry.as_deref()) { (false, None) => None, (false, Some("alternative")) => Some(alt_registry_url().to_string()), - (true, None) => Some(CRATES_IO_INDEX.to_string()), + (true, None) => { + Some("https://github.com/rust-lang/crates.io-index".to_string()) + } (true, Some("alternative")) => None, _ => panic!("registry_dep currently only supports `alternative`"), }; @@ -609,15 +612,21 @@ } else { serde_json::json!(self.name) }; + // This emulates what crates.io may do in the future. 
+ let (features, features2) = split_index_features(self.features.clone()); let mut json = serde_json::json!({ "name": name, "vers": self.vers, "deps": deps, "cksum": cksum, - "features": self.features, + "features": features, "yanked": self.yanked, "links": self.links, }); + if let Some(f2) = &features2 { + json["features2"] = serde_json::json!(f2); + json["v"] = serde_json::json!(2); + } if let Some(v) = self.v { json["v"] = serde_json::json!(v); } @@ -850,3 +859,21 @@ self } } + +fn split_index_features(mut features: FeatureMap) -> (FeatureMap, Option) { + let mut features2 = FeatureMap::new(); + for (feat, values) in features.iter_mut() { + if values + .iter() + .any(|value| value.starts_with("dep:") || value.contains("?/")) + { + let new_values = values.drain(..).collect(); + features2.insert(feat.clone(), new_values); + } + } + if features2.is_empty() { + (features, None) + } else { + (features, Some(features2)) + } +} diff -Nru cargo-0.52.0/crates/cargo-util/Cargo.toml cargo-0.54.0/crates/cargo-util/Cargo.toml --- cargo-0.52.0/crates/cargo-util/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/Cargo.toml 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,29 @@ +[package] +name = "cargo-util" +version = "0.1.0" +authors = ["The Cargo Project Developers"] +edition = "2018" +license = "MIT OR Apache-2.0" +homepage = "https://github.com/rust-lang/cargo" +repository = "https://github.com/rust-lang/cargo" +description = "Miscellaneous support code used by Cargo." 
+ +[dependencies] +anyhow = "1.0.34" +crypto-hash = "0.3.1" +filetime = "0.2.9" +hex = "0.4.2" +jobserver = "0.1.21" +libc = "0.2.88" +log = "0.4.6" +same-file = "1.0.6" +shell-escape = "0.1.4" +tempfile = "3.1.0" +walkdir = "2.3.1" + +[target.'cfg(target_os = "macos")'.dependencies] +core-foundation = { version = "0.9.0", features = ["mac_os_10_7_support"] } + +[target.'cfg(windows)'.dependencies] +miow = "0.3.6" +winapi = { version = "0.3.9", features = ["consoleapi", "minwindef"] } diff -Nru cargo-0.52.0/crates/cargo-util/LICENSE-APACHE cargo-0.54.0/crates/cargo-util/LICENSE-APACHE --- cargo-0.52.0/crates/cargo-util/LICENSE-APACHE 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/LICENSE-APACHE 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/LICENSE-2.0 + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff -Nru cargo-0.52.0/crates/cargo-util/LICENSE-MIT cargo-0.54.0/crates/cargo-util/LICENSE-MIT --- cargo-0.52.0/crates/cargo-util/LICENSE-MIT 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/LICENSE-MIT 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,23 @@ +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
diff -Nru cargo-0.52.0/crates/cargo-util/src/lib.rs cargo-0.54.0/crates/cargo-util/src/lib.rs --- cargo-0.52.0/crates/cargo-util/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/src/lib.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,17 @@ +//! Miscellaneous support code used by Cargo. + +pub use self::read2::read2; +pub use process_builder::ProcessBuilder; +pub use process_error::{exit_status_to_string, is_simple_exit_code, ProcessError}; +pub use sha256::Sha256; + +pub mod paths; +mod process_builder; +mod process_error; +mod read2; +mod sha256; + +/// Whether or not this running in a Continuous Integration environment. +pub fn is_ci() -> bool { + std::env::var("CI").is_ok() || std::env::var("TF_BUILD").is_ok() +} diff -Nru cargo-0.52.0/crates/cargo-util/src/paths.rs cargo-0.54.0/crates/cargo-util/src/paths.rs --- cargo-0.52.0/crates/cargo-util/src/paths.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/src/paths.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,698 @@ +//! Various utilities for working with files and paths. + +use anyhow::{Context, Result}; +use filetime::FileTime; +use std::env; +use std::ffi::{OsStr, OsString}; +use std::fs::{self, File, OpenOptions}; +use std::io; +use std::io::prelude::*; +use std::iter; +use std::path::{Component, Path, PathBuf}; +use tempfile::Builder as TempFileBuilder; + +/// Joins paths into a string suitable for the `PATH` environment variable. +/// +/// This is equivalent to [`std::env::join_paths`], but includes a more +/// detailed error message. The given `env` argument is the name of the +/// environment variable this is will be used for, which is included in the +/// error message. 
+pub fn join_paths>(paths: &[T], env: &str) -> Result { + env::join_paths(paths.iter()) + .with_context(|| { + let paths = paths.iter().map(Path::new).collect::>(); + format!("failed to join path array: {:?}", paths) + }) + .with_context(|| { + format!( + "failed to join search paths together\n\ + Does ${} have an unterminated quote character?", + env + ) + }) +} + +/// Returns the name of the environment variable used for searching for +/// dynamic libraries. +pub fn dylib_path_envvar() -> &'static str { + if cfg!(windows) { + "PATH" + } else if cfg!(target_os = "macos") { + // When loading and linking a dynamic library or bundle, dlopen + // searches in LD_LIBRARY_PATH, DYLD_LIBRARY_PATH, PWD, and + // DYLD_FALLBACK_LIBRARY_PATH. + // In the Mach-O format, a dynamic library has an "install path." + // Clients linking against the library record this path, and the + // dynamic linker, dyld, uses it to locate the library. + // dyld searches DYLD_LIBRARY_PATH *before* the install path. + // dyld searches DYLD_FALLBACK_LIBRARY_PATH only if it cannot + // find the library in the install path. + // Setting DYLD_LIBRARY_PATH can easily have unintended + // consequences. + // + // Also, DYLD_LIBRARY_PATH appears to have significant performance + // penalty starting in 10.13. Cargo's testsuite ran more than twice as + // slow with it on CI. + "DYLD_FALLBACK_LIBRARY_PATH" + } else { + "LD_LIBRARY_PATH" + } +} + +/// Returns a list of directories that are searched for dynamic libraries. +/// +/// Note that some operating systems will have defaults if this is empty that +/// will need to be dealt with. +pub fn dylib_path() -> Vec { + match env::var_os(dylib_path_envvar()) { + Some(var) => env::split_paths(&var).collect(), + None => Vec::new(), + } +} + +/// Normalize a path, removing things like `.` and `..`. +/// +/// CAUTION: This does not resolve symlinks (unlike +/// [`std::fs::canonicalize`]). This may cause incorrect or surprising +/// behavior at times. 
This should be used carefully. Unfortunately, +/// [`std::fs::canonicalize`] can be hard to use correctly, since it can often +/// fail, or on Windows returns annoying device paths. This is a problem Cargo +/// needs to improve on. +pub fn normalize_path(path: &Path) -> PathBuf { + let mut components = path.components().peekable(); + let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() { + components.next(); + PathBuf::from(c.as_os_str()) + } else { + PathBuf::new() + }; + + for component in components { + match component { + Component::Prefix(..) => unreachable!(), + Component::RootDir => { + ret.push(component.as_os_str()); + } + Component::CurDir => {} + Component::ParentDir => { + ret.pop(); + } + Component::Normal(c) => { + ret.push(c); + } + } + } + ret +} + +/// Returns the absolute path of where the given executable is located based +/// on searching the `PATH` environment variable. +/// +/// Returns an error if it cannot be found. +pub fn resolve_executable(exec: &Path) -> Result { + if exec.components().count() == 1 { + let paths = env::var_os("PATH").ok_or_else(|| anyhow::format_err!("no PATH"))?; + let candidates = env::split_paths(&paths).flat_map(|path| { + let candidate = path.join(&exec); + let with_exe = if env::consts::EXE_EXTENSION.is_empty() { + None + } else { + Some(candidate.with_extension(env::consts::EXE_EXTENSION)) + }; + iter::once(candidate).chain(with_exe) + }); + for candidate in candidates { + if candidate.is_file() { + // PATH may have a component like "." in it, so we still need to + // canonicalize. + return Ok(candidate.canonicalize()?); + } + } + + anyhow::bail!("no executable for `{}` found in PATH", exec.display()) + } else { + Ok(exec.canonicalize()?) + } +} + +/// Reads a file to a string. +/// +/// Equivalent to [`std::fs::read_to_string`] with better error messages. +pub fn read(path: &Path) -> Result { + match String::from_utf8(read_bytes(path)?) 
{ + Ok(s) => Ok(s), + Err(_) => anyhow::bail!("path at `{}` was not valid utf-8", path.display()), + } +} + +/// Reads a file into a bytes vector. +/// +/// Equivalent to [`std::fs::read`] with better error messages. +pub fn read_bytes(path: &Path) -> Result> { + fs::read(path).with_context(|| format!("failed to read `{}`", path.display())) +} + +/// Writes a file to disk. +/// +/// Equivalent to [`std::fs::write`] with better error messages. +pub fn write, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> { + let path = path.as_ref(); + fs::write(path, contents.as_ref()) + .with_context(|| format!("failed to write `{}`", path.display())) +} + +/// Equivalent to [`write`], but does not write anything if the file contents +/// are identical to the given contents. +pub fn write_if_changed, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> { + (|| -> Result<()> { + let contents = contents.as_ref(); + let mut f = OpenOptions::new() + .read(true) + .write(true) + .create(true) + .open(&path)?; + let mut orig = Vec::new(); + f.read_to_end(&mut orig)?; + if orig != contents { + f.set_len(0)?; + f.seek(io::SeekFrom::Start(0))?; + f.write_all(contents)?; + } + Ok(()) + })() + .with_context(|| format!("failed to write `{}`", path.as_ref().display()))?; + Ok(()) +} + +/// Equivalent to [`write`], but appends to the end instead of replacing the +/// contents. +pub fn append(path: &Path, contents: &[u8]) -> Result<()> { + (|| -> Result<()> { + let mut f = OpenOptions::new() + .write(true) + .append(true) + .create(true) + .open(path)?; + + f.write_all(contents)?; + Ok(()) + })() + .with_context(|| format!("failed to write `{}`", path.display()))?; + Ok(()) +} + +/// Creates a new file. +pub fn create>(path: P) -> Result { + let path = path.as_ref(); + File::create(path).with_context(|| format!("failed to create file `{}`", path.display())) +} + +/// Opens an existing file. 
+pub fn open>(path: P) -> Result { + let path = path.as_ref(); + File::open(path).with_context(|| format!("failed to open file `{}`", path.display())) +} + +/// Returns the last modification time of a file. +pub fn mtime(path: &Path) -> Result { + let meta = + fs::metadata(path).with_context(|| format!("failed to stat `{}`", path.display()))?; + Ok(FileTime::from_last_modification_time(&meta)) +} + +/// Returns the maximum mtime of the given path, recursing into +/// subdirectories, and following symlinks. +pub fn mtime_recursive(path: &Path) -> Result { + let meta = + fs::metadata(path).with_context(|| format!("failed to stat `{}`", path.display()))?; + if !meta.is_dir() { + return Ok(FileTime::from_last_modification_time(&meta)); + } + let max_meta = walkdir::WalkDir::new(path) + .follow_links(true) + .into_iter() + .filter_map(|e| match e { + Ok(e) => Some(e), + Err(e) => { + // Ignore errors while walking. If Cargo can't access it, the + // build script probably can't access it, either. + log::debug!("failed to determine mtime while walking directory: {}", e); + None + } + }) + .filter_map(|e| { + if e.path_is_symlink() { + // Use the mtime of both the symlink and its target, to + // handle the case where the symlink is modified to a + // different target. + let sym_meta = match std::fs::symlink_metadata(e.path()) { + Ok(m) => m, + Err(err) => { + // I'm not sure when this is really possible (maybe a + // race with unlinking?). Regardless, if Cargo can't + // read it, the build script probably can't either. + log::debug!( + "failed to determine mtime while fetching symlink metdata of {}: {}", + e.path().display(), + err + ); + return None; + } + }; + let sym_mtime = FileTime::from_last_modification_time(&sym_meta); + // Walkdir follows symlinks. + match e.metadata() { + Ok(target_meta) => { + let target_mtime = FileTime::from_last_modification_time(&target_meta); + Some(sym_mtime.max(target_mtime)) + } + Err(err) => { + // Can't access the symlink target. 
If Cargo can't + // access it, the build script probably can't access + // it either. + log::debug!( + "failed to determine mtime of symlink target for {}: {}", + e.path().display(), + err + ); + Some(sym_mtime) + } + } + } else { + let meta = match e.metadata() { + Ok(m) => m, + Err(err) => { + // I'm not sure when this is really possible (maybe a + // race with unlinking?). Regardless, if Cargo can't + // read it, the build script probably can't either. + log::debug!( + "failed to determine mtime while fetching metadata of {}: {}", + e.path().display(), + err + ); + return None; + } + }; + Some(FileTime::from_last_modification_time(&meta)) + } + }) + .max() + // or_else handles the case where there are no files in the directory. + .unwrap_or_else(|| FileTime::from_last_modification_time(&meta)); + Ok(max_meta) +} + +/// Record the current time on the filesystem (using the filesystem's clock) +/// using a file at the given directory. Returns the current time. +pub fn set_invocation_time(path: &Path) -> Result { + // note that if `FileTime::from_system_time(SystemTime::now());` is determined to be sufficient, + // then this can be removed. + let timestamp = path.join("invoked.timestamp"); + write( + ×tamp, + "This file has an mtime of when this was started.", + )?; + let ft = mtime(×tamp)?; + log::debug!("invocation time for {:?} is {}", path, ft); + Ok(ft) +} + +/// Converts a path to UTF-8 bytes. +pub fn path2bytes(path: &Path) -> Result<&[u8]> { + #[cfg(unix)] + { + use std::os::unix::prelude::*; + Ok(path.as_os_str().as_bytes()) + } + #[cfg(windows)] + { + match path.as_os_str().to_str() { + Some(s) => Ok(s.as_bytes()), + None => Err(anyhow::format_err!( + "invalid non-unicode path: {}", + path.display() + )), + } + } +} + +/// Converts UTF-8 bytes to a path. 
+pub fn bytes2path(bytes: &[u8]) -> Result { + #[cfg(unix)] + { + use std::os::unix::prelude::*; + Ok(PathBuf::from(OsStr::from_bytes(bytes))) + } + #[cfg(windows)] + { + use std::str; + match str::from_utf8(bytes) { + Ok(s) => Ok(PathBuf::from(s)), + Err(..) => Err(anyhow::format_err!("invalid non-unicode path")), + } + } +} + +/// Returns an iterator that walks up the directory hierarchy towards the root. +/// +/// Each item is a [`Path`]. It will start with the given path, finishing at +/// the root. If the `stop_root_at` parameter is given, it will stop at the +/// given path (which will be the last item). +pub fn ancestors<'a>(path: &'a Path, stop_root_at: Option<&Path>) -> PathAncestors<'a> { + PathAncestors::new(path, stop_root_at) +} + +pub struct PathAncestors<'a> { + current: Option<&'a Path>, + stop_at: Option, +} + +impl<'a> PathAncestors<'a> { + fn new(path: &'a Path, stop_root_at: Option<&Path>) -> PathAncestors<'a> { + let stop_at = env::var("__CARGO_TEST_ROOT") + .ok() + .map(PathBuf::from) + .or_else(|| stop_root_at.map(|p| p.to_path_buf())); + PathAncestors { + current: Some(path), + //HACK: avoid reading `~/.cargo/config` when testing Cargo itself. + stop_at, + } + } +} + +impl<'a> Iterator for PathAncestors<'a> { + type Item = &'a Path; + + fn next(&mut self) -> Option<&'a Path> { + if let Some(path) = self.current { + self.current = path.parent(); + + if let Some(ref stop_at) = self.stop_at { + if path == stop_at { + self.current = None; + } + } + + Some(path) + } else { + None + } + } +} + +/// Equivalent to [`std::fs::create_dir_all`] with better error messages. +pub fn create_dir_all(p: impl AsRef) -> Result<()> { + _create_dir_all(p.as_ref()) +} + +fn _create_dir_all(p: &Path) -> Result<()> { + fs::create_dir_all(p) + .with_context(|| format!("failed to create directory `{}`", p.display()))?; + Ok(()) +} + +/// Recursively remove all files and directories at the given directory. +/// +/// This does *not* follow symlinks. 
+pub fn remove_dir_all>(p: P) -> Result<()> { + _remove_dir_all(p.as_ref()) +} + +fn _remove_dir_all(p: &Path) -> Result<()> { + if p.symlink_metadata() + .with_context(|| format!("could not get metadata for `{}` to remove", p.display()))? + .file_type() + .is_symlink() + { + return remove_file(p); + } + let entries = p + .read_dir() + .with_context(|| format!("failed to read directory `{}`", p.display()))?; + for entry in entries { + let entry = entry?; + let path = entry.path(); + if entry.file_type()?.is_dir() { + remove_dir_all(&path)?; + } else { + remove_file(&path)?; + } + } + remove_dir(&p) +} + +/// Equivalent to [`std::fs::remove_dir`] with better error messages. +pub fn remove_dir>(p: P) -> Result<()> { + _remove_dir(p.as_ref()) +} + +fn _remove_dir(p: &Path) -> Result<()> { + fs::remove_dir(p).with_context(|| format!("failed to remove directory `{}`", p.display()))?; + Ok(()) +} + +/// Equivalent to [`std::fs::remove_file`] with better error messages. +/// +/// If the file is readonly, this will attempt to change the permissions to +/// force the file to be deleted. +pub fn remove_file>(p: P) -> Result<()> { + _remove_file(p.as_ref()) +} + +fn _remove_file(p: &Path) -> Result<()> { + let mut err = match fs::remove_file(p) { + Ok(()) => return Ok(()), + Err(e) => e, + }; + + if err.kind() == io::ErrorKind::PermissionDenied && set_not_readonly(p).unwrap_or(false) { + match fs::remove_file(p) { + Ok(()) => return Ok(()), + Err(e) => err = e, + } + } + + Err(err).with_context(|| format!("failed to remove file `{}`", p.display()))?; + Ok(()) +} + +fn set_not_readonly(p: &Path) -> io::Result { + let mut perms = p.metadata()?.permissions(); + if !perms.readonly() { + return Ok(false); + } + perms.set_readonly(false); + fs::set_permissions(p, perms)?; + Ok(true) +} + +/// Hardlink (file) or symlink (dir) src to dst if possible, otherwise copy it. +/// +/// If the destination already exists, it is removed before linking. 
+pub fn link_or_copy(src: impl AsRef, dst: impl AsRef) -> Result<()> { + let src = src.as_ref(); + let dst = dst.as_ref(); + _link_or_copy(src, dst) +} + +fn _link_or_copy(src: &Path, dst: &Path) -> Result<()> { + log::debug!("linking {} to {}", src.display(), dst.display()); + if same_file::is_same_file(src, dst).unwrap_or(false) { + return Ok(()); + } + + // NB: we can't use dst.exists(), as if dst is a broken symlink, + // dst.exists() will return false. This is problematic, as we still need to + // unlink dst in this case. symlink_metadata(dst).is_ok() will tell us + // whether dst exists *without* following symlinks, which is what we want. + if fs::symlink_metadata(dst).is_ok() { + remove_file(&dst)?; + } + + let link_result = if src.is_dir() { + #[cfg(target_os = "redox")] + use std::os::redox::fs::symlink; + #[cfg(unix)] + use std::os::unix::fs::symlink; + #[cfg(windows)] + // FIXME: This should probably panic or have a copy fallback. Symlinks + // are not supported in all windows environments. Currently symlinking + // is only used for .dSYM directories on macos, but this shouldn't be + // accidentally relied upon. + use std::os::windows::fs::symlink_dir as symlink; + + let dst_dir = dst.parent().unwrap(); + let src = if src.starts_with(dst_dir) { + src.strip_prefix(dst_dir).unwrap() + } else { + src + }; + symlink(src, dst) + } else if env::var_os("__CARGO_COPY_DONT_LINK_DO_NOT_USE_THIS").is_some() { + // This is a work-around for a bug in macOS 10.15. When running on + // APFS, there seems to be a strange race condition with + // Gatekeeper where it will forcefully kill a process launched via + // `cargo run` with SIGKILL. Copying seems to avoid the problem. + // This shouldn't affect anyone except Cargo's test suite because + // it is very rare, and only seems to happen under heavy load and + // rapidly creating lots of executables and running them. + // See https://github.com/rust-lang/cargo/issues/7821 for the + // gory details. 
+ fs::copy(src, dst).map(|_| ()) + } else { + fs::hard_link(src, dst) + }; + link_result + .or_else(|err| { + log::debug!("link failed {}. falling back to fs::copy", err); + fs::copy(src, dst).map(|_| ()) + }) + .with_context(|| { + format!( + "failed to link or copy `{}` to `{}`", + src.display(), + dst.display() + ) + })?; + Ok(()) +} + +/// Copies a file from one location to another. +/// +/// Equivalent to [`std::fs::copy`] with better error messages. +pub fn copy, Q: AsRef>(from: P, to: Q) -> Result { + let from = from.as_ref(); + let to = to.as_ref(); + fs::copy(from, to) + .with_context(|| format!("failed to copy `{}` to `{}`", from.display(), to.display())) +} + +/// Changes the filesystem mtime (and atime if possible) for the given file. +/// +/// This intentionally does not return an error, as this is sometimes not +/// supported on network filesystems. For the current uses in Cargo, this is a +/// "best effort" approach, and errors shouldn't be propagated. +pub fn set_file_time_no_err>(path: P, time: FileTime) { + let path = path.as_ref(); + match filetime::set_file_times(path, time, time) { + Ok(()) => log::debug!("set file mtime {} to {}", path.display(), time), + Err(e) => log::warn!( + "could not set mtime of {} to {}: {:?}", + path.display(), + time, + e + ), + } +} + +/// Strips `base` from `path`. +/// +/// This canonicalizes both paths before stripping. This is useful if the +/// paths are obtained in different ways, and one or the other may or may not +/// have been normalized in some way. +pub fn strip_prefix_canonical>( + path: P, + base: P, +) -> Result { + // Not all filesystems support canonicalize. Just ignore if it doesn't work. 
+ let safe_canonicalize = |path: &Path| match path.canonicalize() { + Ok(p) => p, + Err(e) => { + log::warn!("cannot canonicalize {:?}: {:?}", path, e); + path.to_path_buf() + } + }; + let canon_path = safe_canonicalize(path.as_ref()); + let canon_base = safe_canonicalize(base.as_ref()); + canon_path.strip_prefix(canon_base).map(|p| p.to_path_buf()) +} + +/// Creates an excluded from cache directory atomically with its parents as needed. +/// +/// The atomicity only covers creating the leaf directory and exclusion from cache. Any missing +/// parent directories will not be created in an atomic manner. +/// +/// This function is idempotent and in addition to that it won't exclude ``p`` from cache if it +/// already exists. +pub fn create_dir_all_excluded_from_backups_atomic(p: impl AsRef) -> Result<()> { + let path = p.as_ref(); + if path.is_dir() { + return Ok(()); + } + + let parent = path.parent().unwrap(); + let base = path.file_name().unwrap(); + create_dir_all(parent)?; + // We do this in two steps (first create a temporary directory and exlucde + // it from backups, then rename it to the desired name. If we created the + // directory directly where it should be and then excluded it from backups + // we would risk a situation where cargo is interrupted right after the directory + // creation but before the exclusion the the directory would remain non-excluded from + // backups because we only perform exclusion right after we created the directory + // ourselves. + // + // We need the tempdir created in parent instead of $TMP, because only then we can be + // easily sure that rename() will succeed (the new name needs to be on the same mount + // point as the old one). 
+ let tempdir = TempFileBuilder::new().prefix(base).tempdir_in(parent)?; + exclude_from_backups(tempdir.path()); + // Previously std::fs::create_dir_all() (through paths::create_dir_all()) was used + // here to create the directory directly and fs::create_dir_all() explicitly treats + // the directory being created concurrently by another thread or process as success, + // hence the check below to follow the existing behavior. If we get an error at + // rename() and suddently the directory (which didn't exist a moment earlier) exists + // we can infer from it it's another cargo process doing work. + if let Err(e) = fs::rename(tempdir.path(), path) { + if !path.exists() { + return Err(anyhow::Error::from(e)); + } + } + Ok(()) +} + +/// Marks the directory as excluded from archives/backups. +/// +/// This is recommended to prevent derived/temporary files from bloating backups. There are two +/// mechanisms used to achieve this right now: +/// +/// * A dedicated resource property excluding from Time Machine backups on macOS +/// * CACHEDIR.TAG files supported by various tools in a platform-independent way +fn exclude_from_backups(path: &Path) { + exclude_from_time_machine(path); + let _ = std::fs::write( + path.join("CACHEDIR.TAG"), + "Signature: 8a477f597d28d172789f06886806bc55 +# This file is a cache directory tag created by cargo. +# For information about cache directory tags see https://bford.info/cachedir/ +", + ); + // Similarly to exclude_from_time_machine() we ignore errors here as it's an optional feature. 
+} + +#[cfg(not(target_os = "macos"))] +fn exclude_from_time_machine(_: &Path) {} + +#[cfg(target_os = "macos")] +/// Marks files or directories as excluded from Time Machine on macOS +fn exclude_from_time_machine(path: &Path) { + use core_foundation::base::TCFType; + use core_foundation::{number, string, url}; + use std::ptr; + + // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey + let is_excluded_key: Result = "NSURLIsExcludedFromBackupKey".parse(); + let path = url::CFURL::from_path(path, false); + if let (Some(path), Ok(is_excluded_key)) = (path, is_excluded_key) { + unsafe { + url::CFURLSetResourcePropertyForKey( + path.as_concrete_TypeRef(), + is_excluded_key.as_concrete_TypeRef(), + number::kCFBooleanTrue as *const _, + ptr::null_mut(), + ); + } + } + // Errors are ignored, since it's an optional feature and failure + // doesn't prevent Cargo from working +} diff -Nru cargo-0.52.0/crates/cargo-util/src/process_builder.rs cargo-0.54.0/crates/cargo-util/src/process_builder.rs --- cargo-0.52.0/crates/cargo-util/src/process_builder.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/src/process_builder.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,417 @@ +use crate::process_error::ProcessError; +use crate::read2; +use anyhow::{bail, Context, Result}; +use jobserver::Client; +use shell_escape::escape; +use std::collections::BTreeMap; +use std::env; +use std::ffi::{OsStr, OsString}; +use std::fmt; +use std::iter::once; +use std::path::Path; +use std::process::{Command, Output, Stdio}; + +/// A builder object for an external process, similar to [`std::process::Command`]. +#[derive(Clone, Debug)] +pub struct ProcessBuilder { + /// The program to execute. + program: OsString, + /// A list of arguments to pass to the program. + args: Vec, + /// Any environment variables that should be set for the program. + env: BTreeMap>, + /// The directory to run the program from. 
+ cwd: Option, + /// The `make` jobserver. See the [jobserver crate] for + /// more information. + /// + /// [jobserver crate]: https://docs.rs/jobserver/ + jobserver: Option, + /// `true` to include environment variable in display. + display_env_vars: bool, +} + +impl fmt::Display for ProcessBuilder { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "`")?; + + if self.display_env_vars { + for (key, val) in self.env.iter() { + if let Some(val) = val { + let val = escape(val.to_string_lossy()); + if cfg!(windows) { + write!(f, "set {}={}&& ", key, val)?; + } else { + write!(f, "{}={} ", key, val)?; + } + } + } + } + + write!(f, "{}", self.program.to_string_lossy())?; + + for arg in &self.args { + write!(f, " {}", escape(arg.to_string_lossy()))?; + } + + write!(f, "`") + } +} + +impl ProcessBuilder { + /// Creates a new [`ProcessBuilder`] with the given executable path. + pub fn new>(cmd: T) -> ProcessBuilder { + ProcessBuilder { + program: cmd.as_ref().to_os_string(), + args: Vec::new(), + cwd: None, + env: BTreeMap::new(), + jobserver: None, + display_env_vars: false, + } + } + + /// (chainable) Sets the executable for the process. + pub fn program>(&mut self, program: T) -> &mut ProcessBuilder { + self.program = program.as_ref().to_os_string(); + self + } + + /// (chainable) Adds `arg` to the args list. + pub fn arg>(&mut self, arg: T) -> &mut ProcessBuilder { + self.args.push(arg.as_ref().to_os_string()); + self + } + + /// (chainable) Adds multiple `args` to the args list. + pub fn args>(&mut self, args: &[T]) -> &mut ProcessBuilder { + self.args + .extend(args.iter().map(|t| t.as_ref().to_os_string())); + self + } + + /// (chainable) Replaces the args list with the given `args`. + pub fn args_replace>(&mut self, args: &[T]) -> &mut ProcessBuilder { + self.args = args.iter().map(|t| t.as_ref().to_os_string()).collect(); + self + } + + /// (chainable) Sets the current working directory of the process. 
+ pub fn cwd>(&mut self, path: T) -> &mut ProcessBuilder { + self.cwd = Some(path.as_ref().to_os_string()); + self + } + + /// (chainable) Sets an environment variable for the process. + pub fn env>(&mut self, key: &str, val: T) -> &mut ProcessBuilder { + self.env + .insert(key.to_string(), Some(val.as_ref().to_os_string())); + self + } + + /// (chainable) Unsets an environment variable for the process. + pub fn env_remove(&mut self, key: &str) -> &mut ProcessBuilder { + self.env.insert(key.to_string(), None); + self + } + + /// Gets the executable name. + pub fn get_program(&self) -> &OsString { + &self.program + } + + /// Gets the program arguments. + pub fn get_args(&self) -> &[OsString] { + &self.args + } + + /// Gets the current working directory for the process. + pub fn get_cwd(&self) -> Option<&Path> { + self.cwd.as_ref().map(Path::new) + } + + /// Gets an environment variable as the process will see it (will inherit from environment + /// unless explicitally unset). + pub fn get_env(&self, var: &str) -> Option { + self.env + .get(var) + .cloned() + .or_else(|| Some(env::var_os(var))) + .and_then(|s| s) + } + + /// Gets all environment variables explicitly set or unset for the process (not inherited + /// vars). + pub fn get_envs(&self) -> &BTreeMap> { + &self.env + } + + /// Sets the `make` jobserver. See the [jobserver crate][jobserver_docs] for + /// more information. + /// + /// [jobserver_docs]: https://docs.rs/jobserver/0.1.6/jobserver/ + pub fn inherit_jobserver(&mut self, jobserver: &Client) -> &mut Self { + self.jobserver = Some(jobserver.clone()); + self + } + + /// Enables environment variable display. + pub fn display_env_vars(&mut self) -> &mut Self { + self.display_env_vars = true; + self + } + + /// Runs the process, waiting for completion, and mapping non-success exit codes to an error. 
+ pub fn exec(&self) -> Result<()> { + let mut command = self.build_command(); + let exit = command.status().with_context(|| { + ProcessError::new(&format!("could not execute process {}", self), None, None) + })?; + + if exit.success() { + Ok(()) + } else { + Err(ProcessError::new( + &format!("process didn't exit successfully: {}", self), + Some(exit), + None, + ) + .into()) + } + } + + /// Replaces the current process with the target process. + /// + /// On Unix, this executes the process using the Unix syscall `execvp`, which will block + /// this process, and will only return if there is an error. + /// + /// On Windows this isn't technically possible. Instead we emulate it to the best of our + /// ability. One aspect we fix here is that we specify a handler for the Ctrl-C handler. + /// In doing so (and by effectively ignoring it) we should emulate proxying Ctrl-C + /// handling to the application at hand, which will either terminate or handle it itself. + /// According to Microsoft's documentation at + /// . + /// the Ctrl-C signal is sent to all processes attached to a terminal, which should + /// include our child process. If the child terminates then we'll reap them in Cargo + /// pretty quickly, and if the child handles the signal then we won't terminate + /// (and we shouldn't!) until the process itself later exits. + pub fn exec_replace(&self) -> Result<()> { + imp::exec_replace(self) + } + + /// Executes the process, returning the stdio output, or an error if non-zero exit status. 
+ pub fn exec_with_output(&self) -> Result { + let mut command = self.build_command(); + + let output = command.output().with_context(|| { + ProcessError::new(&format!("could not execute process {}", self), None, None) + })?; + + if output.status.success() { + Ok(output) + } else { + Err(ProcessError::new( + &format!("process didn't exit successfully: {}", self), + Some(output.status), + Some(&output), + ) + .into()) + } + } + + /// Executes a command, passing each line of stdout and stderr to the supplied callbacks, which + /// can mutate the string data. + /// + /// If any invocations of these function return an error, it will be propagated. + /// + /// If `capture_output` is true, then all the output will also be buffered + /// and stored in the returned `Output` object. If it is false, no caching + /// is done, and the callbacks are solely responsible for handling the + /// output. + pub fn exec_with_streaming( + &self, + on_stdout_line: &mut dyn FnMut(&str) -> Result<()>, + on_stderr_line: &mut dyn FnMut(&str) -> Result<()>, + capture_output: bool, + ) -> Result { + let mut stdout = Vec::new(); + let mut stderr = Vec::new(); + + let mut cmd = self.build_command(); + cmd.stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .stdin(Stdio::null()); + + let mut callback_error = None; + let status = (|| { + let mut child = cmd.spawn()?; + let out = child.stdout.take().unwrap(); + let err = child.stderr.take().unwrap(); + read2(out, err, &mut |is_out, data, eof| { + let idx = if eof { + data.len() + } else { + match data.iter().rposition(|b| *b == b'\n') { + Some(i) => i + 1, + None => return, + } + }; + { + // scope for new_lines + let new_lines = if capture_output { + let dst = if is_out { &mut stdout } else { &mut stderr }; + let start = dst.len(); + let data = data.drain(..idx); + dst.extend(data); + &dst[start..] 
+ } else { + &data[..idx] + }; + for line in String::from_utf8_lossy(new_lines).lines() { + if callback_error.is_some() { + break; + } + let callback_result = if is_out { + on_stdout_line(line) + } else { + on_stderr_line(line) + }; + if let Err(e) = callback_result { + callback_error = Some(e); + } + } + } + if !capture_output { + data.drain(..idx); + } + })?; + child.wait() + })() + .with_context(|| { + ProcessError::new(&format!("could not execute process {}", self), None, None) + })?; + let output = Output { + status, + stdout, + stderr, + }; + + { + let to_print = if capture_output { Some(&output) } else { None }; + if let Some(e) = callback_error { + let cx = ProcessError::new( + &format!("failed to parse process output: {}", self), + Some(output.status), + to_print, + ); + bail!(anyhow::Error::new(cx).context(e)); + } else if !output.status.success() { + bail!(ProcessError::new( + &format!("process didn't exit successfully: {}", self), + Some(output.status), + to_print, + )); + } + } + + Ok(output) + } + + /// Converts `ProcessBuilder` into a `std::process::Command`, and handles the jobserver, if + /// present. + pub fn build_command(&self) -> Command { + let mut command = Command::new(&self.program); + if let Some(cwd) = self.get_cwd() { + command.current_dir(cwd); + } + for arg in &self.args { + command.arg(arg); + } + for (k, v) in &self.env { + match *v { + Some(ref v) => { + command.env(k, v); + } + None => { + command.env_remove(k); + } + } + } + if let Some(ref c) = self.jobserver { + c.configure(&mut command); + } + command + } + + /// Wraps an existing command with the provided wrapper, if it is present and valid. 
+ /// + /// # Examples + /// + /// ```rust + /// use cargo_util::ProcessBuilder; + /// // Running this would execute `rustc` + /// let cmd = ProcessBuilder::new("rustc"); + /// + /// // Running this will execute `sccache rustc` + /// let cmd = cmd.wrapped(Some("sccache")); + /// ``` + pub fn wrapped(mut self, wrapper: Option>) -> Self { + let wrapper = if let Some(wrapper) = wrapper.as_ref() { + wrapper.as_ref() + } else { + return self; + }; + + if wrapper.is_empty() { + return self; + } + + let args = once(self.program).chain(self.args.into_iter()).collect(); + + self.program = wrapper.to_os_string(); + self.args = args; + + self + } +} + +#[cfg(unix)] +mod imp { + use super::{ProcessBuilder, ProcessError}; + use anyhow::Result; + use std::os::unix::process::CommandExt; + + pub fn exec_replace(process_builder: &ProcessBuilder) -> Result<()> { + let mut command = process_builder.build_command(); + let error = command.exec(); + Err(anyhow::Error::from(error).context(ProcessError::new( + &format!("could not execute process {}", process_builder), + None, + None, + ))) + } +} + +#[cfg(windows)] +mod imp { + use super::{ProcessBuilder, ProcessError}; + use anyhow::Result; + use winapi::shared::minwindef::{BOOL, DWORD, FALSE, TRUE}; + use winapi::um::consoleapi::SetConsoleCtrlHandler; + + unsafe extern "system" fn ctrlc_handler(_: DWORD) -> BOOL { + // Do nothing; let the child process handle it. + TRUE + } + + pub fn exec_replace(process_builder: &ProcessBuilder) -> Result<()> { + unsafe { + if SetConsoleCtrlHandler(Some(ctrlc_handler), TRUE) == FALSE { + return Err(ProcessError::new("Could not set Ctrl-C handler.", None, None).into()); + } + } + + // Just execute the process as normal. 
+ process_builder.exec() + } +} diff -Nru cargo-0.52.0/crates/cargo-util/src/process_error.rs cargo-0.54.0/crates/cargo-util/src/process_error.rs --- cargo-0.52.0/crates/cargo-util/src/process_error.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/src/process_error.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,194 @@ +//! Error value for [`crate::ProcessBuilder`] when a process fails. + +use std::fmt; +use std::process::{ExitStatus, Output}; +use std::str; + +#[derive(Debug)] +pub struct ProcessError { + /// A detailed description to show to the user why the process failed. + pub desc: String, + + /// The exit status of the process. + /// + /// This can be `None` if the process failed to launch (like process not + /// found) or if the exit status wasn't a code but was instead something + /// like termination via a signal. + pub code: Option, + + /// The stdout from the process. + /// + /// This can be `None` if the process failed to launch, or the output was + /// not captured. + pub stdout: Option>, + + /// The stderr from the process. + /// + /// This can be `None` if the process failed to launch, or the output was + /// not captured. + pub stderr: Option>, +} + +impl fmt::Display for ProcessError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.desc.fmt(f) + } +} + +impl std::error::Error for ProcessError {} + +impl ProcessError { + /// Creates a new [`ProcessError`]. + /// + /// * `status` can be `None` if the process did not launch. + /// * `output` can be `None` if the process did not launch, or output was not captured. 
+ pub fn new(msg: &str, status: Option, output: Option<&Output>) -> ProcessError { + let exit = match status { + Some(s) => exit_status_to_string(s), + None => "never executed".to_string(), + }; + + Self::new_raw( + msg, + status.and_then(|s| s.code()), + &exit, + output.map(|s| s.stdout.as_slice()), + output.map(|s| s.stderr.as_slice()), + ) + } + + /// Creates a new [`ProcessError`] with the raw output data. + /// + /// * `code` can be `None` for situations like being killed by a signal on unix. + pub fn new_raw( + msg: &str, + code: Option, + status: &str, + stdout: Option<&[u8]>, + stderr: Option<&[u8]>, + ) -> ProcessError { + let mut desc = format!("{} ({})", msg, status); + + if let Some(out) = stdout { + match str::from_utf8(out) { + Ok(s) if !s.trim().is_empty() => { + desc.push_str("\n--- stdout\n"); + desc.push_str(s); + } + Ok(..) | Err(..) => {} + } + } + if let Some(out) = stderr { + match str::from_utf8(out) { + Ok(s) if !s.trim().is_empty() => { + desc.push_str("\n--- stderr\n"); + desc.push_str(s); + } + Ok(..) | Err(..) => {} + } + } + + ProcessError { + desc, + code, + stdout: stdout.map(|s| s.to_vec()), + stderr: stderr.map(|s| s.to_vec()), + } + } +} + +/// Converts an [`ExitStatus`] to a human-readable string suitable for +/// displaying to a user. 
+pub fn exit_status_to_string(status: ExitStatus) -> String { + return status_to_string(status); + + #[cfg(unix)] + fn status_to_string(status: ExitStatus) -> String { + use std::os::unix::process::*; + + if let Some(signal) = status.signal() { + let name = match signal as libc::c_int { + libc::SIGABRT => ", SIGABRT: process abort signal", + libc::SIGALRM => ", SIGALRM: alarm clock", + libc::SIGFPE => ", SIGFPE: erroneous arithmetic operation", + libc::SIGHUP => ", SIGHUP: hangup", + libc::SIGILL => ", SIGILL: illegal instruction", + libc::SIGINT => ", SIGINT: terminal interrupt signal", + libc::SIGKILL => ", SIGKILL: kill", + libc::SIGPIPE => ", SIGPIPE: write on a pipe with no one to read", + libc::SIGQUIT => ", SIGQUIT: terminal quit signal", + libc::SIGSEGV => ", SIGSEGV: invalid memory reference", + libc::SIGTERM => ", SIGTERM: termination signal", + libc::SIGBUS => ", SIGBUS: access to undefined memory", + #[cfg(not(target_os = "haiku"))] + libc::SIGSYS => ", SIGSYS: bad system call", + libc::SIGTRAP => ", SIGTRAP: trace/breakpoint trap", + _ => "", + }; + format!("signal: {}{}", signal, name) + } else { + status.to_string() + } + } + + #[cfg(windows)] + fn status_to_string(status: ExitStatus) -> String { + use winapi::shared::minwindef::DWORD; + use winapi::um::winnt::*; + + let mut base = status.to_string(); + let extra = match status.code().unwrap() as DWORD { + STATUS_ACCESS_VIOLATION => "STATUS_ACCESS_VIOLATION", + STATUS_IN_PAGE_ERROR => "STATUS_IN_PAGE_ERROR", + STATUS_INVALID_HANDLE => "STATUS_INVALID_HANDLE", + STATUS_INVALID_PARAMETER => "STATUS_INVALID_PARAMETER", + STATUS_NO_MEMORY => "STATUS_NO_MEMORY", + STATUS_ILLEGAL_INSTRUCTION => "STATUS_ILLEGAL_INSTRUCTION", + STATUS_NONCONTINUABLE_EXCEPTION => "STATUS_NONCONTINUABLE_EXCEPTION", + STATUS_INVALID_DISPOSITION => "STATUS_INVALID_DISPOSITION", + STATUS_ARRAY_BOUNDS_EXCEEDED => "STATUS_ARRAY_BOUNDS_EXCEEDED", + STATUS_FLOAT_DENORMAL_OPERAND => "STATUS_FLOAT_DENORMAL_OPERAND", + 
STATUS_FLOAT_DIVIDE_BY_ZERO => "STATUS_FLOAT_DIVIDE_BY_ZERO", + STATUS_FLOAT_INEXACT_RESULT => "STATUS_FLOAT_INEXACT_RESULT", + STATUS_FLOAT_INVALID_OPERATION => "STATUS_FLOAT_INVALID_OPERATION", + STATUS_FLOAT_OVERFLOW => "STATUS_FLOAT_OVERFLOW", + STATUS_FLOAT_STACK_CHECK => "STATUS_FLOAT_STACK_CHECK", + STATUS_FLOAT_UNDERFLOW => "STATUS_FLOAT_UNDERFLOW", + STATUS_INTEGER_DIVIDE_BY_ZERO => "STATUS_INTEGER_DIVIDE_BY_ZERO", + STATUS_INTEGER_OVERFLOW => "STATUS_INTEGER_OVERFLOW", + STATUS_PRIVILEGED_INSTRUCTION => "STATUS_PRIVILEGED_INSTRUCTION", + STATUS_STACK_OVERFLOW => "STATUS_STACK_OVERFLOW", + STATUS_DLL_NOT_FOUND => "STATUS_DLL_NOT_FOUND", + STATUS_ORDINAL_NOT_FOUND => "STATUS_ORDINAL_NOT_FOUND", + STATUS_ENTRYPOINT_NOT_FOUND => "STATUS_ENTRYPOINT_NOT_FOUND", + STATUS_CONTROL_C_EXIT => "STATUS_CONTROL_C_EXIT", + STATUS_DLL_INIT_FAILED => "STATUS_DLL_INIT_FAILED", + STATUS_FLOAT_MULTIPLE_FAULTS => "STATUS_FLOAT_MULTIPLE_FAULTS", + STATUS_FLOAT_MULTIPLE_TRAPS => "STATUS_FLOAT_MULTIPLE_TRAPS", + STATUS_REG_NAT_CONSUMPTION => "STATUS_REG_NAT_CONSUMPTION", + STATUS_HEAP_CORRUPTION => "STATUS_HEAP_CORRUPTION", + STATUS_STACK_BUFFER_OVERRUN => "STATUS_STACK_BUFFER_OVERRUN", + STATUS_ASSERTION_FAILURE => "STATUS_ASSERTION_FAILURE", + _ => return base, + }; + base.push_str(", "); + base.push_str(extra); + base + } +} + +/// Returns `true` if the given process exit code is something a normal +/// process would exit with. +/// +/// This helps differentiate from abnormal termination codes, such as +/// segmentation faults or signals. +pub fn is_simple_exit_code(code: i32) -> bool { + // Typical unix exit codes are 0 to 127. + // Windows doesn't have anything "typical", and is a + // 32-bit number (which appears signed here, but is really + // unsigned). However, most of the interesting NTSTATUS + // codes are very large. This is just a rough + // approximation of which codes are "normal" and which + // ones are abnormal termination. 
+ code >= 0 && code <= 127 +} diff -Nru cargo-0.52.0/crates/cargo-util/src/read2.rs cargo-0.54.0/crates/cargo-util/src/read2.rs --- cargo-0.52.0/crates/cargo-util/src/read2.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/src/read2.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,178 @@ +pub use self::imp::read2; + +#[cfg(unix)] +mod imp { + use std::io; + use std::io::prelude::*; + use std::mem; + use std::os::unix::prelude::*; + use std::process::{ChildStderr, ChildStdout}; + + pub fn read2( + mut out_pipe: ChildStdout, + mut err_pipe: ChildStderr, + data: &mut dyn FnMut(bool, &mut Vec, bool), + ) -> io::Result<()> { + unsafe { + libc::fcntl(out_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); + libc::fcntl(err_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); + } + + let mut out_done = false; + let mut err_done = false; + let mut out = Vec::new(); + let mut err = Vec::new(); + + let mut fds: [libc::pollfd; 2] = unsafe { mem::zeroed() }; + fds[0].fd = out_pipe.as_raw_fd(); + fds[0].events = libc::POLLIN; + fds[1].fd = err_pipe.as_raw_fd(); + fds[1].events = libc::POLLIN; + let mut nfds = 2; + let mut errfd = 1; + + while nfds > 0 { + // wait for either pipe to become readable using `select` + let r = unsafe { libc::poll(fds.as_mut_ptr(), nfds, -1) }; + if r == -1 { + let err = io::Error::last_os_error(); + if err.kind() == io::ErrorKind::Interrupted { + continue; + } + return Err(err); + } + + // Read as much as we can from each pipe, ignoring EWOULDBLOCK or + // EAGAIN. If we hit EOF, then this will happen because the underlying + // reader will return Ok(0), in which case we'll see `Ok` ourselves. In + // this case we flip the other fd back into blocking mode and read + // whatever's leftover on that file descriptor. 
+ let handle = |res: io::Result<_>| match res { + Ok(_) => Ok(true), + Err(e) => { + if e.kind() == io::ErrorKind::WouldBlock { + Ok(false) + } else { + Err(e) + } + } + }; + if !err_done && fds[errfd].revents != 0 && handle(err_pipe.read_to_end(&mut err))? { + err_done = true; + nfds -= 1; + } + data(false, &mut err, err_done); + if !out_done && fds[0].revents != 0 && handle(out_pipe.read_to_end(&mut out))? { + out_done = true; + fds[0].fd = err_pipe.as_raw_fd(); + errfd = 0; + nfds -= 1; + } + data(true, &mut out, out_done); + } + Ok(()) + } +} + +#[cfg(windows)] +mod imp { + use std::io; + use std::os::windows::prelude::*; + use std::process::{ChildStderr, ChildStdout}; + use std::slice; + + use miow::iocp::{CompletionPort, CompletionStatus}; + use miow::pipe::NamedPipe; + use miow::Overlapped; + use winapi::shared::winerror::ERROR_BROKEN_PIPE; + + struct Pipe<'a> { + dst: &'a mut Vec, + overlapped: Overlapped, + pipe: NamedPipe, + done: bool, + } + + pub fn read2( + out_pipe: ChildStdout, + err_pipe: ChildStderr, + data: &mut dyn FnMut(bool, &mut Vec, bool), + ) -> io::Result<()> { + let mut out = Vec::new(); + let mut err = Vec::new(); + + let port = CompletionPort::new(1)?; + port.add_handle(0, &out_pipe)?; + port.add_handle(1, &err_pipe)?; + + unsafe { + let mut out_pipe = Pipe::new(out_pipe, &mut out); + let mut err_pipe = Pipe::new(err_pipe, &mut err); + + out_pipe.read()?; + err_pipe.read()?; + + let mut status = [CompletionStatus::zero(), CompletionStatus::zero()]; + + while !out_pipe.done || !err_pipe.done { + for status in port.get_many(&mut status, None)? 
{ + if status.token() == 0 { + out_pipe.complete(status); + data(true, out_pipe.dst, out_pipe.done); + out_pipe.read()?; + } else { + err_pipe.complete(status); + data(false, err_pipe.dst, err_pipe.done); + err_pipe.read()?; + } + } + } + + Ok(()) + } + } + + impl<'a> Pipe<'a> { + unsafe fn new(p: P, dst: &'a mut Vec) -> Pipe<'a> { + Pipe { + dst, + pipe: NamedPipe::from_raw_handle(p.into_raw_handle()), + overlapped: Overlapped::zero(), + done: false, + } + } + + unsafe fn read(&mut self) -> io::Result<()> { + let dst = slice_to_end(self.dst); + match self.pipe.read_overlapped(dst, self.overlapped.raw()) { + Ok(_) => Ok(()), + Err(e) => { + if e.raw_os_error() == Some(ERROR_BROKEN_PIPE as i32) { + self.done = true; + Ok(()) + } else { + Err(e) + } + } + } + } + + unsafe fn complete(&mut self, status: &CompletionStatus) { + let prev = self.dst.len(); + self.dst.set_len(prev + status.bytes_transferred() as usize); + if status.bytes_transferred() == 0 { + self.done = true; + } + } + } + + unsafe fn slice_to_end(v: &mut Vec) -> &mut [u8] { + if v.capacity() == 0 { + v.reserve(16); + } + if v.capacity() == v.len() { + v.reserve(1); + } + slice::from_raw_parts_mut(v.as_mut_ptr().add(v.len()), v.capacity() - v.len()) + } +} diff -Nru cargo-0.52.0/crates/cargo-util/src/sha256.rs cargo-0.54.0/crates/cargo-util/src/sha256.rs --- cargo-0.52.0/crates/cargo-util/src/sha256.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/crates/cargo-util/src/sha256.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,56 @@ +use super::paths; +use anyhow::{Context, Result}; +use crypto_hash::{Algorithm, Hasher}; +use std::fs::File; +use std::io::{self, Read, Write}; +use std::path::Path; + +pub struct Sha256(Hasher); + +impl Sha256 { + pub fn new() -> Sha256 { + let hasher = Hasher::new(Algorithm::SHA256); + Sha256(hasher) + } + + pub fn update(&mut self, bytes: &[u8]) -> &mut Sha256 { + let _ = self.0.write_all(bytes); + self + } + + pub fn update_file(&mut self, mut file: &File) -> 
io::Result<&mut Sha256> { + let mut buf = [0; 64 * 1024]; + loop { + let n = file.read(&mut buf)?; + if n == 0 { + break Ok(self); + } + self.update(&buf[..n]); + } + } + + pub fn update_path>(&mut self, path: P) -> Result<&mut Sha256> { + let path = path.as_ref(); + let file = paths::open(path)?; + self.update_file(&file) + .with_context(|| format!("failed to read `{}`", path.display()))?; + Ok(self) + } + + pub fn finish(&mut self) -> [u8; 32] { + let mut ret = [0u8; 32]; + let data = self.0.finish(); + ret.copy_from_slice(&data[..]); + ret + } + + pub fn finish_hex(&mut self) -> String { + hex::encode(self.finish()) + } +} + +impl Default for Sha256 { + fn default() -> Self { + Self::new() + } +} diff -Nru cargo-0.52.0/crates/crates-io/lib.rs cargo-0.54.0/crates/crates-io/lib.rs --- cargo-0.52.0/crates/crates-io/lib.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/crates/crates-io/lib.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,5 +1,4 @@ -#![allow(unknown_lints)] -#![allow(clippy::identity_op)] // used for vertical alignment +#![allow(clippy::all)] use std::collections::BTreeMap; use std::fmt; @@ -155,13 +154,13 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ResponseError::Curl(e) => write!(f, "{}", e), - ResponseError::Api { code, errors } => write!( - f, - "api errors (status {} {}): {}", - code, - reason(*code), - errors.join(", ") - ), + ResponseError::Api { code, errors } => { + f.write_str("the remote server responded with an error")?; + if *code != 200 { + write!(f, " (status {} {})", code, reason(*code))?; + }; + write!(f, ": {}", errors.join(", ")) + } ResponseError::Code { code, headers, diff -Nru cargo-0.52.0/crates/resolver-tests/Cargo.toml cargo-0.54.0/crates/resolver-tests/Cargo.toml --- cargo-0.52.0/crates/resolver-tests/Cargo.toml 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/crates/resolver-tests/Cargo.toml 2021-04-27 14:35:53.000000000 +0000 @@ -6,6 +6,7 @@ [dependencies] cargo = { path = "../.." 
} +cargo-util = { path = "../cargo-util" } proptest = "0.9.1" lazy_static = "1.3.0" varisat = "0.2.1" diff -Nru cargo-0.52.0/crates/resolver-tests/src/lib.rs cargo-0.54.0/crates/resolver-tests/src/lib.rs --- cargo-0.52.0/crates/resolver-tests/src/lib.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/crates/resolver-tests/src/lib.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,5 +1,4 @@ -#![allow(clippy::many_single_char_names)] -#![allow(clippy::needless_range_loop)] // false positives +#![allow(clippy::all)] use std::cell::RefCell; use std::cmp::PartialEq; @@ -123,7 +122,7 @@ struct MyRegistry<'a> { list: &'a [Summary], used: HashSet, - }; + } impl<'a> Registry for MyRegistry<'a> { fn query( &mut self, @@ -969,12 +968,14 @@ } /// Assert `xs` contains `elems` +#[track_caller] pub fn assert_contains(xs: &[A], elems: &[A]) { for elem in elems { assert!(xs.contains(elem)); } } +#[track_caller] pub fn assert_same(a: &[A], b: &[A]) { assert_eq!(a.len(), b.len()); assert_contains(b, a); diff -Nru cargo-0.52.0/crates/resolver-tests/tests/resolve.rs cargo-0.54.0/crates/resolver-tests/tests/resolve.rs --- cargo-0.52.0/crates/resolver-tests/tests/resolve.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/crates/resolver-tests/tests/resolve.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,6 +1,7 @@ use cargo::core::dependency::DepKind; -use cargo::core::{enable_nightly_features, Dependency}; -use cargo::util::{is_ci, Config}; +use cargo::core::Dependency; +use cargo::util::Config; +use cargo_util::is_ci; use resolver_tests::{ assert_contains, assert_same, dep, dep_kind, dep_loc, dep_req, dep_req_kind, loc_names, names, @@ -55,9 +56,8 @@ fn prop_minimum_version_errors_the_same( PrettyPrintRegistry(input) in registry_strategy(50, 20, 60) ) { - enable_nightly_features(); - let mut config = Config::default().unwrap(); + config.nightly_features_allowed = true; config .configure( 1, @@ -553,11 +553,6 @@ #[test] fn test_resolving_minimum_version_with_transitive_deps() { - 
enable_nightly_features(); // -Z minimal-versions - // When the minimal-versions config option is specified then the lowest - // possible version of a package should be selected. "util 1.0.0" can't be - // selected because of the requirements of "bar", so the minimum version - // must be 1.1.1. let reg = registry(vec![ pkg!(("util", "1.2.2")), pkg!(("util", "1.0.0")), @@ -567,6 +562,12 @@ ]); let mut config = Config::default().unwrap(); + // -Z minimal-versions + // When the minimal-versions config option is specified then the lowest + // possible version of a package should be selected. "util 1.0.0" can't be + // selected because of the requirements of "bar", so the minimum version + // must be 1.1.1. + config.nightly_features_allowed = true; config .configure( 1, diff -Nru cargo-0.52.0/debian/changelog cargo-0.54.0/debian/changelog --- cargo-0.52.0/debian/changelog 2021-07-22 22:59:21.000000000 +0000 +++ cargo-0.54.0/debian/changelog 2021-10-29 11:12:15.000000000 +0000 @@ -1,17 +1,43 @@ -cargo (0.52.0-0ubuntu1~21.04.1) hirsute; urgency=medium +cargo (0.54.0-0ubuntu1~21.04.1) hirsute; urgency=medium - * Backport to Hirsute. (LP: #1932145) + [ Michael Hudson-Doyle ] + * Backport to Hirsute. (LP: #1943842) + * Drop change to -march on armhf. - -- Michael Hudson-Doyle Fri, 23 Jul 2021 10:59:21 +1200 + -- Olivier Tilloy Fri, 29 Oct 2021 13:12:15 +0200 -cargo (0.52.0-0ubuntu1) UNRELEASED; urgency=medium +cargo (0.54.0-0ubuntu1) UNRELEASED; urgency=medium + + * Update debian/watch file to account for GitHub changes. + * New upstream version. + * Update patches. + * Cherry pick 2112-handle-4-siphasher-algorithms.patch from Debian. + + -- Michael Hudson-Doyle Wed, 27 Oct 2021 10:18:02 +1300 + +cargo (0.53.0-0ubuntu2) jammy; urgency=medium + + * Disable lto::test_profile test on i386. + + -- Michael Hudson-Doyle Tue, 26 Oct 2021 11:17:40 +1300 + +cargo (0.53.0-0ubuntu1) jammy; urgency=medium + + * Do not exclude libgit2 source from libgit2-sys crate. 
+ * New upstream version. + * Update patches. + * Set -march appropriately on armhf. + + -- Michael Hudson-Doyle Fri, 22 Oct 2021 16:19:48 +1300 + +cargo (0.52.0-0ubuntu1) impish; urgency=medium * New upstream version. * Backport patch from upstream to fix failure with new version of 'tar' crate. * Add another patch from upstream - -- Michael Hudson-Doyle Tue, 06 Jul 2021 11:18:18 +1200 + -- Michael Hudson-Doyle Mon, 12 Jul 2021 22:31:53 +1200 cargo (0.51.0-0ubuntu1) hirsute; urgency=medium diff -Nru cargo-0.52.0/debian/control cargo-0.54.0/debian/control --- cargo-0.52.0/debian/control 2021-07-22 22:59:18.000000000 +0000 +++ cargo-0.54.0/debian/control 2021-10-29 11:10:07.000000000 +0000 @@ -19,7 +19,6 @@ python3:native, libcurl4-gnutls-dev | libcurl4-openssl-dev, libssh2-1-dev, - libgit2-dev (>= 1), libhttp-parser-dev, libssl-dev, zlib1g-dev, diff -Nru cargo-0.52.0/debian/patches/0001-Minor-update-to-registry-API-error-messages.patch cargo-0.54.0/debian/patches/0001-Minor-update-to-registry-API-error-messages.patch --- cargo-0.52.0/debian/patches/0001-Minor-update-to-registry-API-error-messages.patch 2021-07-12 05:00:18.000000000 +0000 +++ cargo-0.54.0/debian/patches/0001-Minor-update-to-registry-API-error-messages.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,411 +0,0 @@ -From c9bd6e12e1c34dc8f2d44d6ceb2f0b443796d4a8 Mon Sep 17 00:00:00 2001 -From: Eric Huss -Date: Sat, 27 Feb 2021 12:38:17 -0800 -Subject: [PATCH] Minor update to registry API error messages. 
- ---- - crates/crates-io/lib.rs | 14 ++-- - src/cargo/ops/registry.rs | 163 ++++++++++++++++++++----------------- - tests/testsuite/owner.rs | 7 +- - tests/testsuite/publish.rs | 78 +++++++++++++++--- - tests/testsuite/yank.rs | 2 +- - 5 files changed, 170 insertions(+), 94 deletions(-) - -diff --git a/crates/crates-io/lib.rs b/crates/crates-io/lib.rs -index 4ae5069de..1358fefe9 100644 ---- a/crates/crates-io/lib.rs -+++ b/crates/crates-io/lib.rs -@@ -155,13 +155,13 @@ impl fmt::Display for ResponseError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - ResponseError::Curl(e) => write!(f, "{}", e), -- ResponseError::Api { code, errors } => write!( -- f, -- "api errors (status {} {}): {}", -- code, -- reason(*code), -- errors.join(", ") -- ), -+ ResponseError::Api { code, errors } => { -+ f.write_str("the remote server responded with an error")?; -+ if *code != 200 { -+ write!(f, " (status {} {})", code, reason(*code))?; -+ }; -+ write!(f, ": {}", errors.join(", ")) -+ } - ResponseError::Code { - code, - headers, -diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs -index 7032ae130..e95918efb 100644 ---- a/src/cargo/ops/registry.rs -+++ b/src/cargo/ops/registry.rs -@@ -286,65 +286,62 @@ fn transmit( - None => BTreeMap::new(), - }; - -- let publish = registry.publish( -- &NewCrate { -- name: pkg.name().to_string(), -- vers: pkg.version().to_string(), -- deps, -- features: string_features, -- authors: authors.clone(), -- description: description.clone(), -- homepage: homepage.clone(), -- documentation: documentation.clone(), -- keywords: keywords.clone(), -- categories: categories.clone(), -- readme: readme_content, -- readme_file: readme.clone(), -- repository: repository.clone(), -- license: license.clone(), -- license_file: license_file.clone(), -- badges: badges.clone(), -- links: links.clone(), -- v: None, -- }, -- tarball, -- ); -- -- match publish { -- Ok(warnings) => { -- if !warnings.invalid_categories.is_empty() { -- 
let msg = format!( -- "the following are not valid category slugs and were \ -- ignored: {}. Please see https://crates.io/category_slugs \ -- for the list of all category slugs. \ -- ", -- warnings.invalid_categories.join(", ") -- ); -- config.shell().warn(&msg)?; -- } -- -- if !warnings.invalid_badges.is_empty() { -- let msg = format!( -- "the following are not valid badges and were ignored: {}. \ -- Either the badge type specified is unknown or a required \ -- attribute is missing. Please see \ -- https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata \ -- for valid badge types and their required attributes.", -- warnings.invalid_badges.join(", ") -- ); -- config.shell().warn(&msg)?; -- } -+ let warnings = registry -+ .publish( -+ &NewCrate { -+ name: pkg.name().to_string(), -+ vers: pkg.version().to_string(), -+ deps, -+ features: string_features, -+ authors: authors.clone(), -+ description: description.clone(), -+ homepage: homepage.clone(), -+ documentation: documentation.clone(), -+ keywords: keywords.clone(), -+ categories: categories.clone(), -+ readme: readme_content, -+ readme_file: readme.clone(), -+ repository: repository.clone(), -+ license: license.clone(), -+ license_file: license_file.clone(), -+ badges: badges.clone(), -+ links: links.clone(), -+ v: None, -+ }, -+ tarball, -+ ) -+ .chain_err(|| format!("failed to publish to registry at {}", registry.host()))?; -+ -+ if !warnings.invalid_categories.is_empty() { -+ let msg = format!( -+ "the following are not valid category slugs and were \ -+ ignored: {}. Please see https://crates.io/category_slugs \ -+ for the list of all category slugs. \ -+ ", -+ warnings.invalid_categories.join(", ") -+ ); -+ config.shell().warn(&msg)?; -+ } - -- if !warnings.other.is_empty() { -- for msg in warnings.other { -- config.shell().warn(&msg)?; -- } -- } -+ if !warnings.invalid_badges.is_empty() { -+ let msg = format!( -+ "the following are not valid badges and were ignored: {}. 
\ -+ Either the badge type specified is unknown or a required \ -+ attribute is missing. Please see \ -+ https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata \ -+ for valid badge types and their required attributes.", -+ warnings.invalid_badges.join(", ") -+ ); -+ config.shell().warn(&msg)?; -+ } - -- Ok(()) -+ if !warnings.other.is_empty() { -+ for msg in warnings.other { -+ config.shell().warn(&msg)?; - } -- Err(e) => Err(e), - } -+ -+ Ok(()) - } - - /// Returns the index and token from the config file for the given registry. -@@ -731,9 +728,9 @@ pub fn registry_login( - input - .lock() - .read_line(&mut line) -- .chain_err(|| "failed to read stdin") -- .map_err(anyhow::Error::from)?; -- // Automatically remove `cargo login` from an inputted token to allow direct pastes from `registry.host()`/me. -+ .chain_err(|| "failed to read stdin")?; -+ // Automatically remove `cargo login` from an inputted token to -+ // allow direct pastes from `registry.host()`/me. - line.replace("cargo login", "").trim().to_string() - } - }; -@@ -820,9 +817,13 @@ pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> { - - if let Some(ref v) = opts.to_add { - let v = v.iter().map(|s| &s[..]).collect::>(); -- let msg = registry -- .add_owners(&name, &v) -- .map_err(|e| format_err!("failed to invite owners to crate {}: {}", name, e))?; -+ let msg = registry.add_owners(&name, &v).chain_err(|| { -+ format!( -+ "failed to invite owners to crate `{}` on registry at {}", -+ name, -+ registry.host() -+ ) -+ })?; - - config.shell().status("Owner", msg)?; - } -@@ -832,15 +833,23 @@ pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> { - config - .shell() - .status("Owner", format!("removing {:?} from crate {}", v, name))?; -- registry -- .remove_owners(&name, &v) -- .chain_err(|| format!("failed to remove owners from crate {}", name))?; -+ registry.remove_owners(&name, &v).chain_err(|| { -+ format!( -+ "failed to remove owners 
from crate `{}` on registry at {}", -+ name, -+ registry.host() -+ ) -+ })?; - } - - if opts.list { -- let owners = registry -- .list_owners(&name) -- .chain_err(|| format!("failed to list owners of crate {}", name))?; -+ let owners = registry.list_owners(&name).chain_err(|| { -+ format!( -+ "failed to list owners of crate `{}` on registry at {}", -+ name, -+ registry.host() -+ ) -+ })?; - for owner in owners.iter() { - drop_print!(config, "{}", owner.login); - match (owner.name.as_ref(), owner.email.as_ref()) { -@@ -882,16 +891,19 @@ pub fn yank( - config - .shell() - .status("Unyank", format!("{}:{}", name, version))?; -- registry -- .unyank(&name, &version) -- .chain_err(|| "failed to undo a yank")?; -+ registry.unyank(&name, &version).chain_err(|| { -+ format!( -+ "failed to undo a yank from the registry at {}", -+ registry.host() -+ ) -+ })?; - } else { - config - .shell() - .status("Yank", format!("{}:{}", name, version))?; - registry - .yank(&name, &version) -- .chain_err(|| "failed to yank")?; -+ .chain_err(|| format!("failed to yank from the registry at {}", registry.host()))?; - } - - Ok(()) -@@ -937,9 +949,12 @@ pub fn search( - } - - let (mut registry, _, source_id) = registry(config, None, index, reg, false, false)?; -- let (crates, total_crates) = registry -- .search(query, limit) -- .chain_err(|| "failed to retrieve search results from the registry")?; -+ let (crates, total_crates) = registry.search(query, limit).chain_err(|| { -+ format!( -+ "failed to retrieve search results from the registry at {}", -+ registry.host() -+ ) -+ })?; - - let names = crates - .iter() -diff --git a/tests/testsuite/owner.rs b/tests/testsuite/owner.rs -index e4e3cd2cf..8c4bcbe17 100644 ---- a/tests/testsuite/owner.rs -+++ b/tests/testsuite/owner.rs -@@ -81,7 +81,10 @@ fn simple_add() { - .with_status(101) - .with_stderr( - " Updating `[..]` index --error: failed to invite owners to crate foo: EOF while parsing a value at line 1 column 0", -+error: failed to invite owners 
to crate `foo` on registry at file://[..] -+ -+Caused by: -+ EOF while parsing a value at line 1 column 0", - ) - .run(); - } -@@ -111,7 +114,7 @@ fn simple_remove() { - .with_stderr( - " Updating `[..]` index - Owner removing [\"username\"] from crate foo --error: failed to remove owners from crate foo -+error: failed to remove owners from crate `foo` on registry at file://[..] - - Caused by: - EOF while parsing a value at line 1 column 0", -diff --git a/tests/testsuite/publish.rs b/tests/testsuite/publish.rs -index b0a0c547b..95addc04c 100644 ---- a/tests/testsuite/publish.rs -+++ b/tests/testsuite/publish.rs -@@ -1490,7 +1490,56 @@ fn api_error_json() { - [UPDATING] [..] - [PACKAGING] foo v0.0.1 [..] - [UPLOADING] foo v0.0.1 [..] --[ERROR] api errors (status 403 Forbidden): you must be logged in -+[ERROR] failed to publish to registry at http://127.0.0.1:[..]/ -+ -+Caused by: -+ the remote server responded with an error (status 403 Forbidden): you must be logged in -+", -+ ) -+ .run(); -+ -+ t.join().unwrap(); -+} -+ -+#[cargo_test] -+fn api_error_200() { -+ // Registry returns an API error with a 200 status code. -+ let t = registry::RegistryBuilder::new().build_api_server(&|_headers| { -+ ( -+ 200, -+ &r#"{"errors": [{"detail": "max upload size is 123"}]}"#, -+ ) -+ }); -+ -+ let p = project() -+ .file( -+ "Cargo.toml", -+ r#" -+ [project] -+ name = "foo" -+ version = "0.0.1" -+ authors = [] -+ license = "MIT" -+ description = "foo" -+ documentation = "foo" -+ homepage = "foo" -+ repository = "foo" -+ "#, -+ ) -+ .file("src/lib.rs", "") -+ .build(); -+ -+ p.cargo("publish --no-verify --registry alternative") -+ .with_status(101) -+ .with_stderr( -+ "\ -+[UPDATING] [..] -+[PACKAGING] foo v0.0.1 [..] -+[UPLOADING] foo v0.0.1 [..] -+[ERROR] failed to publish to registry at http://127.0.0.1:[..]/ -+ -+Caused by: -+ the remote server responded with an error: max upload size is 123 - ", - ) - .run(); -@@ -1528,13 +1577,16 @@ fn api_error_code() { - [UPDATING] [..] 
- [PACKAGING] foo v0.0.1 [..] - [UPLOADING] foo v0.0.1 [..] --[ERROR] failed to get a 200 OK response, got 400 --headers: --HTTP/1.1 400 --Content-Length: 7 -- --body: --go away -+[ERROR] failed to publish to registry at http://127.0.0.1:[..]/ -+ -+Caused by: -+ failed to get a 200 OK response, got 400 -+ headers: -+ HTTP/1.1 400 -+ Content-Length: 7 -+ -+ body: -+ go away - ", - ) - .run(); -@@ -1577,7 +1629,10 @@ fn api_curl_error() { - [UPDATING] [..] - [PACKAGING] foo v0.0.1 [..] - [UPLOADING] foo v0.0.1 [..] --[ERROR] [52] [..] -+[ERROR] failed to publish to registry at http://127.0.0.1:[..]/ -+ -+Caused by: -+ [52] [..] - ", - ) - .run(); -@@ -1616,7 +1671,10 @@ fn api_other_error() { - [UPDATING] [..] - [PACKAGING] foo v0.0.1 [..] - [UPLOADING] foo v0.0.1 [..] --[ERROR] invalid response from server -+[ERROR] failed to publish to registry at http://127.0.0.1:[..]/ -+ -+Caused by: -+ invalid response from server - - Caused by: - response body was not valid utf-8 -diff --git a/tests/testsuite/yank.rs b/tests/testsuite/yank.rs -index f9d6266a1..e70f53940 100644 ---- a/tests/testsuite/yank.rs -+++ b/tests/testsuite/yank.rs -@@ -39,7 +39,7 @@ fn simple() { - .with_stderr( - " Updating `[..]` index - Unyank foo:0.0.1 --error: failed to undo a yank -+error: failed to undo a yank from the registry at file:///[..] 
- - Caused by: - EOF while parsing a value at line 1 column 0", --- -2.25.1 - diff -Nru cargo-0.52.0/debian/patches/0001-Update-tar-dependency-to-0.4.34.patch cargo-0.54.0/debian/patches/0001-Update-tar-dependency-to-0.4.34.patch --- cargo-0.52.0/debian/patches/0001-Update-tar-dependency-to-0.4.34.patch 2021-07-12 05:00:18.000000000 +0000 +++ cargo-0.54.0/debian/patches/0001-Update-tar-dependency-to-0.4.34.patch 2021-10-28 19:57:51.000000000 +0000 @@ -14,7 +14,7 @@ --- a/Cargo.toml +++ b/Cargo.toml -@@ -57,7 +57,7 @@ +@@ -56,7 +56,7 @@ serde_json = { version = "1.0.30", features = ["raw_value"] } shell-escape = "0.1.4" strip-ansi-escapes = "0.1.0" @@ -25,7 +25,7 @@ toml = "0.5.7" --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs -@@ -525,6 +525,8 @@ +@@ -524,6 +524,8 @@ header.set_entry_type(EntryType::file()); header.set_mode(0o644); header.set_size(contents.len() as u64); @@ -33,7 +33,7 @@ + header.set_mtime(1); header.set_cksum(); ar.append_data(&mut header, &ar_path, contents.as_bytes()) - .chain_err(|| format!("could not archive source file `{}`", rel_str))?; + .with_context(|| format!("could not archive source file `{}`", rel_str))?; --- a/tests/testsuite/package.rs +++ b/tests/testsuite/package.rs @@ -1947,9 +1947,10 @@ diff -Nru cargo-0.52.0/debian/patches/2112-handle-4-siphasher-algorithms.patch cargo-0.54.0/debian/patches/2112-handle-4-siphasher-algorithms.patch --- cargo-0.52.0/debian/patches/2112-handle-4-siphasher-algorithms.patch 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/debian/patches/2112-handle-4-siphasher-algorithms.patch 2021-10-28 19:58:51.000000000 +0000 @@ -0,0 +1,19 @@ +Bug: https://github.com/rust-lang/cargo/issues/10004 + +--- a/src/cargo/core/source/source_id.rs ++++ b/src/cargo/core/source/source_id.rs +@@ -584,7 +584,13 @@ + fn test_cratesio_hash() { + let config = Config::default().unwrap(); + let crates_io = SourceId::crates_io(&config).unwrap(); +- assert_eq!(crate::util::hex::short_hash(&crates_io), 
"1ecc6299db9ec823"); ++ assert!([ ++ "1ecc6299db9ec823", // 64 LE ++ "1285ae84e5963aae", // 32 LE ++ "eae4ba8cbf2ce1c7", // 64 BE ++ "b420f105fcaca6de", // 32 BE ++ ] ++ .contains(&crate::util::hex::short_hash(&crates_io).as_str())); + } + + /// A `Display`able view into a `SourceId` that will write it as a url diff -Nru cargo-0.52.0/debian/patches/disable-lto-test_profiele.patch cargo-0.54.0/debian/patches/disable-lto-test_profiele.patch --- cargo-0.52.0/debian/patches/disable-lto-test_profiele.patch 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/debian/patches/disable-lto-test_profiele.patch 2021-10-28 19:57:51.000000000 +0000 @@ -0,0 +1,10 @@ +--- a/tests/testsuite/lto.rs ++++ b/tests/testsuite/lto.rs +@@ -589,6 +589,7 @@ + } + + #[cargo_test] ++#[cfg(not(target_arch = "x86"))] + fn test_profile() { + Package::new("bar", "0.0.1") + .file("src/lib.rs", "pub fn foo() -> i32 { 123 } ") diff -Nru cargo-0.52.0/debian/patches/series cargo-0.54.0/debian/patches/series --- cargo-0.52.0/debian/patches/series 2021-07-22 22:59:18.000000000 +0000 +++ cargo-0.54.0/debian/patches/series 2021-10-29 11:12:06.000000000 +0000 @@ -3,4 +3,5 @@ skip-filters_target-i386.patch skip-filter_platform-non-amd64.patch 0001-Update-tar-dependency-to-0.4.34.patch -0001-Minor-update-to-registry-API-error-messages.patch +disable-lto-test_profiele.patch +2112-handle-4-siphasher-algorithms.patch diff -Nru cargo-0.52.0/debian/scripts/debian-cargo-vendor cargo-0.54.0/debian/scripts/debian-cargo-vendor --- cargo-0.52.0/debian/scripts/debian-cargo-vendor 2020-05-27 21:12:31.000000000 +0000 +++ cargo-0.54.0/debian/scripts/debian-cargo-vendor 2021-10-28 19:57:51.000000000 +0000 @@ -111,7 +111,13 @@ # remove excluded files ( cd vendor -for i in *; do ( + for i in *; do + case $i in + libgit2-sys) + continue + ;; + esac + ( debname=$(crate_to_debcargo_conf "$i") shopt -s globstar # needed for double-glob to work in excludes cd $i diff -Nru cargo-0.52.0/debian/watch cargo-0.54.0/debian/watch --- 
cargo-0.52.0/debian/watch 2021-07-12 05:00:18.000000000 +0000 +++ cargo-0.54.0/debian/watch 2021-10-28 19:57:51.000000000 +0000 @@ -1,2 +1,4 @@ -version=3 -https://github.com/rust-lang/cargo/releases /rust-lang/cargo/archive.*/(\d+\.\d+\.\d+)\.tar\.gz +version=4 +opts="filenamemangle=s%(?:.*?)?v?(\d[\d.]*)\.tar\.gz%cargo-$1.tar.gz%" \ + https://github.com/rust-lang/cargo/tags \ + (?:.*?/)?v?(\d[\d.]*)\.tar\.gz diff -Nru cargo-0.52.0/.github/workflows/main.yml cargo-0.54.0/.github/workflows/main.yml --- cargo-0.52.0/.github/workflows/main.yml 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/.github/workflows/main.yml 2021-04-27 14:35:53.000000000 +0000 @@ -66,6 +66,7 @@ - run: cargo test --features 'deny-warnings' - run: cargo test --features 'deny-warnings' -p cargo-test-support - run: cargo test -p cargo-platform + - run: cargo test -p cargo-util - run: cargo test --manifest-path crates/mdman/Cargo.toml - run: cargo build --manifest-path crates/credential/cargo-credential-1password/Cargo.toml - run: cargo build --manifest-path crates/credential/cargo-credential-gnome-secret/Cargo.toml diff -Nru cargo-0.52.0/publish.py cargo-0.54.0/publish.py --- cargo-0.52.0/publish.py 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/publish.py 2021-04-27 14:35:53.000000000 +0000 @@ -12,6 +12,7 @@ TO_PUBLISH = [ 'crates/cargo-platform', + 'crates/cargo-util', 'crates/crates-io', '.', ] diff -Nru cargo-0.52.0/src/bin/cargo/cli.rs cargo-0.54.0/src/bin/cargo/cli.rs --- cargo-0.52.0/src/bin/cargo/cli.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/cli.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,10 +1,11 @@ -use cargo::core::features; +use cargo::core::{features, CliUnstable}; use cargo::{self, drop_print, drop_println, CliResult, Config}; use clap::{AppSettings, Arg, ArgMatches}; use super::commands; use super::list_commands; use crate::command_prelude::*; +use cargo::core::features::HIDDEN; pub fn main(config: &mut Config) -> CliResult { // CAUTION: Be 
careful with using `config` until it is configured below. @@ -30,25 +31,40 @@ }; if args.value_of("unstable-features") == Some("help") { + let options = CliUnstable::help(); + let non_hidden_options: Vec<(String, String)> = options + .iter() + .filter(|(_, help_message)| *help_message != HIDDEN) + .map(|(name, help)| (name.to_string(), help.to_string())) + .collect(); + let longest_option = non_hidden_options + .iter() + .map(|(option_name, _)| option_name.len()) + .max() + .unwrap_or(0); + let help_lines: Vec = non_hidden_options + .iter() + .map(|(option_name, option_help_message)| { + let option_name_kebab_case = option_name.replace("_", "-"); + let padding = " ".repeat(longest_option - option_name.len()); // safe to substract + format!( + " -Z {}{} -- {}", + option_name_kebab_case, padding, option_help_message + ) + }) + .collect(); + let joined = help_lines.join("\n"); drop_println!( config, " Available unstable (nightly-only) flags: - -Z avoid-dev-deps -- Avoid installing dev-dependencies if possible - -Z extra-link-arg -- Allow `cargo:rustc-link-arg` in build scripts - -Z minimal-versions -- Install minimal dependency versions instead of maximum - -Z no-index-update -- Do not update the registry, avoids a network request for benchmarking - -Z unstable-options -- Allow the usage of unstable options - -Z timings -- Display concurrency information - -Z doctest-xcompile -- Compile and run doctests for non-host target using runner config - -Z terminal-width -- Provide a terminal width to rustc for error truncation - -Z namespaced-features -- Allow features with `dep:` prefix - -Z weak-dep-features -- Allow `dep_name?/feature` feature syntax +{} -Run with 'cargo -Z [FLAG] [SUBCOMMAND]'" +Run with 'cargo -Z [FLAG] [SUBCOMMAND]'", + joined ); - if !features::nightly_features_allowed() { + if !config.nightly_features_allowed { drop_println!( config, "\nUnstable flags are only available on the nightly channel \ diff -Nru cargo-0.52.0/src/bin/cargo/commands/build.rs 
cargo-0.54.0/src/bin/cargo/commands/build.rs --- cargo-0.52.0/src/bin/cargo/commands/build.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/build.rs 2021-04-27 14:35:53.000000000 +0000 @@ -43,6 +43,7 @@ .arg_message_format() .arg_build_plan() .arg_unit_graph() + .arg_future_incompat_report() .after_help("Run `cargo help build` for more detailed information.\n") } diff -Nru cargo-0.52.0/src/bin/cargo/commands/check.rs cargo-0.54.0/src/bin/cargo/commands/check.rs --- cargo-0.52.0/src/bin/cargo/commands/check.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/check.rs 2021-04-27 14:35:53.000000000 +0000 @@ -35,6 +35,7 @@ .arg_ignore_rust_version() .arg_message_format() .arg_unit_graph() + .arg_future_incompat_report() .after_help("Run `cargo help check` for more detailed information.\n") } diff -Nru cargo-0.52.0/src/bin/cargo/commands/config.rs cargo-0.54.0/src/bin/cargo/commands/config.rs --- cargo-0.52.0/src/bin/cargo/commands/config.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/config.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,48 @@ +use crate::command_prelude::*; +use cargo::ops::cargo_config; + +pub fn cli() -> App { + subcommand("config") + .about("Inspect configuration values") + .after_help("Run `cargo help config` for more detailed information.\n") + .setting(clap::AppSettings::SubcommandRequiredElseHelp) + .subcommand( + subcommand("get") + .arg(Arg::with_name("key").help("The config key to display")) + .arg( + opt("format", "Display format") + .possible_values(cargo_config::ConfigFormat::POSSIBLE_VALUES) + .default_value("toml"), + ) + .arg(opt( + "show-origin", + "Display where the config value is defined", + )) + .arg( + opt("merged", "Whether or not to merge config values") + .possible_values(&["yes", "no"]) + .default_value("yes"), + ), + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + config + .cli_unstable() + 
.fail_if_stable_command(config, "config", 9301)?; + match args.subcommand() { + ("get", Some(args)) => { + let opts = cargo_config::GetOptions { + key: args.value_of("key"), + format: args.value_of("format").unwrap().parse()?, + show_origin: args.is_present("show-origin"), + merged: args.value_of("merged") == Some("yes"), + }; + cargo_config::get(config, &opts)?; + } + (cmd, _) => { + panic!("unexpected command `{}`", cmd) + } + } + Ok(()) +} diff -Nru cargo-0.52.0/src/bin/cargo/commands/describe_future_incompatibilities.rs cargo-0.54.0/src/bin/cargo/commands/describe_future_incompatibilities.rs --- cargo-0.52.0/src/bin/cargo/commands/describe_future_incompatibilities.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/describe_future_incompatibilities.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,56 @@ +use crate::command_prelude::*; +use anyhow::{anyhow, Context as _}; +use cargo::core::compiler::future_incompat::{OnDiskReport, FUTURE_INCOMPAT_FILE}; +use cargo::drop_eprint; +use std::io::Read; + +pub fn cli() -> App { + subcommand("describe-future-incompatibilities") + .arg( + opt( + "id", + "identifier of the report [generated by a Cargo command invocation", + ) + .value_name("id") + .required(true), + ) + .about("Reports any crates which will eventually stop compiling") +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + if !config.nightly_features_allowed { + return Err(anyhow!( + "`cargo describe-future-incompatibilities` can only be used on the nightly channel" + ) + .into()); + } + + let ws = args.workspace(config)?; + let report_file = ws.target_dir().open_ro( + FUTURE_INCOMPAT_FILE, + ws.config(), + "Future incompatible report", + )?; + + let mut file_contents = String::new(); + report_file + .file() + .read_to_string(&mut file_contents) + .with_context(|| "failed to read report")?; + let on_disk_report: OnDiskReport = + serde_json::from_str(&file_contents).with_context(|| "failed to load 
report")?; + + let id = args.value_of("id").unwrap(); + if id != on_disk_report.id { + return Err(anyhow!( + "Expected an id of `{}`, but `{}` was provided on the command line. \ + Your report may have been overwritten by a different one.", + on_disk_report.id, + id + ) + .into()); + } + + drop_eprint!(config, "{}", on_disk_report.report); + Ok(()) +} diff -Nru cargo-0.52.0/src/bin/cargo/commands/fix.rs cargo-0.54.0/src/bin/cargo/commands/fix.rs --- cargo-0.52.0/src/bin/cargo/commands/fix.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/fix.rs 2021-04-27 14:35:53.000000000 +0000 @@ -42,17 +42,6 @@ .help("Fix in preparation for the next edition"), ) .arg( - // This is a deprecated argument, we'll want to phase it out - // eventually. - Arg::with_name("prepare-for") - .long("prepare-for") - .help("Fix warnings in preparation of an edition upgrade") - .takes_value(true) - .possible_values(&["2018"]) - .conflicts_with("edition") - .hidden(true), - ) - .arg( Arg::with_name("idioms") .long("edition-idioms") .help("Fix warnings to migrate to the idioms of an edition"), @@ -111,7 +100,6 @@ &ws, &mut ops::FixOptions { edition: args.is_present("edition"), - prepare_for: args.value_of("prepare-for"), idioms: args.is_present("idioms"), compile_opts: opts, allow_dirty: args.is_present("allow-dirty"), diff -Nru cargo-0.52.0/src/bin/cargo/commands/help.rs cargo-0.54.0/src/bin/cargo/commands/help.rs --- cargo-0.52.0/src/bin/cargo/commands/help.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/help.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,7 +1,7 @@ use crate::aliased_command; use cargo::util::errors::CargoResult; -use cargo::util::paths::resolve_executable; use cargo::Config; +use cargo_util::paths::resolve_executable; use flate2::read::GzDecoder; use std::ffi::OsString; use std::io::Read; @@ -56,16 +56,16 @@ Some(man) => man, None => return Ok(false), }; - write_and_spawn(&man, "man")?; + write_and_spawn(&subcommand, 
&man, "man")?; } else { let txt = match extract_man(&subcommand, "txt") { Some(txt) => txt, None => return Ok(false), }; if resolve_executable(Path::new("less")).is_ok() { - write_and_spawn(&txt, "less")?; + write_and_spawn(&subcommand, &txt, "less")?; } else if resolve_executable(Path::new("more")).is_ok() { - write_and_spawn(&txt, "more")?; + write_and_spawn(&subcommand, &txt, "more")?; } else { drop(std::io::stdout().write_all(&txt)); } @@ -117,13 +117,20 @@ /// Write the contents of a man page to disk and spawn the given command to /// display it. -fn write_and_spawn(contents: &[u8], command: &str) -> CargoResult<()> { - let mut tmp = tempfile::Builder::new().prefix("cargo-man").tempfile()?; +fn write_and_spawn(name: &str, contents: &[u8], command: &str) -> CargoResult<()> { + let prefix = format!("cargo-{}.", name); + let mut tmp = tempfile::Builder::new().prefix(&prefix).tempfile()?; let f = tmp.as_file_mut(); f.write_all(contents)?; f.flush()?; + let path = tmp.path(); + // Use a path relative to the temp directory so that it can work on + // cygwin/msys systems which don't handle windows-style paths. + let mut relative_name = std::ffi::OsString::from("./"); + relative_name.push(path.file_name().unwrap()); let mut cmd = std::process::Command::new(command) - .arg(tmp.path()) + .arg(relative_name) + .current_dir(path.parent().unwrap()) .spawn()?; drop(cmd.wait()); Ok(()) diff -Nru cargo-0.52.0/src/bin/cargo/commands/install.rs cargo-0.54.0/src/bin/cargo/commands/install.rs --- cargo-0.52.0/src/bin/cargo/commands/install.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/install.rs 2021-04-27 14:35:53.000000000 +0000 @@ -78,6 +78,7 @@ if let Some(path) = args.value_of_path("path", config) { config.reload_rooted_at(path)?; } else { + // TODO: Consider calling set_search_stop_path(home). 
config.reload_rooted_at(config.home().clone().into_path_unlocked())?; } @@ -116,7 +117,7 @@ let version = args.value_of("version"); let root = args.value_of("root"); - // We only provide worksapce information for local crate installation from + // We only provide workspace information for local crate installation from // one of the following sources: // - From current working directory (only work for edition 2015). // - From a specific local file path. diff -Nru cargo-0.52.0/src/bin/cargo/commands/locate_project.rs cargo-0.54.0/src/bin/cargo/commands/locate_project.rs --- cargo-0.52.0/src/bin/cargo/commands/locate_project.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/locate_project.rs 2021-04-27 14:35:53.000000000 +0000 @@ -51,7 +51,7 @@ let location = ProjectLocation { root }; match MessageFormat::parse(args)? { - MessageFormat::Json => config.shell().print_json(&location), + MessageFormat::Json => config.shell().print_json(&location)?, MessageFormat::Plain => drop_println!(config, "{}", location.root), } diff -Nru cargo-0.52.0/src/bin/cargo/commands/logout.rs cargo-0.54.0/src/bin/cargo/commands/logout.rs --- cargo-0.52.0/src/bin/cargo/commands/logout.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/logout.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,6 +1,4 @@ use crate::command_prelude::*; -use anyhow::format_err; -use cargo::core::features; use cargo::ops; pub fn cli() -> App { @@ -12,29 +10,10 @@ } pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let unstable = config.cli_unstable(); - if !(unstable.credential_process || unstable.unstable_options) { - const SEE: &str = "See https://github.com/rust-lang/cargo/issues/8933 for more \ - information about the `cargo logout` command."; - if features::nightly_features_allowed() { - return Err(format_err!( - "the `cargo logout` command is unstable, pass `-Z unstable-options` to enable it\n\ - {}", - SEE - ) - .into()); - } else { - 
return Err(format_err!( - "the `cargo logout` command is unstable, and only available on the \ - nightly channel of Cargo, but this is the `{}` channel\n\ - {}\n\ - {}", - features::channel(), - features::SEE_CHANNELS, - SEE - ) - .into()); - } + if !config.cli_unstable().credential_process { + config + .cli_unstable() + .fail_if_stable_command(config, "logout", 8933)?; } config.load_credentials()?; ops::registry_logout(config, args.value_of("registry").map(String::from))?; diff -Nru cargo-0.52.0/src/bin/cargo/commands/metadata.rs cargo-0.54.0/src/bin/cargo/commands/metadata.rs --- cargo-0.52.0/src/bin/cargo/commands/metadata.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/metadata.rs 2021-04-27 14:35:53.000000000 +0000 @@ -44,15 +44,13 @@ }; let options = OutputMetadataOptions { - features: values(args, "features"), - all_features: args.is_present("all-features"), - no_default_features: args.is_present("no-default-features"), + cli_features: args.cli_features()?, no_deps: args.is_present("no-deps"), filter_platforms: args._values_of("filter-platform"), version, }; let result = ops::output_metadata(&ws, &options)?; - config.shell().print_json(&result); + config.shell().print_json(&result)?; Ok(()) } diff -Nru cargo-0.52.0/src/bin/cargo/commands/mod.rs cargo-0.54.0/src/bin/cargo/commands/mod.rs --- cargo-0.52.0/src/bin/cargo/commands/mod.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -6,6 +6,8 @@ build::cli(), check::cli(), clean::cli(), + config::cli(), + describe_future_incompatibilities::cli(), doc::cli(), fetch::cli(), fix::cli(), @@ -44,6 +46,8 @@ "build" => build::exec, "check" => check::exec, "clean" => clean::exec, + "config" => config::exec, + "describe-future-incompatibilities" => describe_future_incompatibilities::exec, "doc" => doc::exec, "fetch" => fetch::exec, "fix" => fix::exec, @@ -82,6 +86,8 @@ pub mod build; pub mod check; pub mod clean; 
+pub mod config; +pub mod describe_future_incompatibilities; pub mod doc; pub mod fetch; pub mod fix; diff -Nru cargo-0.52.0/src/bin/cargo/commands/package.rs cargo-0.54.0/src/bin/cargo/commands/package.rs --- cargo-0.52.0/src/bin/cargo/commands/package.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/package.rs 2021-04-27 14:35:53.000000000 +0000 @@ -45,9 +45,7 @@ allow_dirty: args.is_present("allow-dirty"), targets: args.targets(), jobs: args.jobs()?, - features: args._values_of("features"), - all_features: args.is_present("all-features"), - no_default_features: args.is_present("no-default-features"), + cli_features: args.cli_features()?, }, )?; Ok(()) diff -Nru cargo-0.52.0/src/bin/cargo/commands/publish.rs cargo-0.54.0/src/bin/cargo/commands/publish.rs --- cargo-0.52.0/src/bin/cargo/commands/publish.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/publish.rs 2021-04-27 14:35:53.000000000 +0000 @@ -45,9 +45,7 @@ jobs: args.jobs()?, dry_run: args.is_present("dry-run"), registry, - features: args._values_of("features"), - all_features: args.is_present("all-features"), - no_default_features: args.is_present("no-default-features"), + cli_features: args.cli_features()?, }, )?; Ok(()) diff -Nru cargo-0.52.0/src/bin/cargo/commands/read_manifest.rs cargo-0.54.0/src/bin/cargo/commands/read_manifest.rs --- cargo-0.52.0/src/bin/cargo/commands/read_manifest.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/read_manifest.rs 2021-04-27 14:35:53.000000000 +0000 @@ -15,6 +15,8 @@ pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { let ws = args.workspace(config)?; - config.shell().print_json(&ws.current()?.serialized(config)); + config + .shell() + .print_json(&ws.current()?.serialized(config))?; Ok(()) } diff -Nru cargo-0.52.0/src/bin/cargo/commands/run.rs cargo-0.54.0/src/bin/cargo/commands/run.rs --- cargo-0.52.0/src/bin/cargo/commands/run.rs 2021-03-16 
22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/run.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,8 +1,8 @@ use crate::command_prelude::*; use crate::util::restricted_names::is_glob_pattern; -use crate::util::ProcessError; use cargo::core::Verbosity; use cargo::ops::{self, CompileFilter, Packages}; +use cargo_util::ProcessError; pub fn cli() -> App { subcommand("run") diff -Nru cargo-0.52.0/src/bin/cargo/commands/rustc.rs cargo-0.54.0/src/bin/cargo/commands/rustc.rs --- cargo-0.52.0/src/bin/cargo/commands/rustc.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/rustc.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2,6 +2,8 @@ use cargo::ops; +const PRINT_ARG_NAME: &str = "print"; + pub fn cli() -> App { subcommand("rustc") .setting(AppSettings::TrailingVarArg) @@ -26,11 +28,19 @@ .arg_profile("Build artifacts with the specified profile") .arg_features() .arg_target_triple("Target triple which compiles will be for") + .arg( + opt( + PRINT_ARG_NAME, + "Output compiler information without compiling", + ) + .value_name("INFO"), + ) .arg_target_dir() .arg_manifest_path() .arg_message_format() .arg_unit_graph() .arg_ignore_rust_version() + .arg_future_incompat_report() .after_help("Run `cargo help rustc` for more detailed information.\n") } @@ -62,6 +72,13 @@ } else { Some(target_args) }; - ops::compile(&ws, &compile_opts)?; + if let Some(opt_value) = args.value_of(PRINT_ARG_NAME) { + config + .cli_unstable() + .fail_if_stable_opt(PRINT_ARG_NAME, 8923)?; + ops::print(&ws, &compile_opts, opt_value)?; + } else { + ops::compile(&ws, &compile_opts)?; + } Ok(()) } diff -Nru cargo-0.52.0/src/bin/cargo/commands/test.rs cargo-0.54.0/src/bin/cargo/commands/test.rs --- cargo-0.52.0/src/bin/cargo/commands/test.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/test.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,7 +1,6 @@ use crate::command_prelude::*; use anyhow::Error; use cargo::ops::{self, CompileFilter, 
FilterRule, LibRule}; -use cargo::util::errors; pub fn cli() -> App { subcommand("test") @@ -56,6 +55,7 @@ .arg_ignore_rust_version() .arg_message_format() .arg_unit_graph() + .arg_future_incompat_report() .after_help("Run `cargo help test` for more detailed information.\n") } @@ -127,7 +127,7 @@ let context = anyhow::format_err!("{}", err.hint(&ws, &ops.compile_opts)); let e = match err.code { // Don't show "process didn't exit successfully" for simple errors. - Some(i) if errors::is_simple_exit_code(i) => CliError::new(context, i), + Some(i) if cargo_util::is_simple_exit_code(i) => CliError::new(context, i), Some(i) => CliError::new(Error::from(err).context(context), i), None => CliError::new(Error::from(err).context(context), 101), }; diff -Nru cargo-0.52.0/src/bin/cargo/commands/tree.rs cargo-0.54.0/src/bin/cargo/commands/tree.rs --- cargo-0.52.0/src/bin/cargo/commands/tree.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/tree.rs 2021-04-27 14:35:53.000000000 +0000 @@ -190,9 +190,7 @@ let charset = tree::Charset::from_str(args.value_of("charset").unwrap()) .map_err(|e| anyhow::anyhow!("{}", e))?; let opts = tree::TreeOptions { - features: values(args, "features"), - all_features: args.is_present("all-features"), - no_default_features: args.is_present("no-default-features"), + cli_features: args.cli_features()?, packages, target, edge_kinds, diff -Nru cargo-0.52.0/src/bin/cargo/commands/verify_project.rs cargo-0.54.0/src/bin/cargo/commands/verify_project.rs --- cargo-0.52.0/src/bin/cargo/commands/verify_project.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/commands/verify_project.rs 2021-04-27 14:35:53.000000000 +0000 @@ -15,12 +15,12 @@ if let Err(e) = args.workspace(config) { let mut h = HashMap::new(); h.insert("invalid".to_string(), e.to_string()); - config.shell().print_json(&h); + config.shell().print_json(&h)?; process::exit(1) } let mut h = HashMap::new(); h.insert("success".to_string(), 
"true".to_string()); - config.shell().print_json(&h); + config.shell().print_json(&h)?; Ok(()) } diff -Nru cargo-0.52.0/src/bin/cargo/main.rs cargo-0.54.0/src/bin/cargo/main.rs --- cargo-0.52.0/src/bin/cargo/main.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/bin/cargo/main.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,17 +1,17 @@ #![warn(rust_2018_idioms)] // while we're getting used to 2018 -#![allow(clippy::redundant_closure)] // there's a false positive +#![allow(clippy::all)] #![warn(clippy::needless_borrow)] #![warn(clippy::redundant_clone)] +use cargo::core::shell::Shell; +use cargo::util::CliError; +use cargo::util::{self, closest_msg, command_prelude, CargoResult, CliResult, Config}; +use cargo_util::{ProcessBuilder, ProcessError}; use std::collections::{BTreeMap, BTreeSet}; use std::env; use std::fs; use std::path::{Path, PathBuf}; -use cargo::core::shell::Shell; -use cargo::util::{self, closest_msg, command_prelude, CargoResult, CliResult, Config}; -use cargo::util::{CliError, ProcessError}; - mod cli; mod commands; @@ -22,7 +22,6 @@ pretty_env_logger::init_custom_env("CARGO_LOG"); #[cfg(not(feature = "pretty-env-logger"))] env_logger::init_from_env("CARGO_LOG"); - cargo::core::maybe_allow_nightly_features(); let mut config = match Config::default() { Ok(cfg) => cfg, @@ -32,7 +31,7 @@ } }; - let result = match cargo::ops::fix_maybe_exec_rustc() { + let result = match cargo::ops::fix_maybe_exec_rustc(&config) { Ok(true) => Ok(()), Ok(false) => { let _token = cargo::util::job::setup(); @@ -76,9 +75,8 @@ Err(_) => config.get::>>(&alias_name)?, }; - let result = user_alias.or_else(|| match builtin_aliases_execs(command) { - Some(command_str) => Some(vec![command_str.1.to_string()]), - None => None, + let result = user_alias.or_else(|| { + builtin_aliases_execs(command).map(|command_str| vec![command_str.1.to_string()]) }); Ok(result) } @@ -161,7 +159,7 @@ }; let cargo_exe = config.cargo_exe()?; - let err = match util::process(&command) + let err 
= match ProcessBuilder::new(&command) .env(cargo::CARGO_ENV, cargo_exe) .args(args) .exec_replace() diff -Nru cargo-0.52.0/src/cargo/core/compiler/build_config.rs cargo-0.54.0/src/cargo/core/compiler/build_config.rs --- cargo-0.52.0/src/cargo/core/compiler/build_config.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/build_config.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,8 +1,8 @@ use crate::core::compiler::CompileKind; use crate::util::interning::InternedString; -use crate::util::ProcessBuilder; use crate::util::{CargoResult, Config, RustfixDiagnosticServer}; use anyhow::bail; +use cargo_util::ProcessBuilder; use serde::ser; use std::cell::RefCell; use std::path::PathBuf; @@ -37,6 +37,8 @@ // Note that, although the cmd-line flag name is `out-dir`, in code we use // `export_dir`, to avoid confusion with out dir at `target/debug/deps`. pub export_dir: Option, + /// `true` to output a future incompatibility report at the end of the build + pub future_incompat_report: bool, } impl BuildConfig { @@ -80,6 +82,7 @@ primary_unit_rustc: None, rustfix_diagnostic_server: RefCell::new(None), export_dir: None, + future_incompat_report: false, }) } diff -Nru cargo-0.52.0/src/cargo/core/compiler/build_context/mod.rs cargo-0.54.0/src/cargo/core/compiler/build_context/mod.rs --- cargo-0.52.0/src/cargo/core/compiler/build_context/mod.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/build_context/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -11,7 +11,9 @@ use std::path::PathBuf; mod target_info; -pub use self::target_info::{FileFlavor, FileType, RustcTargetData, TargetInfo}; +pub use self::target_info::{ + FileFlavor, FileType, RustDocFingerprint, RustcTargetData, TargetInfo, +}; /// The build context, containing all information about a build task. /// @@ -37,7 +39,7 @@ pub packages: PackageSet<'cfg>, /// Information about rustc and the target platform. 
- pub target_data: RustcTargetData, + pub target_data: RustcTargetData<'cfg>, /// The root units of `unit_graph` (units requested on the command-line). pub roots: Vec, @@ -56,7 +58,7 @@ build_config: &'a BuildConfig, profiles: Profiles, extra_compiler_args: HashMap>, - target_data: RustcTargetData, + target_data: RustcTargetData<'cfg>, roots: Vec, unit_graph: UnitGraph, ) -> CargoResult> { diff -Nru cargo-0.52.0/src/cargo/core/compiler/build_context/target_info.rs cargo-0.54.0/src/cargo/core/compiler/build_context/target_info.rs --- cargo-0.52.0/src/cargo/core/compiler/build_context/target_info.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/build_context/target_info.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,12 +1,17 @@ -use crate::core::compiler::{BuildOutput, CompileKind, CompileMode, CompileTarget, CrateType}; +use crate::core::compiler::{ + BuildOutput, CompileKind, CompileMode, CompileTarget, Context, CrateType, +}; use crate::core::{Dependency, Target, TargetKind, Workspace}; use crate::util::config::{Config, StringList, TargetConfig}; -use crate::util::{CargoResult, CargoResultExt, ProcessBuilder, Rustc}; +use crate::util::{CargoResult, Rustc}; +use anyhow::Context as _; use cargo_platform::{Cfg, CfgExpr}; +use cargo_util::{paths, ProcessBuilder}; +use serde::{Deserialize, Serialize}; use std::cell::RefCell; use std::collections::hash_map::{Entry, HashMap}; use std::env; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use std::str::{self, FromStr}; /// Information about the platform target gleaned from querying rustc. 
@@ -134,7 +139,8 @@ kind, "RUSTFLAGS", )?; - let mut process = rustc.process(); + let extra_fingerprint = kind.fingerprint_hash(); + let mut process = rustc.workspace_process(); process .arg("-") .arg("--crate-name") @@ -160,15 +166,18 @@ process.arg("--crate-type").arg(crate_type.as_str()); } let supports_split_debuginfo = rustc - .cached_output(process.clone().arg("-Csplit-debuginfo=packed")) + .cached_output( + process.clone().arg("-Csplit-debuginfo=packed"), + extra_fingerprint, + ) .is_ok(); process.arg("--print=sysroot"); process.arg("--print=cfg"); let (output, error) = rustc - .cached_output(&process) - .chain_err(|| "failed to run `rustc` to learn about target-specific information")?; + .cached_output(&process, extra_fingerprint) + .with_context(|| "failed to run `rustc` to learn about target-specific information")?; let mut lines = output.lines(); let mut map = HashMap::new(); @@ -204,7 +213,7 @@ .map(|line| Ok(Cfg::from_str(line)?)) .filter(TargetInfo::not_user_specific_cfg) .collect::>>() - .chain_err(|| { + .with_context(|| { format!( "failed to parse the cfg from `rustc --print=cfg`, got:\n{}", output @@ -405,7 +414,7 @@ process.arg("--crate-type").arg(crate_type.as_str()); - let output = process.exec_with_output().chain_err(|| { + let output = process.exec_with_output().with_context(|| { format!( "failed to run `rustc` to learn about crate-type {} information", crate_type @@ -646,9 +655,14 @@ } /// Collection of information about `rustc` and the host and target. -pub struct RustcTargetData { +pub struct RustcTargetData<'cfg> { /// Information about `rustc` itself. pub rustc: Rustc, + + /// Config + config: &'cfg Config, + requested_kinds: Vec, + /// Build information for the "host", which is information about when /// `rustc` is invoked without a `--target` flag. This is used for /// procedural macros, build scripts, etc. 
@@ -661,27 +675,17 @@ target_info: HashMap, } -impl RustcTargetData { +impl<'cfg> RustcTargetData<'cfg> { pub fn new( - ws: &Workspace<'_>, + ws: &Workspace<'cfg>, requested_kinds: &[CompileKind], - ) -> CargoResult { + ) -> CargoResult> { let config = ws.config(); let rustc = config.load_global_rustc(Some(ws))?; let host_config = config.target_cfg_triple(&rustc.host)?; let host_info = TargetInfo::new(config, requested_kinds, &rustc, CompileKind::Host)?; let mut target_config = HashMap::new(); let mut target_info = HashMap::new(); - for kind in requested_kinds { - if let CompileKind::Target(target) = *kind { - let tcfg = config.target_cfg_triple(target.short_name())?; - target_config.insert(target, tcfg); - target_info.insert( - target, - TargetInfo::new(config, requested_kinds, &rustc, *kind)?, - ); - } - } // This is a hack. The unit_dependency graph builder "pretends" that // `CompileKind::Host` is `CompileKind::Target(host)` if the @@ -694,13 +698,49 @@ target_config.insert(ct, host_config.clone()); } - Ok(RustcTargetData { + let mut res = RustcTargetData { rustc, - target_config, - target_info, + config, + requested_kinds: requested_kinds.into(), host_config, host_info, - }) + target_config, + target_info, + }; + + // Get all kinds we currently know about. + // + // For now, targets can only ever come from the root workspace + // units as artifact dependencies are not a thing yet, so this + // correctly represents all the kinds that can happen. When we + // have artifact dependencies or other ways for targets to + // appear at places that are not the root units, we may have + // to revisit this. 
+ let all_kinds = requested_kinds + .iter() + .copied() + .chain(ws.members().flat_map(|p| { + p.manifest() + .default_kind() + .into_iter() + .chain(p.manifest().forced_kind()) + })); + for kind in all_kinds { + if let CompileKind::Target(target) = kind { + if !res.target_config.contains_key(&target) { + res.target_config + .insert(target, res.config.target_cfg_triple(target.short_name())?); + } + if !res.target_info.contains_key(&target) { + res.target_info.insert( + target, + TargetInfo::new(res.config, &res.requested_kinds, &res.rustc, kind)?, + ); + } + } + } + + Ok(res) } /// Returns a "short" name for the given kind, suitable for keying off @@ -754,3 +794,98 @@ self.target_config(kind).links_overrides.get(lib_name) } } + +/// Structure used to deal with Rustdoc fingerprinting +#[derive(Debug, Serialize, Deserialize)] +pub struct RustDocFingerprint { + pub rustc_vv: String, +} + +impl RustDocFingerprint { + /// This function checks whether the latest version of `Rustc` used to compile this + /// `Workspace`'s docs was the same as the one is currently being used in this `cargo doc` + /// call. + /// + /// In case it's not, it takes care of removing the `doc/` folder as well as overwriting + /// the rustdoc fingerprint info in order to guarantee that we won't end up with mixed + /// versions of the `js/html/css` files that `rustdoc` autogenerates which do not have + /// any versioning. 
+ pub fn check_rustdoc_fingerprint(cx: &Context<'_, '_>) -> CargoResult<()> { + if cx.bcx.config.cli_unstable().skip_rustdoc_fingerprint { + return Ok(()); + } + let actual_rustdoc_target_data = RustDocFingerprint { + rustc_vv: cx.bcx.rustc().verbose_version.clone(), + }; + + let fingerprint_path = cx.files().host_root().join(".rustdoc_fingerprint.json"); + let write_fingerprint = || -> CargoResult<()> { + paths::write( + &fingerprint_path, + serde_json::to_string(&actual_rustdoc_target_data)?, + ) + }; + let rustdoc_data = match paths::read(&fingerprint_path) { + Ok(rustdoc_data) => rustdoc_data, + // If the fingerprint does not exist, do not clear out the doc + // directories. Otherwise this ran into problems where projects + // like rustbuild were creating the doc directory before running + // `cargo doc` in a way that deleting it would break it. + Err(_) => return write_fingerprint(), + }; + match serde_json::from_str::(&rustdoc_data) { + Ok(fingerprint) => { + if fingerprint.rustc_vv == actual_rustdoc_target_data.rustc_vv { + return Ok(()); + } else { + log::debug!( + "doc fingerprint changed:\noriginal:\n{}\nnew:\n{}", + fingerprint.rustc_vv, + actual_rustdoc_target_data.rustc_vv + ); + } + } + Err(e) => { + log::debug!("could not deserialize {:?}: {}", fingerprint_path, e); + } + }; + // Fingerprint does not match, delete the doc directories and write a new fingerprint. + log::debug!( + "fingerprint {:?} mismatch, clearing doc directories", + fingerprint_path + ); + cx.bcx + .all_kinds + .iter() + .map(|kind| cx.files().layout(*kind).doc()) + .filter(|path| path.exists()) + .try_for_each(|path| clean_doc(path))?; + write_fingerprint()?; + return Ok(()); + + fn clean_doc(path: &Path) -> CargoResult<()> { + let entries = path + .read_dir() + .with_context(|| format!("failed to read directory `{}`", path.display()))?; + for entry in entries { + let entry = entry?; + // Don't remove hidden files. Rustdoc does not create them, + // but the user might have. 
+ if entry + .file_name() + .to_str() + .map_or(false, |name| name.starts_with('.')) + { + continue; + } + let path = entry.path(); + if entry.file_type()?.is_dir() { + paths::remove_dir_all(path)?; + } else { + paths::remove_file(path)?; + } + } + Ok(()) + } + } +} diff -Nru cargo-0.52.0/src/cargo/core/compiler/build_plan.rs cargo-0.54.0/src/cargo/core/compiler/build_plan.rs --- cargo-0.52.0/src/cargo/core/compiler/build_plan.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/build_plan.rs 2021-04-27 14:35:53.000000000 +0000 @@ -14,7 +14,8 @@ use super::context::OutputFile; use super::{CompileKind, CompileMode, Context, Unit}; use crate::core::TargetKind; -use crate::util::{internal, CargoResult, Config, ProcessBuilder}; +use crate::util::{internal, CargoResult, Config}; +use cargo_util::ProcessBuilder; #[derive(Debug, Serialize)] struct Invocation { diff -Nru cargo-0.52.0/src/cargo/core/compiler/compilation.rs cargo-0.54.0/src/cargo/core/compiler/compilation.rs --- cargo-0.52.0/src/cargo/core/compiler/compilation.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/compilation.rs 2021-04-27 14:35:53.000000000 +0000 @@ -4,12 +4,13 @@ use std::path::PathBuf; use cargo_platform::CfgExpr; +use cargo_util::{paths, ProcessBuilder}; use semver::Version; use super::BuildContext; use crate::core::compiler::{CompileKind, Metadata, Unit}; -use crate::core::{Edition, Package}; -use crate::util::{self, config, join_paths, process, CargoResult, Config, ProcessBuilder}; +use crate::core::Package; +use crate::util::{config, CargoResult, Config}; /// Structure with enough information to run `rustdoc --test`. 
pub struct Doctest { @@ -126,10 +127,10 @@ sysroot_target_libdir: bcx .all_kinds .iter() - .map(|kind| { + .map(|&kind| { ( - *kind, - bcx.target_data.info(*kind).sysroot_target_libdir.clone(), + kind, + bcx.target_data.info(kind).sysroot_target_libdir.clone(), ) }) .collect(), @@ -184,12 +185,10 @@ unit: &Unit, script_meta: Option, ) -> CargoResult { - let rustdoc = process(&*self.config.rustdoc()?); + let rustdoc = ProcessBuilder::new(&*self.config.rustdoc()?); let cmd = fill_rustc_tool_env(rustdoc, unit); let mut p = self.fill_env(cmd, &unit.pkg, script_meta, unit.kind, true)?; - if unit.target.edition() != Edition::Edition2015 { - p.arg(format!("--edition={}", unit.target.edition())); - } + unit.target.edition().cmd_edition_arg(&mut p); for crate_type in unit.target.rustc_crate_types() { p.arg("--crate-type").arg(crate_type.as_str()); @@ -209,7 +208,13 @@ cmd: T, pkg: &Package, ) -> CargoResult { - self.fill_env(process(cmd), pkg, None, CompileKind::Host, false) + self.fill_env( + ProcessBuilder::new(cmd), + pkg, + None, + CompileKind::Host, + false, + ) } pub fn target_runner(&self, kind: CompileKind) -> Option<&(PathBuf, Vec)> { @@ -231,12 +236,12 @@ script_meta: Option, ) -> CargoResult { let builder = if let Some((runner, args)) = self.target_runner(kind) { - let mut builder = process(runner); + let mut builder = ProcessBuilder::new(runner); builder.args(args); builder.arg(cmd); builder } else { - process(cmd) + ProcessBuilder::new(cmd) }; self.fill_env(builder, pkg, script_meta, kind, false) } @@ -274,7 +279,7 @@ } } - let dylib_path = util::dylib_path(); + let dylib_path = paths::dylib_path(); let dylib_path_is_empty = dylib_path.is_empty(); search_path.extend(dylib_path.into_iter()); if cfg!(target_os = "macos") && dylib_path_is_empty { @@ -287,9 +292,9 @@ search_path.push(PathBuf::from("/usr/local/lib")); search_path.push(PathBuf::from("/usr/lib")); } - let search_path = join_paths(&search_path, util::dylib_path_envvar())?; + let search_path = 
paths::join_paths(&search_path, paths::dylib_path_envvar())?; - cmd.env(util::dylib_path_envvar(), &search_path); + cmd.env(paths::dylib_path_envvar(), &search_path); if let Some(meta) = script_meta { if let Some(env) = self.extra_env.get(&meta) { for (k, v) in env { @@ -339,6 +344,16 @@ ) .env("CARGO_PKG_AUTHORS", &pkg.authors().join(":")) .cwd(pkg.root()); + + if self.config.cli_unstable().configurable_env { + // Apply any environment variables from the config + for (key, value) in self.config.env_config()?.iter() { + if value.is_force() || cmd.get_env(key).is_none() { + cmd.env(key, value.resolve(self.config)); + } + } + } + Ok(cmd) } } diff -Nru cargo-0.52.0/src/cargo/core/compiler/compile_kind.rs cargo-0.54.0/src/cargo/core/compiler/compile_kind.rs --- cargo-0.52.0/src/cargo/core/compiler/compile_kind.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/compile_kind.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,10 +1,12 @@ use crate::core::Target; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; -use crate::util::Config; -use anyhow::bail; +use crate::util::{Config, StableHasher}; +use anyhow::{bail, Context as _}; use serde::Serialize; use std::collections::BTreeSet; +use std::fs; +use std::hash::{Hash, Hasher}; use std::path::Path; /// Indicator for how a unit is being compiled. @@ -78,6 +80,18 @@ }; Ok(vec![kind]) } + + /// Hash used for fingerprinting. + /// + /// Metadata hashing uses the normal Hash trait, which does not + /// differentiate on `.json` file contents. The fingerprint hash does + /// check the contents. + pub fn fingerprint_hash(&self) -> u64 { + match self { + CompileKind::Host => 0, + CompileKind::Target(target) => target.fingerprint_hash(), + } + } } impl serde::ser::Serialize for CompileKind { @@ -129,7 +143,7 @@ // with different paths always produce the same result. 
let path = Path::new(name) .canonicalize() - .chain_err(|| anyhow::format_err!("target path {:?} is not a valid file", name))?; + .with_context(|| format!("target path {:?} is not a valid file", name))?; let name = path .into_os_string() @@ -166,4 +180,19 @@ &self.name } } + + /// See [`CompileKind::fingerprint_hash`]. + pub fn fingerprint_hash(&self) -> u64 { + let mut hasher = StableHasher::new(); + self.name.hash(&mut hasher); + if self.name.ends_with(".json") { + // This may have some performance concerns, since it is called + // fairly often. If that ever seems worth fixing, consider + // embedding this in `CompileTarget`. + if let Ok(contents) = fs::read_to_string(self.name) { + contents.hash(&mut hasher); + } + } + hasher.finish() + } } diff -Nru cargo-0.52.0/src/cargo/core/compiler/context/compilation_files.rs cargo-0.54.0/src/cargo/core/compiler/context/compilation_files.rs --- cargo-0.52.0/src/cargo/core/compiler/context/compilation_files.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/context/compilation_files.rs 2021-04-27 14:35:53.000000000 +0000 @@ -80,6 +80,17 @@ } } +/// Information about the metadata hashes used for a `Unit`. +struct MetaInfo { + /// The symbol hash to use. + meta_hash: Metadata, + /// Whether or not the `-C extra-filename` flag is used to generate unique + /// output filenames for this `Unit`. + /// + /// If this is `true`, the `meta_hash` is used for the filename. + use_extra_filename: bool, +} + /// Collection of information about the files emitted by the compiler, and the /// output directory structure. pub struct CompilationFiles<'a, 'cfg> { @@ -94,7 +105,7 @@ roots: Vec, ws: &'a Workspace<'cfg>, /// Metadata hash to use for each unit. - metas: HashMap>, + metas: HashMap, /// For each Unit, a list all files produced. outputs: HashMap>>>, } @@ -160,11 +171,14 @@ /// Gets the metadata for the given unit. /// /// See module docs for more details. 
- /// - /// Returns `None` if the unit should not use a metadata hash (like - /// rustdoc, or some dylibs). - pub fn metadata(&self, unit: &Unit) -> Option { - self.metas[unit] + pub fn metadata(&self, unit: &Unit) -> Metadata { + self.metas[unit].meta_hash + } + + /// Returns whether or not `-C extra-filename` is used to extend the + /// output filenames to make them unique. + pub fn use_extra_filename(&self, unit: &Unit) -> bool { + self.metas[unit].use_extra_filename } /// Gets the short hash based only on the `PackageId`. @@ -201,9 +215,11 @@ /// taken in those cases! fn pkg_dir(&self, unit: &Unit) -> String { let name = unit.pkg.package_id().name(); - match self.metas[unit] { - Some(ref meta) => format!("{}-{}", name, meta), - None => format!("{}-{}", name, self.target_short_hash(unit)), + let meta = &self.metas[unit]; + if meta.use_extra_filename { + format!("{}-{}", name, meta.meta_hash) + } else { + format!("{}-{}", name, self.target_short_hash(unit)) } } @@ -448,8 +464,9 @@ // Convert FileType to OutputFile. 
let mut outputs = Vec::new(); for file_type in file_types { - let meta = self.metadata(unit).map(|m| m.to_string()); - let path = out_dir.join(file_type.output_filename(&unit.target, meta.as_deref())); + let meta = &self.metas[unit]; + let meta_opt = meta.use_extra_filename.then(|| meta.meta_hash.to_string()); + let path = out_dir.join(file_type.output_filename(&unit.target, meta_opt.as_deref())); let hardlink = self.uplift_to(unit, &file_type, &path); let export_path = if unit.target.is_custom_build() { None @@ -471,11 +488,11 @@ } } -fn metadata_of( +fn metadata_of<'a>( unit: &Unit, cx: &Context<'_, '_>, - metas: &mut HashMap>, -) -> Option { + metas: &'a mut HashMap, +) -> &'a MetaInfo { if !metas.contains_key(unit) { let meta = compute_metadata(unit, cx, metas); metas.insert(unit.clone(), meta); @@ -483,18 +500,15 @@ metadata_of(&dep.unit, cx, metas); } } - metas[unit] + &metas[unit] } fn compute_metadata( unit: &Unit, cx: &Context<'_, '_>, - metas: &mut HashMap>, -) -> Option { + metas: &mut HashMap, +) -> MetaInfo { let bcx = &cx.bcx; - if !should_use_metadata(bcx, unit) { - return None; - } let mut hasher = StableHasher::new(); METADATA_VERSION.hash(&mut hasher); @@ -514,7 +528,7 @@ let mut deps_metadata = cx .unit_deps(unit) .iter() - .map(|dep| metadata_of(&dep.unit, cx, metas)) + .map(|dep| metadata_of(&dep.unit, cx, metas).meta_hash) .collect::>(); deps_metadata.sort(); deps_metadata.hash(&mut hasher); @@ -561,7 +575,10 @@ // with user dependencies. unit.is_std.hash(&mut hasher); - Some(Metadata(hasher.finish())) + MetaInfo { + meta_hash: Metadata(hasher.finish()), + use_extra_filename: should_use_metadata(bcx, unit), + } } fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut StableHasher) { @@ -598,7 +615,7 @@ /// Returns whether or not this unit should use a metadata hash. 
fn should_use_metadata(bcx: &BuildContext<'_, '_>, unit: &Unit) -> bool { - if unit.mode.is_doc_test() { + if unit.mode.is_doc_test() || unit.mode.is_doc() { // Doc tests do not have metadata. return false; } diff -Nru cargo-0.52.0/src/cargo/core/compiler/context/mod.rs cargo-0.54.0/src/cargo/core/compiler/context/mod.rs --- cargo-0.52.0/src/cargo/core/compiler/context/mod.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/context/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2,13 +2,14 @@ use std::path::{Path, PathBuf}; use std::sync::{Arc, Mutex}; +use anyhow::Context as _; use filetime::FileTime; use jobserver::Client; use crate::core::compiler::compilation::{self, UnitOutput}; use crate::core::compiler::{self, Unit}; use crate::core::PackageId; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::profile; use super::build_plan::BuildPlan; @@ -18,7 +19,9 @@ use super::layout::Layout; use super::lto::Lto; use super::unit_graph::UnitDep; -use super::{BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor}; +use super::{ + BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor, RustDocFingerprint, +}; mod compilation_files; use self::compilation_files::CompilationFiles; @@ -94,7 +97,7 @@ Some(c) => c.clone(), None => { let client = Client::new(bcx.build_config.jobs as usize) - .chain_err(|| "failed to create jobserver")?; + .with_context(|| "failed to create jobserver")?; client.acquire_raw()?; client } @@ -133,6 +136,18 @@ custom_build::build_map(&mut self)?; self.check_collisions()?; + // We need to make sure that if there were any previous docs + // already compiled, they were compiled with the same Rustc version that we're currently + // using. Otherways we must remove the `doc/` folder and compile again forcing a rebuild. 
+ // + // This is important because the `.js`/`.html` & `.css` files that are generated by Rustc don't have + // any versioning (See https://github.com/rust-lang/cargo/issues/8461). + // Therefore, we can end up with weird bugs and behaviours if we mix different + // versions of these files. + if self.bcx.build_config.mode.is_doc() { + RustDocFingerprint::check_rustdoc_fingerprint(&self)? + } + for unit in &self.bcx.roots { // Build up a list of pending jobs, each of which represent // compiling a particular package. No actual work is executed as @@ -214,6 +229,7 @@ let mut unstable_opts = false; let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?; args.extend(compiler::lto_args(&self, unit)); + for feature in &unit.features { args.push("--cfg".into()); args.push(format!("feature=\"{}\"", feature).into()); @@ -228,6 +244,16 @@ } } args.extend(self.bcx.rustdocflags_args(unit).iter().map(Into::into)); + + use super::MessageFormat; + let format = match self.bcx.build_config.message_format { + MessageFormat::Short => "short", + MessageFormat::Human => "human", + MessageFormat::Json { .. } => "json", + }; + args.push("--error-format".into()); + args.push(format.into()); + self.compilation.to_doc_test.push(compilation::Doctest { unit: unit.clone(), args, @@ -299,11 +325,11 @@ self.files_mut() .host .prepare() - .chain_err(|| "couldn't prepare build directories")?; + .with_context(|| "couldn't prepare build directories")?; for target in self.files.as_mut().unwrap().target.values_mut() { target .prepare() - .chain_err(|| "couldn't prepare build directories")?; + .with_context(|| "couldn't prepare build directories")?; } let files = self.files.as_ref().unwrap(); @@ -365,9 +391,7 @@ /// Returns the metadata hash for a RunCustomBuild unit. 
pub fn get_run_build_script_metadata(&self, unit: &Unit) -> Metadata { assert!(unit.mode.is_run_custom_build()); - self.files() - .metadata(unit) - .expect("build script should always have hash") + self.files().metadata(unit) } pub fn is_primary_package(&self, unit: &Unit) -> bool { @@ -534,11 +558,11 @@ pub fn new_jobserver(&mut self) -> CargoResult { let tokens = self.bcx.build_config.jobs as usize; - let client = Client::new(tokens).chain_err(|| "failed to create jobserver")?; + let client = Client::new(tokens).with_context(|| "failed to create jobserver")?; // Drain the client fully for i in 0..tokens { - client.acquire_raw().chain_err(|| { + client.acquire_raw().with_context(|| { format!( "failed to fully drain {}/{} token from jobserver at startup", i, tokens, diff -Nru cargo-0.52.0/src/cargo/core/compiler/custom_build.rs cargo-0.54.0/src/cargo/core/compiler/custom_build.rs --- cargo-0.52.0/src/cargo/core/compiler/custom_build.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/custom_build.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,10 +3,12 @@ use crate::core::compiler::context::Metadata; use crate::core::compiler::job_queue::JobState; use crate::core::{profiles::ProfileRoot, PackageId}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::machine_message::{self, Message}; -use crate::util::{self, internal, paths, profile}; +use crate::util::{internal, profile}; +use anyhow::Context as _; use cargo_platform::Cfg; +use cargo_util::paths; use std::collections::hash_map::{Entry, HashMap}; use std::collections::{BTreeSet, HashSet}; use std::path::{Path, PathBuf}; @@ -267,7 +269,8 @@ } }) .collect::>(); - let pkg_name = unit.pkg.to_string(); + let library_name = unit.pkg.library().map(|t| t.crate_name()); + let pkg_descr = unit.pkg.to_string(); let build_script_outputs = Arc::clone(&cx.build_script_outputs); let id = unit.pkg.package_id(); let output_file = 
script_run_dir.join("output"); @@ -276,7 +279,8 @@ let host_target_root = cx.files().host_dest().to_path_buf(); let all = ( id, - pkg_name.clone(), + library_name.clone(), + pkg_descr.clone(), Arc::clone(&build_script_outputs), output_file.clone(), script_out_dir.clone(), @@ -291,6 +295,7 @@ paths::create_dir_all(&script_out_dir)?; let extra_link_arg = cx.bcx.config.cli_unstable().extra_link_arg; + let nightly_features_allowed = cx.bcx.config.nightly_features_allowed; // Prepare the unit of "dirty work" which will actually run the custom build // command. @@ -303,7 +308,7 @@ // If we have an old build directory, then just move it into place, // otherwise create it! paths::create_dir_all(&script_out_dir) - .chain_err(|| "failed to create script output directory for build command")?; + .with_context(|| "failed to create script output directory for build command")?; // For all our native lib dependencies, pick up their metadata to pass // along to this custom build command. We're also careful to augment our @@ -365,7 +370,7 @@ }, true, ) - .chain_err(|| format!("failed to run custom build command for `{}`", pkg_name)); + .with_context(|| format!("failed to run custom build command for `{}`", pkg_descr)); if let Err(error) = output { insert_warnings_in_build_outputs( @@ -391,13 +396,15 @@ // modified in the middle of the build. paths::set_file_time_no_err(output_file, timestamp); paths::write(&err_file, &output.stderr)?; - paths::write(&root_output_file, util::path2bytes(&script_out_dir)?)?; + paths::write(&root_output_file, paths::path2bytes(&script_out_dir)?)?; let parsed_output = BuildOutput::parse( &output.stdout, - &pkg_name, + library_name, + &pkg_descr, &script_out_dir, &script_out_dir, extra_link_arg, + nightly_features_allowed, )?; if json_messages { @@ -414,15 +421,17 @@ // itself to run when we actually end up just discarding what we calculated // above. 
let fresh = Work::new(move |state| { - let (id, pkg_name, build_script_outputs, output_file, script_out_dir) = all; + let (id, library_name, pkg_descr, build_script_outputs, output_file, script_out_dir) = all; let output = match prev_output { Some(output) => output, None => BuildOutput::parse_file( &output_file, - &pkg_name, + library_name, + &pkg_descr, &prev_script_out_dir, &script_out_dir, extra_link_arg, + nightly_features_allowed, )?, }; @@ -469,29 +478,37 @@ impl BuildOutput { pub fn parse_file( path: &Path, - pkg_name: &str, + library_name: Option, + pkg_descr: &str, script_out_dir_when_generated: &Path, script_out_dir: &Path, extra_link_arg: bool, + nightly_features_allowed: bool, ) -> CargoResult { let contents = paths::read_bytes(path)?; BuildOutput::parse( &contents, - pkg_name, + library_name, + pkg_descr, script_out_dir_when_generated, script_out_dir, extra_link_arg, + nightly_features_allowed, ) } // Parses the output of a script. - // The `pkg_name` is used for error messages. + // The `pkg_descr` is used for error messages. + // The `library_name` is used for determining if RUSTC_BOOTSTRAP should be allowed. pub fn parse( input: &[u8], - pkg_name: &str, + // Takes String instead of InternedString so passing `unit.pkg.name()` will give a compile error. 
+ library_name: Option, + pkg_descr: &str, script_out_dir_when_generated: &Path, script_out_dir: &Path, extra_link_arg: bool, + nightly_features_allowed: bool, ) -> CargoResult { let mut library_paths = Vec::new(); let mut library_links = Vec::new(); @@ -502,7 +519,7 @@ let mut rerun_if_changed = Vec::new(); let mut rerun_if_env_changed = Vec::new(); let mut warnings = Vec::new(); - let whence = format!("build script of `{}`", pkg_name); + let whence = format!("build script of `{}`", pkg_descr); for line in input.split(|b| *b == b'\n') { let line = match str::from_utf8(line) { @@ -562,7 +579,53 @@ } } "rustc-cfg" => cfgs.push(value.to_string()), - "rustc-env" => env.push(BuildOutput::parse_rustc_env(&value, &whence)?), + "rustc-env" => { + let (key, val) = BuildOutput::parse_rustc_env(&value, &whence)?; + // Build scripts aren't allowed to set RUSTC_BOOTSTRAP. + // See https://github.com/rust-lang/cargo/issues/7088. + if key == "RUSTC_BOOTSTRAP" { + // If RUSTC_BOOTSTRAP is already set, the user of Cargo knows about + // bootstrap and still wants to override the channel. Give them a way to do + // so, but still emit a warning that the current crate shouldn't be trying + // to set RUSTC_BOOTSTRAP. + // If this is a nightly build, setting RUSTC_BOOTSTRAP wouldn't affect the + // behavior, so still only give a warning. + // NOTE: cargo only allows nightly features on RUSTC_BOOTSTRAP=1, but we + // want setting any value of RUSTC_BOOTSTRAP to downgrade this to a warning + // (so that `RUSTC_BOOTSTRAP=library_name` will work) + let rustc_bootstrap_allows = |name: Option<&str>| { + let name = match name { + // as of 2021, no binaries on crates.io use RUSTC_BOOTSTRAP, so + // fine-grained opt-outs aren't needed. end-users can always use + // RUSTC_BOOTSTRAP=1 from the top-level if it's really a problem. 
+ None => return false, + Some(n) => n, + }; + std::env::var("RUSTC_BOOTSTRAP") + .map_or(false, |var| var.split(',').any(|s| s == name)) + }; + if nightly_features_allowed + || rustc_bootstrap_allows(library_name.as_deref()) + { + warnings.push(format!("Cannot set `RUSTC_BOOTSTRAP={}` from {}.\n\ + note: Crates cannot set `RUSTC_BOOTSTRAP` themselves, as doing so would subvert the stability guarantees of Rust for your project.", + val, whence + )); + } else { + // Setting RUSTC_BOOTSTRAP would change the behavior of the crate. + // Abort with an error. + anyhow::bail!("Cannot set `RUSTC_BOOTSTRAP={}` from {}.\n\ + note: Crates cannot set `RUSTC_BOOTSTRAP` themselves, as doing so would subvert the stability guarantees of Rust for your project.\n\ + help: If you're sure you want to do this in your project, set the environment variable `RUSTC_BOOTSTRAP={}` before running cargo instead.", + val, + whence, + library_name.as_deref().unwrap_or("1"), + ); + } + } else { + env.push((key, val)); + } + } "warning" => warnings.push(value.to_string()), "rerun-if-changed" => rerun_if_changed.push(PathBuf::from(value)), "rerun-if-env-changed" => rerun_if_env_changed.push(value.to_string()), @@ -805,7 +868,7 @@ let output_file = script_run_dir.join("output"); let prev_script_out_dir = paths::read_bytes(&root_output_file) - .and_then(|bytes| util::bytes2path(&bytes)) + .and_then(|bytes| paths::bytes2path(&bytes)) .unwrap_or_else(|_| script_out_dir.clone()); let extra_link_arg = cx.bcx.config.cli_unstable().extra_link_arg; @@ -813,10 +876,12 @@ ( BuildOutput::parse_file( &output_file, + unit.pkg.library().map(|t| t.crate_name()), &unit.pkg.to_string(), &prev_script_out_dir, &script_out_dir, extra_link_arg, + cx.bcx.config.nightly_features_allowed, ) .ok(), prev_script_out_dir, diff -Nru cargo-0.52.0/src/cargo/core/compiler/fingerprint.rs cargo-0.54.0/src/cargo/core/compiler/fingerprint.rs --- cargo-0.52.0/src/cargo/core/compiler/fingerprint.rs 2021-03-16 22:05:51.000000000 +0000 
+++ cargo-0.54.0/src/cargo/core/compiler/fingerprint.rs 2021-04-27 14:35:53.000000000 +0000 @@ -60,10 +60,10 @@ //! `cargo rustc` extra args | ✓ | ✓ //! CompileMode | ✓ | ✓ //! Target Name | ✓ | ✓ -//! Target CompileKind (bin/lib/etc.) | ✓ | ✓ +//! TargetKind (bin/lib/etc.) | ✓ | ✓ //! Enabled Features | ✓ | ✓ //! Immediate dependency’s hashes | ✓[^1] | ✓ -//! Target or Host mode | | ✓ +//! CompileKind (host/target) | ✓ | ✓ //! __CARGO_DEFAULT_LIB_METADATA[^4] | | ✓ //! package_id | | ✓ //! authors, description, homepage, repo | ✓ | @@ -321,7 +321,8 @@ use std::sync::{Arc, Mutex}; use std::time::SystemTime; -use anyhow::{bail, format_err}; +use anyhow::{bail, format_err, Context as _}; +use cargo_util::{paths, ProcessBuilder}; use filetime::FileTime; use log::{debug, info}; use serde::de; @@ -331,10 +332,10 @@ use crate::core::compiler::unit_graph::UnitDep; use crate::core::Package; use crate::util; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; -use crate::util::paths; -use crate::util::{internal, profile, ProcessBuilder}; +use crate::util::{internal, path_args, profile}; +use crate::CARGO_ENV; use super::custom_build::BuildDeps; use super::job::{Job, Work}; @@ -542,6 +543,9 @@ metadata: u64, /// Hash of various config settings that change how things are compiled. config: u64, + /// The rustc target. This is only relevant for `.json` files, otherwise + /// the metadata hash segregates the units. + compile_kind: u64, /// Description of whether the filesystem status for this unit is up to date /// or should be considered stale. #[serde(skip)] @@ -709,6 +713,7 @@ mtime_cache: &mut HashMap, pkg_root: &Path, target_root: &Path, + cargo_exe: &Path, ) -> CargoResult> { match self { // We need to parse `dep_info`, learn about the crate's dependencies. 
@@ -724,7 +729,21 @@ None => return Ok(Some(StaleItem::MissingFile(dep_info))), }; for (key, previous) in info.env.iter() { - let current = env::var(key).ok(); + let current = if key == CARGO_ENV { + Some( + cargo_exe + .to_str() + .ok_or_else(|| { + format_err!( + "cargo exe path {} must be valid UTF-8", + cargo_exe.display() + ) + })? + .to_string(), + ) + } else { + env::var(key).ok() + }; if current == *previous { continue; } @@ -780,6 +799,7 @@ rustflags: Vec::new(), metadata: 0, config: 0, + compile_kind: 0, fs_status: FsStatus::Stale, outputs: Vec::new(), } @@ -843,6 +863,9 @@ if self.config != old.config { bail!("configuration settings have changed") } + if self.compile_kind != old.compile_kind { + bail!("compile kind (rustc target) changed") + } let my_local = self.local.lock().unwrap(); let old_local = old.local.lock().unwrap(); if my_local.len() != old_local.len() { @@ -973,6 +996,7 @@ mtime_cache: &mut HashMap, pkg_root: &Path, target_root: &Path, + cargo_exe: &Path, ) -> CargoResult<()> { assert!(!self.fs_status.up_to_date()); @@ -1064,7 +1088,9 @@ // files for this package itself. If we do find something log a helpful // message and bail out so we stay stale. for local in self.local.get_mut().unwrap().iter() { - if let Some(item) = local.find_stale_item(mtime_cache, pkg_root, target_root)? { + if let Some(item) = + local.find_stale_item(mtime_cache, pkg_root, target_root, cargo_exe)? + { item.log(); return Ok(()); } @@ -1090,12 +1116,22 @@ ref local, metadata, config, + compile_kind, ref rustflags, .. } = *self; let local = local.lock().unwrap(); ( - rustc, features, target, path, profile, &*local, metadata, config, rustflags, + rustc, + features, + target, + path, + profile, + &*local, + metadata, + config, + compile_kind, + rustflags, ) .hash(h); @@ -1239,7 +1275,13 @@ // After we built the initial `Fingerprint` be sure to update the // `fs_status` field of it. 
let target_root = target_root(cx); - fingerprint.check_filesystem(&mut cx.mtime_cache, unit.pkg.root(), &target_root)?; + let cargo_exe = cx.bcx.config.cargo_exe()?; + fingerprint.check_filesystem( + &mut cx.mtime_cache, + unit.pkg.root(), + &target_root, + cargo_exe, + )?; let fingerprint = Arc::new(fingerprint); cx.fingerprints @@ -1269,7 +1311,7 @@ let target_root = target_root(cx); let local = if unit.mode.is_doc() { // rustdoc does not have dep-info files. - let fingerprint = pkg_fingerprint(cx.bcx, &unit.pkg).chain_err(|| { + let fingerprint = pkg_fingerprint(cx.bcx, &unit.pkg).with_context(|| { format!( "failed to determine package fingerprint for documenting {}", unit.pkg @@ -1310,27 +1352,33 @@ // Include metadata since it is exposed as environment variables. let m = unit.pkg.manifest().metadata(); let metadata = util::hash_u64((&m.authors, &m.description, &m.homepage, &m.repository)); - let config = if unit.mode.is_doc() && cx.bcx.config.cli_unstable().rustdoc_map { - cx.bcx - .config - .doc_extern_map() - .map_or(0, |map| util::hash_u64(map)) - } else { - 0 - }; + let mut config = 0u64; + if unit.mode.is_doc() && cx.bcx.config.cli_unstable().rustdoc_map { + config = config.wrapping_add( + cx.bcx + .config + .doc_extern_map() + .map_or(0, |map| util::hash_u64(map)), + ); + } + if let Some(allow_features) = &cx.bcx.config.cli_unstable().allow_features { + config = config.wrapping_add(util::hash_u64(allow_features)); + } + let compile_kind = unit.kind.fingerprint_hash(); Ok(Fingerprint { rustc: util::hash_u64(&cx.bcx.rustc().verbose_version), target: util::hash_u64(&unit.target), profile: profile_hash, // Note that .0 is hashed here, not .1 which is the cwd. That doesn't // actually affect the output artifact so there's no need to hash it. 
- path: util::hash_u64(super::path_args(cx.bcx, unit).0), + path: util::hash_u64(path_args(cx.bcx.ws, unit).0), features: format!("{:?}", unit.features), deps, local: Mutex::new(local), memoized_hash: Mutex::new(None), metadata, config, + compile_kind, rustflags: extra_flags, fs_status: FsStatus::Stale, outputs, @@ -1352,7 +1400,7 @@ let local = (gen_local)( deps, Some(&|| { - pkg_fingerprint(cx.bcx, &unit.pkg).chain_err(|| { + pkg_fingerprint(cx.bcx, &unit.pkg).with_context(|| { format!( "failed to determine package fingerprint for build script for {}", unit.pkg @@ -1620,7 +1668,7 @@ let old_fingerprint_json = paths::read(&loc.with_extension("json"))?; let old_fingerprint: Fingerprint = serde_json::from_str(&old_fingerprint_json) - .chain_err(|| internal("failed to deserialize json"))?; + .with_context(|| internal("failed to deserialize json"))?; // Fingerprint can be empty after a failed rebuild (see comment in prepare_target). if !old_fingerprint_short.is_empty() { debug_assert_eq!(util::to_hex(old_fingerprint.hash()), old_fingerprint_short); @@ -1827,9 +1875,13 @@ // you write a binary that does `println!("{}", env!("OUT_DIR"))` we won't // recompile that if you move the target directory. Hopefully that's not too // bad of an issue for now... + // + // This also includes `CARGO` since if the code is explicitly wanting to + // know that path, it should be rebuilt if it changes. The CARGO path is + // not tracked elsewhere in the fingerprint. on_disk_info .env - .retain(|(key, _)| !rustc_cmd.get_envs().contains_key(key)); + .retain(|(key, _)| !rustc_cmd.get_envs().contains_key(key) || key == CARGO_ENV); for file in depinfo.files { // The path may be absolute or relative, canonical or not. 
Make sure @@ -1896,7 +1948,7 @@ _ => return None, }; let bytes = read_bytes(bytes)?; - files.push((ty, util::bytes2path(bytes).ok()?)); + files.push((ty, paths::bytes2path(bytes).ok()?)); } let nenv = read_usize(bytes)?; @@ -1941,7 +1993,7 @@ DepInfoPathType::PackageRootRelative => dst.push(0), DepInfoPathType::TargetRootRelative => dst.push(1), } - write_bytes(dst, util::path2bytes(file)?); + write_bytes(dst, paths::path2bytes(file)?); } write_usize(dst, self.env.len()); diff -Nru cargo-0.52.0/src/cargo/core/compiler/future_incompat.rs cargo-0.54.0/src/cargo/core/compiler/future_incompat.rs --- cargo-0.52.0/src/cargo/core/compiler/future_incompat.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/future_incompat.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,36 @@ +use serde::{Deserialize, Serialize}; + +/// The future incompatibility report, emitted by the compiler as a JSON message. +#[derive(serde::Deserialize)] +pub struct FutureIncompatReport { + pub future_incompat_report: Vec, +} + +#[derive(Serialize, Deserialize)] +pub struct FutureBreakageItem { + /// The date at which this lint will become an error. + /// Currently unused + pub future_breakage_date: Option, + /// The original diagnostic emitted by the compiler + pub diagnostic: Diagnostic, +} + +/// A diagnostic emitted by the compiler as a JSON message. 
+/// We only care about the 'rendered' field +#[derive(Serialize, Deserialize)] +pub struct Diagnostic { + pub rendered: String, +} + +/// The filename in the top-level `target` directory where we store +/// the report +pub const FUTURE_INCOMPAT_FILE: &str = ".future-incompat-report.json"; + +#[derive(Serialize, Deserialize)] +pub struct OnDiskReport { + // A Cargo-generated id used to detect when a report has been overwritten + pub id: String, + // Cannot be a &str, since Serde needs + // to be able to un-escape the JSON + pub report: String, +} diff -Nru cargo-0.52.0/src/cargo/core/compiler/job_queue.rs cargo-0.54.0/src/cargo/core/compiler/job_queue.rs --- cargo-0.52.0/src/cargo/core/compiler/job_queue.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/job_queue.rs 2021-04-27 14:35:53.000000000 +0000 @@ -56,10 +56,13 @@ use std::sync::Arc; use std::time::Duration; -use anyhow::format_err; +use anyhow::{format_err, Context as _}; +use cargo_util::ProcessBuilder; use crossbeam_utils::thread::Scope; use jobserver::{Acquired, Client, HelperThread}; use log::{debug, info, trace}; +use rand::distributions::Alphanumeric; +use rand::{thread_rng, Rng}; use super::context::OutputFile; use super::job::{ @@ -68,11 +71,15 @@ }; use super::timings::Timings; use super::{BuildContext, BuildPlan, CompileMode, Context, Unit}; +use crate::core::compiler::future_incompat::{ + FutureBreakageItem, OnDiskReport, FUTURE_INCOMPAT_FILE, +}; use crate::core::{PackageId, Shell, TargetKind}; +use crate::drop_eprint; use crate::util::diagnostic_server::{self, DiagnosticPrinter}; use crate::util::machine_message::{self, Message as _}; +use crate::util::CargoResult; use crate::util::{self, internal, profile}; -use crate::util::{CargoResult, CargoResultExt, ProcessBuilder}; use crate::util::{Config, DependencyQueue, Progress, ProgressStyle, Queue}; /// This structure is backed by the `DependencyQueue` type and manages the @@ -149,8 +156,9 @@ pending_queue: Vec<(Unit, 
Job)>, print: DiagnosticPrinter<'cfg>, - // How many jobs we've finished + /// How many jobs we've finished finished: usize, + per_crate_future_incompat_reports: Vec, } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] @@ -162,6 +170,11 @@ } } +struct FutureIncompatReportCrate { + package_id: PackageId, + report: Vec, +} + /// A `JobState` is constructed by `JobQueue::run` and passed to `Job::run`. It includes everything /// necessary to communicate between the main thread and the execution of the job. /// @@ -228,6 +241,7 @@ FixDiagnostic(diagnostic_server::Message), Token(io::Result), Finish(JobId, Artifact, CargoResult<()>), + FutureIncompatReport(JobId, Vec), // This client should get release_raw called on it with one of our tokens NeedsToken(JobId), @@ -282,6 +296,11 @@ .push(Message::Finish(self.id, Artifact::Metadata, Ok(()))); } + pub fn future_incompat_report(&self, report: Vec) { + self.messages + .push(Message::FutureIncompatReport(self.id, report)); + } + /// The rustc underlying this Job is about to acquire a jobserver token (i.e., block) /// on the passed client. /// @@ -410,6 +429,7 @@ pending_queue: Vec::new(), print: DiagnosticPrinter::new(cx.bcx.config), finished: 0, + per_crate_future_incompat_reports: Vec::new(), }; // Create a helper thread for acquiring jobserver tokens @@ -420,7 +440,7 @@ .into_helper_thread(move |token| { messages.push(Message::Token(token)); }) - .chain_err(|| "failed to create helper thread for jobserver management")?; + .with_context(|| "failed to create helper thread for jobserver management")?; // Create a helper thread to manage the diagnostics for rustfix if // necessary. @@ -469,7 +489,15 @@ // we're able to perform some parallel work. 
while self.has_extra_tokens() && !self.pending_queue.is_empty() { let (unit, job) = self.pending_queue.remove(0); - self.run(&unit, job, cx, scope)?; + *self.counts.get_mut(&unit.pkg.package_id()).unwrap() -= 1; + if !cx.bcx.build_config.build_plan { + // Print out some nice progress information. + // NOTE: An error here will drop the job without starting it. + // That should be OK, since we want to exit as soon as + // possible during an error. + self.note_working_on(cx.bcx.config, &unit, job.freshness())?; + } + self.run(&unit, job, cx, scope); } Ok(()) @@ -509,7 +537,7 @@ .push(token); client .release_raw() - .chain_err(|| "failed to release jobserver token")?; + .with_context(|| "failed to release jobserver token")?; } Ok(()) @@ -583,8 +611,13 @@ } } } + Message::FutureIncompatReport(id, report) => { + let package_id = self.active[&id].pkg.package_id(); + self.per_crate_future_incompat_reports + .push(FutureIncompatReportCrate { package_id, report }); + } Message::Token(acquired_token) => { - let token = acquired_token.chain_err(|| "failed to acquire jobserver token")?; + let token = acquired_token.with_context(|| "failed to acquire jobserver token")?; self.tokens.push(token); } Message::NeedsToken(id) => { @@ -740,13 +773,14 @@ } } if cx.bcx.build_config.emit_json() { + let mut shell = cx.bcx.config.shell(); let msg = machine_message::BuildFinished { success: error.is_none(), } .to_json_string(); - if let Err(e) = writeln!(cx.bcx.config.shell().out(), "{}", msg) { + if let Err(e) = writeln!(shell.out(), "{}", msg) { if error.is_some() { - crate::display_error(&e.into(), &mut cx.bcx.config.shell()); + crate::display_error(&e.into(), &mut shell); } else { return Some(e.into()); } @@ -763,7 +797,9 @@ if !cx.bcx.build_config.build_plan { // It doesn't really matter if this fails. 
drop(cx.bcx.config.shell().status("Finished", message)); + self.emit_future_incompat(cx); } + None } else { debug!("queue: {:#?}", self.queue); @@ -771,6 +807,96 @@ } } + fn emit_future_incompat(&mut self, cx: &mut Context<'_, '_>) { + if cx.bcx.config.cli_unstable().future_incompat_report { + if self.per_crate_future_incompat_reports.is_empty() { + drop( + cx.bcx + .config + .shell() + .note("0 dependencies had future-incompat warnings"), + ); + return; + } + self.per_crate_future_incompat_reports + .sort_by_key(|r| r.package_id); + + let crates_and_versions = self + .per_crate_future_incompat_reports + .iter() + .map(|r| r.package_id.to_string()) + .collect::>() + .join(", "); + + drop(cx.bcx.config.shell().warn(&format!( + "the following crates contain code that will be rejected by a future version of Rust: {}", + crates_and_versions + ))); + + let mut full_report = String::new(); + let mut rng = thread_rng(); + + // Generate a short ID to allow detecting if a report gets overwritten + let id: String = std::iter::repeat(()) + .map(|()| char::from(rng.sample(Alphanumeric))) + .take(4) + .collect(); + + for report in std::mem::take(&mut self.per_crate_future_incompat_reports) { + full_report.push_str(&format!( + "The crate `{}` currently triggers the following future incompatibility lints:\n", + report.package_id + )); + for item in report.report { + let rendered = if cx.bcx.config.shell().err_supports_color() { + item.diagnostic.rendered + } else { + strip_ansi_escapes::strip(&item.diagnostic.rendered) + .map(|v| String::from_utf8(v).expect("utf8")) + .expect("strip should never fail") + }; + + for line in rendered.lines() { + full_report.push_str(&format!("> {}\n", line)); + } + } + } + + let report_file = cx.bcx.ws.target_dir().open_rw( + FUTURE_INCOMPAT_FILE, + cx.bcx.config, + "Future incompatibility report", + ); + let err = report_file + .and_then(|report_file| { + let on_disk_report = OnDiskReport { + id: id.clone(), + report: full_report.clone(), + }; + 
serde_json::to_writer(report_file, &on_disk_report).map_err(|e| e.into()) + }) + .err(); + if let Some(e) = err { + crate::display_warning_with_error( + "failed to write on-disk future incompat report", + &e, + &mut cx.bcx.config.shell(), + ); + } + + if cx.bcx.build_config.future_incompat_report { + drop_eprint!(cx.bcx.config, "{}", full_report); + drop(cx.bcx.config.shell().note( + &format!("this report can be shown with `cargo describe-future-incompatibilities -Z future-incompat-report --id {}`", id) + )); + } else { + drop(cx.bcx.config.shell().note( + &format!("to see what the problems were, use the option `--future-incompat-report`, or run `cargo describe-future-incompatibilities --id {}`", id) + )); + } + } + } + fn handle_error( &self, shell: &mut Shell, @@ -835,31 +961,22 @@ } } - /// Executes a job, pushing the spawned thread's handled onto `threads`. - fn run( - &mut self, - unit: &Unit, - job: Job, - cx: &Context<'_, '_>, - scope: &Scope<'_>, - ) -> CargoResult<()> { + /// Executes a job. + /// + /// Fresh jobs block until finished (which should be very fast!), Dirty + /// jobs will spawn a thread in the background and return immediately. + fn run(&mut self, unit: &Unit, job: Job, cx: &Context<'_, '_>, scope: &Scope<'_>) { let id = JobId(self.next_id); self.next_id = self.next_id.checked_add(1).unwrap(); info!("start {}: {:?}", id, unit); assert!(self.active.insert(id, unit.clone()).is_none()); - *self.counts.get_mut(&unit.pkg.package_id()).unwrap() -= 1; let messages = self.messages.clone(); let fresh = job.freshness(); let rmeta_required = cx.rmeta_required(unit); - if !cx.bcx.build_config.build_plan { - // Print out some nice progress information. 
- self.note_working_on(cx.bcx.config, unit, fresh)?; - } - let doit = move |state: JobState<'_>| { let mut sender = FinishOnDrop { messages: &state.messages, @@ -934,8 +1051,6 @@ }); } } - - Ok(()) } fn emit_warnings( diff -Nru cargo-0.52.0/src/cargo/core/compiler/layout.rs cargo-0.54.0/src/cargo/core/compiler/layout.rs --- cargo-0.52.0/src/cargo/core/compiler/layout.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/layout.rs 2021-04-27 14:35:53.000000000 +0000 @@ -100,8 +100,8 @@ use crate::core::compiler::CompileTarget; use crate::core::Workspace; -use crate::util::paths; use crate::util::{CargoResult, FileLock}; +use cargo_util::paths; use std::path::{Path, PathBuf}; /// Contains the paths of all target output locations. diff -Nru cargo-0.52.0/src/cargo/core/compiler/lto.rs cargo-0.54.0/src/cargo/core/compiler/lto.rs --- cargo-0.52.0/src/cargo/core/compiler/lto.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/lto.rs 2021-04-27 14:35:53.000000000 +0000 @@ -45,7 +45,8 @@ for unit in bcx.roots.iter() { let root_lto = match unit.profile.lto { // LTO not requested, no need for bitcode. - profiles::Lto::Bool(false) | profiles::Lto::Off => Lto::OnlyObject, + profiles::Lto::Bool(false) => Lto::OnlyObject, + profiles::Lto::Off => Lto::Off, _ => { let crate_types = unit.target.rustc_crate_types(); if unit.target.for_host() { @@ -127,8 +128,8 @@ (Lto::Run(_), false) => Lto::OnlyBitcode, // LTO when something needs object code. (Lto::Run(_), true) | (Lto::OnlyBitcode, true) => lto_when_needs_object(&crate_types), - // LTO is disabled, no need for bitcode. - (Lto::Off, _) => Lto::OnlyObject, + // LTO is disabled, continue to disable it. + (Lto::Off, _) => Lto::Off, // If this doesn't have any requirements, or the requirements are // already satisfied, then stay with our parent. 
(_, false) | (Lto::OnlyObject, true) | (Lto::ObjectAndBitcode, true) => parent_lto, diff -Nru cargo-0.52.0/src/cargo/core/compiler/mod.rs cargo-0.54.0/src/cargo/core/compiler/mod.rs --- cargo-0.52.0/src/cargo/core/compiler/mod.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -7,6 +7,7 @@ mod crate_type; mod custom_build; mod fingerprint; +pub mod future_incompat; mod job; mod job_queue; mod layout; @@ -27,12 +28,14 @@ use std::path::{Path, PathBuf}; use std::sync::Arc; -use anyhow::Error; +use anyhow::{Context as _, Error}; use lazycell::LazyCell; use log::debug; pub use self::build_config::{BuildConfig, CompileMode, MessageFormat}; -pub use self::build_context::{BuildContext, FileFlavor, FileType, RustcTargetData, TargetInfo}; +pub use self::build_context::{ + BuildContext, FileFlavor, FileType, RustDocFingerprint, RustcTargetData, TargetInfo, +}; use self::build_plan::BuildPlan; pub use self::compilation::{Compilation, Doctest, UnitOutput}; pub use self::compile_kind::{CompileKind, CompileTarget}; @@ -46,16 +49,16 @@ pub use self::lto::Lto; use self::output_depinfo::output_depinfo; use self::unit_graph::UnitDep; +use crate::core::compiler::future_incompat::FutureIncompatReport; pub use crate::core::compiler::unit::{Unit, UnitInterner}; -use crate::core::features::nightly_features_allowed; use crate::core::manifest::TargetSourcePath; use crate::core::profiles::{PanicStrategy, Profile, Strip}; -use crate::core::{Edition, Feature, PackageId, Target}; -use crate::util::errors::{self, CargoResult, CargoResultExt, ProcessError, VerboseError}; +use crate::core::{Feature, PackageId, Target}; +use crate::util::errors::{CargoResult, VerboseError}; use crate::util::interning::InternedString; -use crate::util::machine_message::Message; -use crate::util::{self, machine_message, ProcessBuilder}; -use crate::util::{internal, join_paths, paths, profile}; +use crate::util::machine_message::{self, 
Message}; +use crate::util::{add_path_args, internal, iter_join_onto, profile}; +use cargo_util::{paths, ProcessBuilder, ProcessError}; const RUSTDOC_CRATE_VERSION_FLAG: &str = "--crate-version"; @@ -171,17 +174,17 @@ }; work.then(link_targets(cx, unit, false)?) } else { - let work = if unit.show_warnings(bcx.config) { - replay_output_cache( - unit.pkg.package_id(), - &unit.target, - cx.files().message_cache_path(unit), - cx.bcx.build_config.message_format, - cx.bcx.config.shell().err_supports_color(), - ) - } else { - Work::noop() - }; + // We always replay the output cache, + // since it might contain future-incompat-report messages + let work = replay_output_cache( + unit.pkg.package_id(), + PathBuf::from(unit.pkg.manifest_path()), + &unit.target, + cx.files().message_cache_path(unit), + cx.bcx.build_config.message_format, + cx.bcx.config.shell().err_supports_color(), + unit.show_warnings(bcx.config), + ); // Need to link targets on both the dirty and fresh. work.then(link_targets(cx, unit, true)?) }); @@ -218,6 +221,7 @@ // Prepare the native lib state (extra `-L` and `-l` flags). 
let build_script_outputs = Arc::clone(&cx.build_script_outputs); let current_id = unit.pkg.package_id(); + let manifest_path = PathBuf::from(unit.pkg.manifest_path()); let build_scripts = cx.build_scripts.get(unit).cloned(); // If we are a binary and the package also contains a library, then we @@ -225,9 +229,14 @@ let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib()); let link_type = (&unit.target).into(); - let dep_info_name = match cx.files().metadata(unit) { - Some(metadata) => format!("{}-{}.d", unit.target.crate_name(), metadata), - None => format!("{}.d", unit.target.crate_name()), + let dep_info_name = if cx.files().use_extra_filename(unit) { + format!( + "{}-{}.d", + unit.target.crate_name(), + cx.files().metadata(unit) + ) + } else { + format!("{}.d", unit.target.crate_name()) }; let rustc_dep_info_loc = root.join(dep_info_name); let dep_info_loc = fingerprint::dep_info_loc(cx, unit); @@ -299,7 +308,7 @@ .as_ref() .and_then(|perr| perr.code) { - Some(n) if errors::is_simple_exit_code(n) => VerboseError::new(err).into(), + Some(n) if cargo_util::is_simple_exit_code(n) => VerboseError::new(err).into(), _ => err, } } @@ -315,10 +324,19 @@ &target, mode, &mut |line| on_stdout_line(state, line, package_id, &target), - &mut |line| on_stderr_line(state, line, package_id, &target, &mut output_options), + &mut |line| { + on_stderr_line( + state, + line, + package_id, + &manifest_path, + &target, + &mut output_options, + ) + }, ) .map_err(verbose_if_simple_exit_code) - .chain_err(|| format!("could not compile `{}`", name))?; + .with_context(|| format!("could not compile `{}`", name))?; } if rustc_dep_info_loc.exists() { @@ -332,7 +350,7 @@ // Do not track source files in the fingerprint for registry dependencies. 
is_local, ) - .chain_err(|| { + .with_context(|| { internal(format!( "could not parse/generate dep info at: {}", rustc_dep_info_loc.display() @@ -413,6 +431,7 @@ let outputs = cx.outputs(unit)?; let export_dir = cx.files().export_dir(); let package_id = unit.pkg.package_id(); + let manifest_path = PathBuf::from(unit.pkg.manifest_path()); let profile = unit.profile; let unit_mode = unit.mode; let features = unit.features.iter().map(|s| s.to_string()).collect(); @@ -466,6 +485,7 @@ let msg = machine_message::Artifact { package_id, + manifest_path, target: &target, profile: art_profile, features, @@ -489,7 +509,7 @@ build_scripts: &BuildScripts, root_output: &Path, ) -> CargoResult<()> { - let var = util::dylib_path_envvar(); + let var = paths::dylib_path_envvar(); let search_path = rustc.get_env(var).unwrap_or_default(); let mut search_path = env::split_paths(&search_path).collect::>(); for (pkg_id, metadata) in &build_scripts.plugins { @@ -501,7 +521,7 @@ root_output, )); } - let search_path = join_paths(&search_path, var)?; + let search_path = paths::join_paths(&search_path, var)?; rustc.env(var, &search_path); Ok(()) } @@ -579,14 +599,14 @@ // script_metadata is not needed here, it is only for tests. let mut rustdoc = cx.compilation.rustdoc_process(unit, None)?; rustdoc.inherit_jobserver(&cx.jobserver); - rustdoc.arg("--crate-name").arg(&unit.target.crate_name()); - add_path_args(bcx, unit, &mut rustdoc); + let crate_name = unit.target.crate_name(); + rustdoc.arg("--crate-name").arg(&crate_name); + add_path_args(bcx.ws, unit, &mut rustdoc); add_cap_lints(bcx, unit, &mut rustdoc); if let CompileKind::Target(target) = unit.kind { rustdoc.arg("--target").arg(target.rustc_target()); } - let doc_dir = cx.files().out_dir(unit); // Create the documentation directory ahead of time as rustdoc currently has @@ -594,13 +614,14 @@ // it doesn't already exist. 
paths::create_dir_all(&doc_dir)?; - rustdoc.arg("-o").arg(doc_dir); + rustdoc.arg("-o").arg(&doc_dir); for feat in &unit.features { rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); } add_error_format_and_color(cx, &mut rustdoc, false); + add_allow_features(cx, &mut rustdoc); if let Some(args) = cx.bcx.extra_args_for(unit) { rustdoc.args(args); @@ -618,10 +639,10 @@ let name = unit.pkg.name().to_string(); let build_script_outputs = Arc::clone(&cx.build_script_outputs); let package_id = unit.pkg.package_id(); + let manifest_path = PathBuf::from(unit.pkg.manifest_path()); let target = Target::clone(&unit.target); let mut output_options = OutputOptions::new(cx, unit); let script_metadata = cx.find_build_script_metadata(unit); - Ok(Work::new(move |state| { if let Some(script_metadata) = script_metadata { if let Some(output) = build_script_outputs.lock().unwrap().get(script_metadata) { @@ -633,15 +654,31 @@ } } } + let crate_dir = doc_dir.join(&crate_name); + if crate_dir.exists() { + // Remove output from a previous build. This ensures that stale + // files for removed items are removed. + log::debug!("removing pre-existing doc directory {:?}", crate_dir); + paths::remove_dir_all(crate_dir)?; + } state.running(&rustdoc); rustdoc .exec_with_streaming( &mut |line| on_stdout_line(state, line, package_id, &target), - &mut |line| on_stderr_line(state, line, package_id, &target, &mut output_options), + &mut |line| { + on_stderr_line( + state, + line, + package_id, + &manifest_path, + &target, + &mut output_options, + ) + }, false, ) - .chain_err(|| format!("could not document `{}`", name))?; + .with_context(|| format!("could not document `{}`", name))?; Ok(()) })) } @@ -661,41 +698,6 @@ .arg(unit.pkg.version().to_string()); } -// The path that we pass to rustc is actually fairly important because it will -// show up in error messages (important for readability), debug information -// (important for caching), etc. 
As a result we need to be pretty careful how we -// actually invoke rustc. -// -// In general users don't expect `cargo build` to cause rebuilds if you change -// directories. That could be if you just change directories in the package or -// if you literally move the whole package wholesale to a new directory. As a -// result we mostly don't factor in `cwd` to this calculation. Instead we try to -// track the workspace as much as possible and we update the current directory -// of rustc/rustdoc where appropriate. -// -// The first returned value here is the argument to pass to rustc, and the -// second is the cwd that rustc should operate in. -fn path_args(bcx: &BuildContext<'_, '_>, unit: &Unit) -> (PathBuf, PathBuf) { - let ws_root = bcx.ws.root(); - let src = match unit.target.src_path() { - TargetSourcePath::Path(path) => path.to_path_buf(), - TargetSourcePath::Metabuild => unit.pkg.manifest().metabuild_path(bcx.ws.target_dir()), - }; - assert!(src.is_absolute()); - if unit.pkg.package_id().source_id().is_path() { - if let Ok(path) = src.strip_prefix(ws_root) { - return (path.to_path_buf(), ws_root.to_path_buf()); - } - } - (src, unit.pkg.root().to_path_buf()) -} - -fn add_path_args(bcx: &BuildContext<'_, '_>, unit: &Unit, cmd: &mut ProcessBuilder) { - let (arg, cwd) = path_args(bcx, unit); - cmd.arg(arg); - cmd.cwd(cwd); -} - fn add_cap_lints(bcx: &BuildContext<'_, '_>, unit: &Unit, cmd: &mut ProcessBuilder) { // If this is an upstream dep we don't want warnings from, turn off all // lints. @@ -709,6 +711,15 @@ } } +/// Forward -Zallow-features if it is set for cargo. +fn add_allow_features(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder) { + if let Some(allow) = &cx.bcx.config.cli_unstable().allow_features { + let mut arg = String::from("-Zallow-features="); + let _ = iter_join_onto(&mut arg, allow, ","); + cmd.arg(&arg); + } +} + /// Add error-format flags to the command. /// /// Cargo always uses JSON output. 
This has several benefits, such as being @@ -734,8 +745,8 @@ } cmd.arg(json); - if nightly_features_allowed() { - let config = cx.bcx.config; + let config = cx.bcx.config; + if config.nightly_features_allowed { match ( config.cli_unstable().terminal_width, config.shell().err_width().diagnostic_terminal_width(), @@ -781,12 +792,11 @@ cmd.arg("--crate-name").arg(&unit.target.crate_name()); let edition = unit.target.edition(); - if edition != Edition::Edition2015 { - cmd.arg(format!("--edition={}", edition)); - } + edition.cmd_edition_arg(cmd); - add_path_args(bcx, unit, cmd); + add_path_args(bcx.ws, unit, cmd); add_error_format_and_color(cx, cmd, cx.rmeta_required(unit)); + add_allow_features(cx, cmd); if !test { for crate_type in crate_types.iter() { @@ -806,7 +816,7 @@ } let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build()) - || (crate_types.contains(&CrateType::Dylib) && bcx.ws.members().any(|p| *p != unit.pkg)); + || (crate_types.contains(&CrateType::Dylib) && !cx.is_primary_package(unit)); if prefer_dynamic { cmd.arg("-C").arg("prefer-dynamic"); } @@ -884,15 +894,10 @@ cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); } - match cx.files().metadata(unit) { - Some(m) => { - cmd.arg("-C").arg(&format!("metadata={}", m)); - cmd.arg("-C").arg(&format!("extra-filename=-{}", m)); - } - None => { - cmd.arg("-C") - .arg(&format!("metadata={}", cx.files().target_short_hash(unit))); - } + let meta = cx.files().metadata(unit); + cmd.arg("-C").arg(&format!("metadata={}", meta)); + if cx.files().use_extra_filename(unit) { + cmd.arg("-C").arg(&format!("extra-filename=-{}", meta)); } if rpath { @@ -939,6 +944,10 @@ .env("RUSTC_BOOTSTRAP", "1"); } + if bcx.config.cli_unstable().future_incompat_report { + cmd.arg("-Z").arg("emit-future-incompat-report"); + } + // Add `CARGO_BIN_` environment variables for building tests. 
if unit.target.is_test() || unit.target.is_bench() { for bin_target in unit @@ -967,7 +976,10 @@ match cx.lto[unit] { lto::Lto::Run(None) => push("lto"), lto::Lto::Run(Some(s)) => push(&format!("lto={}", s)), - lto::Lto::Off => push("lto=off"), + lto::Lto::Off => { + push("lto=off"); + push("embed-bitcode=no"); + } lto::Lto::ObjectAndBitcode => {} // this is rustc's default lto::Lto::OnlyBitcode => push("linker-plugin-lto"), lto::Lto::OnlyObject => push("embed-bitcode=no"), @@ -1140,6 +1152,10 @@ /// of empty files are not created. If this is None, the output will not /// be cached (such as when replaying cached messages). cache_cell: Option<(PathBuf, LazyCell)>, + /// If `true`, display any recorded warning messages. + /// Other types of messages are processed regardless + /// of the value of this flag + show_warnings: bool, } impl OutputOptions { @@ -1155,6 +1171,7 @@ look_for_metadata_directive, color, cache_cell, + show_warnings: true, } } } @@ -1173,10 +1190,11 @@ state: &JobState<'_>, line: &str, package_id: PackageId, + manifest_path: &std::path::Path, target: &Target, options: &mut OutputOptions, ) -> CargoResult<()> { - if on_stderr_line_inner(state, line, package_id, target, options)? { + if on_stderr_line_inner(state, line, package_id, manifest_path, target, options)? { // Check if caching is enabled. if let Some((path, cell)) = &mut options.cache_cell { // Cache the output, which will be replayed later when Fresh. @@ -1194,6 +1212,7 @@ state: &JobState<'_>, line: &str, package_id: PackageId, + manifest_path: &std::path::Path, target: &Target, options: &mut OutputOptions, ) -> CargoResult { @@ -1220,6 +1239,11 @@ } }; + if let Ok(report) = serde_json::from_str::(compiler_message.get()) { + state.future_incompat_report(report.future_incompat_report); + return Ok(true); + } + // Depending on what we're emitting from Cargo itself, we figure out what to // do with this JSON message. 
match options.format { @@ -1251,7 +1275,9 @@ .map(|v| String::from_utf8(v).expect("utf8")) .expect("strip should never fail") }; - state.stderr(rendered)?; + if options.show_warnings { + state.stderr(rendered)?; + } return Ok(true); } } @@ -1332,8 +1358,14 @@ // And failing all that above we should have a legitimate JSON diagnostic // from the compiler, so wrap it in an external Cargo JSON message // indicating which package it came from and then emit it. + + if !options.show_warnings { + return Ok(true); + } + let msg = machine_message::FromCompiler { package_id, + manifest_path, target, message: compiler_message, } @@ -1348,10 +1380,12 @@ fn replay_output_cache( package_id: PackageId, + manifest_path: PathBuf, target: &Target, path: PathBuf, format: MessageFormat, color: bool, + show_warnings: bool, ) -> Work { let target = target.clone(); let mut options = OutputOptions { @@ -1359,6 +1393,7 @@ look_for_metadata_directive: true, color, cache_cell: None, + show_warnings, }; Work::new(move |state| { if !path.exists() { @@ -1377,7 +1412,14 @@ break; } let trimmed = line.trim_end_matches(&['\n', '\r'][..]); - on_stderr_line(state, trimmed, package_id, &target, &mut options)?; + on_stderr_line( + state, + trimmed, + package_id, + &manifest_path, + &target, + &mut options, + )?; line.clear(); } Ok(()) diff -Nru cargo-0.52.0/src/cargo/core/compiler/output_depinfo.rs cargo-0.54.0/src/cargo/core/compiler/output_depinfo.rs --- cargo-0.52.0/src/cargo/core/compiler/output_depinfo.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/output_depinfo.rs 2021-04-27 14:35:53.000000000 +0000 @@ -26,11 +26,10 @@ use std::io::{BufWriter, Write}; use std::path::{Path, PathBuf}; -use log::debug; - use super::{fingerprint, Context, FileFlavor, Unit}; -use crate::util::paths; use crate::util::{internal, CargoResult}; +use cargo_util::paths; +use log::debug; fn render_filename>(path: P, basedir: Option<&str>) -> CargoResult { let path = path.as_ref(); diff -Nru 
cargo-0.52.0/src/cargo/core/compiler/rustdoc.rs cargo-0.54.0/src/cargo/core/compiler/rustdoc.rs --- cargo-0.52.0/src/cargo/core/compiler/rustdoc.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/rustdoc.rs 2021-04-27 14:35:53.000000000 +0000 @@ -5,7 +5,7 @@ use crate::core::compiler::CompileKind; use crate::sources::CRATES_IO_REGISTRY; use crate::util::errors::{internal, CargoResult}; -use crate::util::ProcessBuilder; +use cargo_util::ProcessBuilder; use std::collections::HashMap; use std::fmt; use std::hash; diff -Nru cargo-0.52.0/src/cargo/core/compiler/standard_lib.rs cargo-0.54.0/src/cargo/core/compiler/standard_lib.rs --- cargo-0.52.0/src/cargo/core/compiler/standard_lib.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/standard_lib.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,8 +3,8 @@ use crate::core::compiler::UnitInterner; use crate::core::compiler::{CompileKind, CompileMode, RustcTargetData, Unit}; use crate::core::profiles::{Profiles, UnitFor}; -use crate::core::resolver::features::{FeaturesFor, RequestedFeatures, ResolvedFeatures}; -use crate::core::resolver::{HasDevUnits, ResolveOpts}; +use crate::core::resolver::features::{CliFeatures, FeaturesFor, ResolvedFeatures}; +use crate::core::resolver::HasDevUnits; use crate::core::{Dependency, PackageId, PackageSet, Resolve, SourceId, Workspace}; use crate::ops::{self, Packages}; use crate::util::errors::CargoResult; @@ -33,7 +33,7 @@ /// Resolve the standard library dependencies. pub fn resolve_std<'cfg>( ws: &Workspace<'cfg>, - target_data: &RustcTargetData, + target_data: &RustcTargetData<'cfg>, requested_targets: &[CompileKind], crates: &[String], ) -> CargoResult<(PackageSet<'cfg>, Resolve, ResolvedFeatures)> { @@ -107,18 +107,14 @@ "default".to_string(), ], }; - // dev_deps setting shouldn't really matter here. 
- let opts = ResolveOpts::new( - /*dev_deps*/ false, - RequestedFeatures::from_command_line( - &features, /*all_features*/ false, /*uses_default_features*/ false, - ), - ); + let cli_features = CliFeatures::from_command_line( + &features, /*all_features*/ false, /*uses_default_features*/ false, + )?; let resolve = ops::resolve_ws_with_opts( &std_ws, target_data, requested_targets, - &opts, + &cli_features, &specs, HasDevUnits::No, crate::core::resolver::features::ForceAllTargets::No, @@ -162,17 +158,18 @@ // in time is minimal, and the difference in caching is // significant. let mode = CompileMode::Build; - let profile = profiles.get_profile( - pkg.package_id(), - /*is_member*/ false, - /*is_local*/ false, - unit_for, - mode, - ); let features = std_features.activated_features(pkg.package_id(), FeaturesFor::NormalOrDev); for kind in kinds { let list = ret.entry(*kind).or_insert_with(Vec::new); + let profile = profiles.get_profile( + pkg.package_id(), + /*is_member*/ false, + /*is_local*/ false, + unit_for, + mode, + *kind, + ); list.push(interner.intern( pkg, lib, @@ -188,7 +185,7 @@ Ok(ret) } -fn detect_sysroot_src_path(target_data: &RustcTargetData) -> CargoResult { +fn detect_sysroot_src_path(target_data: &RustcTargetData<'_>) -> CargoResult { if let Some(s) = env::var_os("__CARGO_TESTS_ONLY_SRC_ROOT") { return Ok(s.into()); } diff -Nru cargo-0.52.0/src/cargo/core/compiler/timings.rs cargo-0.54.0/src/cargo/core/compiler/timings.rs --- cargo-0.52.0/src/cargo/core/compiler/timings.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/timings.rs 2021-04-27 14:35:53.000000000 +0000 @@ -8,7 +8,9 @@ use crate::core::PackageId; use crate::util::cpu::State; use crate::util::machine_message::{self, Message}; -use crate::util::{paths, CargoResult, CargoResultExt, Config}; +use crate::util::{CargoResult, Config}; +use anyhow::Context as _; +use cargo_util::paths; use std::collections::HashMap; use std::io::{BufWriter, Write}; use 
std::time::{Duration, Instant, SystemTime}; @@ -323,7 +325,7 @@ .sort_unstable_by(|a, b| a.start.partial_cmp(&b.start).unwrap()); if self.report_html { self.report_html(bcx, error) - .chain_err(|| "failed to save timing report")?; + .with_context(|| "failed to save timing report")?; } Ok(()) } diff -Nru cargo-0.52.0/src/cargo/core/compiler/unit_dependencies.rs cargo-0.54.0/src/cargo/core/compiler/unit_dependencies.rs --- cargo-0.52.0/src/cargo/core/compiler/unit_dependencies.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/unit_dependencies.rs 2021-04-27 14:35:53.000000000 +0000 @@ -44,7 +44,7 @@ /// library. is_std: bool, global_mode: CompileMode, - target_data: &'a RustcTargetData, + target_data: &'a RustcTargetData<'cfg>, profiles: &'a Profiles, interner: &'a UnitInterner, @@ -63,7 +63,7 @@ roots: &[Unit], std_roots: &HashMap>, global_mode: CompileMode, - target_data: &'a RustcTargetData, + target_data: &'a RustcTargetData<'cfg>, profiles: &'a Profiles, interner: &'a UnitInterner, ) -> CargoResult { @@ -585,6 +585,7 @@ is_local, unit_for, mode, + kind, ); new_unit_dep_with_profile(state, parent, pkg, target, unit_for, kind, mode, profile) } diff -Nru cargo-0.52.0/src/cargo/core/compiler/unit_graph.rs cargo-0.54.0/src/cargo/core/compiler/unit_graph.rs --- cargo-0.52.0/src/cargo/core/compiler/unit_graph.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/compiler/unit_graph.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,9 +1,10 @@ use crate::core::compiler::Unit; use crate::core::compiler::{CompileKind, CompileMode}; use crate::core::profiles::{Profile, UnitFor}; -use crate::core::{nightly_features_allowed, PackageId, Target}; +use crate::core::{PackageId, Target}; use crate::util::interning::InternedString; use crate::util::CargoResult; +use crate::Config; use std::collections::HashMap; use std::io::Write; @@ -62,8 +63,11 @@ // internal detail that is mostly used for building the graph. 
} -pub fn emit_serialized_unit_graph(root_units: &[Unit], unit_graph: &UnitGraph) -> CargoResult<()> { - let is_nightly = nightly_features_allowed(); +pub fn emit_serialized_unit_graph( + root_units: &[Unit], + unit_graph: &UnitGraph, + config: &Config, +) -> CargoResult<()> { let mut units: Vec<(&Unit, &Vec)> = unit_graph.iter().collect(); units.sort_unstable(); // Create a map for quick lookup for dependencies. @@ -80,7 +84,7 @@ .iter() .map(|unit_dep| { // https://github.com/rust-lang/rust/issues/64260 when stabilized. - let (public, noprelude) = if is_nightly { + let (public, noprelude) = if config.nightly_features_allowed { (Some(unit_dep.public), Some(unit_dep.noprelude)) } else { (None, None) diff -Nru cargo-0.52.0/src/cargo/core/dependency.rs cargo-0.54.0/src/cargo/core/dependency.rs --- cargo-0.52.0/src/cargo/core/dependency.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/dependency.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,3 +1,4 @@ +use anyhow::Context as _; use cargo_platform::Platform; use log::trace; use semver::ReqParseError; @@ -8,7 +9,7 @@ use std::rc::Rc; use crate::core::{PackageId, SourceId, Summary}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::Config; @@ -132,7 +133,7 @@ } Err(e) => { let err: CargoResult = Err(e.into()); - let v: VersionReq = err.chain_err(|| { + let v: VersionReq = err.with_context(|| { format!( "failed to parse the version requirement `{}` for dependency `{}`", req, name diff -Nru cargo-0.52.0/src/cargo/core/features.rs cargo-0.54.0/src/cargo/core/features.rs --- cargo-0.52.0/src/cargo/core/features.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/features.rs 2021-04-27 14:35:53.000000000 +0000 @@ -68,7 +68,6 @@ //! get an instance of `CliUnstable` and check if the option has been //! enabled on the `CliUnstable` instance. Nightly gating is already //! 
handled, so no need to worry about that. -//! 4. Update the `-Z help` documentation in the `main` function. //! //! ## Stabilization //! @@ -83,26 +82,29 @@ //! 2. `-Z unstable-options`: Find the call to `fail_if_stable_opt` and //! remove it. Be sure to update the man pages if necessary. //! 3. `-Z` flag: Change the parsing code in [`CliUnstable::add`] to call -//! `stabilized_warn` or `stabilized_err`. Remove it from the `-Z help` -//! docs in the `main` function. Remove the `(unstable)` note in the -//! clap help text if necessary. +//! `stabilized_warn` or `stabilized_err` and remove the field from +//! `CliUnstable. Remove the `(unstable)` note in the clap help text if +//! necessary. //! 2. Remove `masquerade_as_nightly_cargo` from any tests, and remove //! `cargo-features` from `Cargo.toml` test files if any. //! 3. Remove the docs from unstable.md and update the redirect at the bottom //! of that page. Update the rest of the documentation to add the new //! feature. -use std::cell::Cell; +use std::collections::BTreeSet; use std::env; use std::fmt; use std::str::FromStr; use anyhow::{bail, Error}; +use cargo_util::ProcessBuilder; use serde::{Deserialize, Serialize}; use crate::util::errors::CargoResult; -use crate::util::indented_lines; +use crate::util::{indented_lines, iter_join}; +use crate::Config; +pub const HIDDEN: &str = ""; pub const SEE_CHANNELS: &str = "See https://doc.rust-lang.org/book/appendix-07-nightly-rust.html for more information \ about Rust release channels."; @@ -118,15 +120,117 @@ Edition2021, } +// Adding a new edition: +// - Add the next edition to the enum. +// - Update every match expression that now fails to compile. +// - Update the `FromStr` impl. +// - Update CLI_VALUES to include the new edition. +// - Set LATEST_UNSTABLE to Some with the new edition. +// - Add an unstable feature to the features! macro below for the new edition. +// - Gate on that new feature in TomlManifest::to_real_manifest. 
+// - Update the shell completion files. +// - Update any failing tests (hopefully there are very few). +// +// Stabilization instructions: +// - Set LATEST_UNSTABLE to None. +// - Set LATEST_STABLE to the new version. +// - Update `is_stable` to `true`. +// - Set the editionNNNN feature to stable in the features macro below. +// - Update the man page for the --edition flag. impl Edition { + /// The latest edition that is unstable. + /// + /// This is `None` if there is no next unstable edition. + pub const LATEST_UNSTABLE: Option = Some(Edition::Edition2021); + /// The latest stable edition. + pub const LATEST_STABLE: Edition = Edition::Edition2018; + /// Possible values allowed for the `--edition` CLI flag. + /// + /// This requires a static value due to the way clap works, otherwise I + /// would have built this dynamically. + pub const CLI_VALUES: &'static [&'static str] = &["2015", "2018", "2021"]; + + /// Returns the first version that a particular edition was released on + /// stable. pub(crate) fn first_version(&self) -> Option { use Edition::*; match self { Edition2015 => None, Edition2018 => Some(semver::Version::new(1, 31, 0)), + // FIXME: This will likely be 1.56, update when that seems more likely. Edition2021 => Some(semver::Version::new(1, 62, 0)), } } + + /// Returns `true` if this edition is stable in this release. + pub fn is_stable(&self) -> bool { + use Edition::*; + match self { + Edition2015 => true, + Edition2018 => true, + Edition2021 => false, + } + } + + /// Returns the previous edition from this edition. + /// + /// Returns `None` for 2015. + pub fn previous(&self) -> Option { + use Edition::*; + match self { + Edition2015 => None, + Edition2018 => Some(Edition2015), + Edition2021 => Some(Edition2018), + } + } + + /// Returns the next edition from this edition, returning the last edition + /// if this is already the last one. 
+ pub fn saturating_next(&self) -> Edition { + use Edition::*; + match self { + Edition2015 => Edition2018, + Edition2018 => Edition2021, + Edition2021 => Edition2021, + } + } + + /// Updates the given [`ProcessBuilder`] to include the appropriate flags + /// for setting the edition. + pub(crate) fn cmd_edition_arg(&self, cmd: &mut ProcessBuilder) { + if *self != Edition::Edition2015 { + cmd.arg(format!("--edition={}", self)); + } + if !self.is_stable() { + cmd.arg("-Z").arg("unstable-options"); + } + } + + /// Whether or not this edition supports the `rust_*_compatibility` lint. + /// + /// Ideally this would not be necessary, but currently 2021 does not have + /// any lints, and thus `rustc` doesn't recognize it. Perhaps `rustc` + /// could create an empty group instead? + pub(crate) fn supports_compat_lint(&self) -> bool { + use Edition::*; + match self { + Edition2015 => false, + Edition2018 => true, + Edition2021 => false, + } + } + + /// Whether or not this edition supports the `rust_*_idioms` lint. + /// + /// Ideally this would not be necessary... + pub(crate) fn supports_idiom_lint(&self) -> bool { + use Edition::*; + match self { + Edition2015 => false, + Edition2018 => true, + Edition2021 => false, + } + } } impl fmt::Display for Edition { @@ -174,6 +278,7 @@ pub struct Features { $($feature: bool,)* activated: Vec, + nightly_features_allowed: bool, } impl Feature { @@ -282,6 +387,12 @@ // Specifying a minimal 'rust-version' attribute for crates (unstable, rust_version, "", "reference/unstable.html#rust-version"), + + // Support for 2021 edition. 
+ (unstable, edition2021, "", "reference/unstable.html#edition-2021"), + + // Allow to specify per-package targets (compile kinds) + (unstable, per_package_target, "", "reference/unstable.html#per-package-target"), } const PUBLISH_LOCKFILE_REMOVED: &str = "The publish-lockfile key in Cargo.toml \ @@ -301,16 +412,27 @@ } impl Features { - pub fn new(features: &[String], warnings: &mut Vec) -> CargoResult { + pub fn new( + features: &[String], + config: &Config, + warnings: &mut Vec, + ) -> CargoResult { let mut ret = Features::default(); + ret.nightly_features_allowed = config.nightly_features_allowed; for feature in features { - ret.add(feature, warnings)?; + ret.add(feature, config, warnings)?; ret.activated.push(feature.to_string()); } Ok(ret) } - fn add(&mut self, feature_name: &str, warnings: &mut Vec) -> CargoResult<()> { + fn add( + &mut self, + feature_name: &str, + config: &Config, + warnings: &mut Vec, + ) -> CargoResult<()> { + let nightly_features_allowed = self.nightly_features_allowed; let (slot, feature) = match self.status(feature_name) { Some(p) => p, None => bail!("unknown cargo feature `{}`", feature_name), @@ -348,7 +470,7 @@ ); warnings.push(warning); } - Status::Unstable if !nightly_features_allowed() => bail!( + Status::Unstable if !nightly_features_allowed => bail!( "the cargo feature `{}` requires a nightly version of \ Cargo, but this is the `{}` channel\n\ {}\n{}", @@ -357,7 +479,17 @@ SEE_CHANNELS, see_docs() ), - Status::Unstable => {} + Status::Unstable => { + if let Some(allow) = &config.cli_unstable().allow_features { + if !allow.contains(feature_name) { + bail!( + "the feature `{}` is not in the list of allowed features: [{}]", + feature_name, + iter_join(allow, ", "), + ); + } + } + } Status::Removed => bail!( "the cargo feature `{}` has been removed\n\ Remove the feature from Cargo.toml to remove this error.\n\ @@ -383,7 +515,7 @@ let feature = feature.name.replace("_", "-"); let mut msg = format!("feature `{}` is required", 
feature); - if nightly_features_allowed() { + if self.nightly_features_allowed { let s = format!( "\n\nconsider adding `cargo-features = [\"{0}\"]` \ to the manifest", @@ -409,43 +541,75 @@ } } -/// A parsed representation of all unstable flags that Cargo accepts. -/// -/// Cargo, like `rustc`, accepts a suite of `-Z` flags which are intended for -/// gating unstable functionality to Cargo. These flags are only available on -/// the nightly channel of Cargo. -#[derive(Default, Debug, Deserialize)] -#[serde(default, rename_all = "kebab-case")] -pub struct CliUnstable { - pub print_im_a_teapot: bool, - pub unstable_options: bool, - pub no_index_update: bool, - pub avoid_dev_deps: bool, - pub minimal_versions: bool, - pub advanced_env: bool, - pub config_include: bool, - pub dual_proc_macros: bool, - pub mtime_on_use: bool, - pub named_profiles: bool, - pub binary_dep_depinfo: bool, - #[serde(deserialize_with = "deserialize_build_std")] - pub build_std: Option>, - pub build_std_features: Option>, - pub timings: Option>, - pub doctest_xcompile: bool, - pub panic_abort_tests: bool, - pub jobserver_per_rustc: bool, - pub features: Option>, - pub separate_nightlies: bool, - pub multitarget: bool, - pub rustdoc_map: bool, - pub terminal_width: Option>, - pub namespaced_features: bool, - pub weak_dep_features: bool, - pub extra_link_arg: bool, - pub credential_process: bool, +macro_rules! unstable_cli_options { + ( + $( + $(#[$meta:meta])? + $element: ident: $ty: ty = ($help: expr ), + )* + ) => { + /// A parsed representation of all unstable flags that Cargo accepts. + /// + /// Cargo, like `rustc`, accepts a suite of `-Z` flags which are intended for + /// gating unstable functionality to Cargo. These flags are only available on + /// the nightly channel of Cargo. + #[derive(Default, Debug, Deserialize)] + #[serde(default, rename_all = "kebab-case")] + pub struct CliUnstable { + $( + $(#[$meta])? 
+ pub $element: $ty + ),* + } + impl CliUnstable { + pub fn help() -> Vec<(&'static str, &'static str)> { + let fields = vec![$((stringify!($element), $help)),*]; + fields + } + } + } } +unstable_cli_options!( + // Permanently unstable features: + allow_features: Option> = ("Allow *only* the listed unstable features"), + print_im_a_teapot: bool= (HIDDEN), + + // All other unstable features. + // Please keep this list lexiographically ordered. + advanced_env: bool = (HIDDEN), + avoid_dev_deps: bool = ("Avoid installing dev-dependencies if possible"), + binary_dep_depinfo: bool = ("Track changes to dependency artifacts"), + #[serde(deserialize_with = "deserialize_build_std")] + build_std: Option> = ("Enable Cargo to compile the standard library itself as part of a crate graph compilation"), + build_std_features: Option> = ("Configure features enabled for the standard library itself when building the standard library"), + config_include: bool = ("Enable the `include` key in config files"), + configurable_env: bool = ("Enable the [env] section in the .cargo/config.toml file"), + credential_process: bool = ("Add a config setting to fetch registry authentication tokens by calling an external process"), + doctest_in_workspace: bool = ("Compile doctests with paths relative to the workspace root"), + doctest_xcompile: bool = ("Compile and run doctests for non-host target using runner config"), + dual_proc_macros: bool = ("Build proc-macros for both the host and the target"), + future_incompat_report: bool = ("Enable creation of a future-incompat report for all dependencies"), + extra_link_arg: bool = ("Allow `cargo:rustc-link-arg` in build scripts"), + features: Option> = (HIDDEN), + jobserver_per_rustc: bool = (HIDDEN), + minimal_versions: bool = ("Resolve minimal dependency versions instead of maximum"), + mtime_on_use: bool = ("Configure Cargo to update the mtime of used files"), + multitarget: bool = ("Allow passing multiple `--target` flags to the cargo subcommand 
selected"), + named_profiles: bool = ("Allow defining custom profiles"), + namespaced_features: bool = ("Allow features with `dep:` prefix"), + no_index_update: bool = ("Do not update the registry index even if the cache is outdated"), + panic_abort_tests: bool = ("Enable support to run tests with -Cpanic=abort"), + patch_in_config: bool = ("Allow `[patch]` sections in .cargo/config.toml files"), + rustdoc_map: bool = ("Allow passing external documentation mappings to rustdoc"), + separate_nightlies: bool = (HIDDEN), + terminal_width: Option> = ("Provide a terminal width to rustc for error truncation"), + timings: Option> = ("Display concurrency information"), + unstable_options: bool = ("Allow the usage of unstable options"), + weak_dep_features: bool = ("Allow `dep_name?/feature` feature syntax"), + skip_rustdoc_fingerprint: bool = (HIDDEN), +); + const STABILIZED_COMPILE_PROGRESS: &str = "The progress bar is now always \ enabled when used on an interactive console.\n\ See https://doc.rust-lang.org/cargo/reference/config.html#termprogresswhen \ @@ -495,8 +659,12 @@ } impl CliUnstable { - pub fn parse(&mut self, flags: &[String]) -> CargoResult> { - if !flags.is_empty() && !nightly_features_allowed() { + pub fn parse( + &mut self, + flags: &[String], + nightly_features_allowed: bool, + ) -> CargoResult> { + if !flags.is_empty() && !nightly_features_allowed { bail!( "the `-Z` flag is only accepted on the nightly channel of Cargo, \ but this is the `{}` channel\n\ @@ -506,6 +674,13 @@ ); } let mut warnings = Vec::new(); + // We read flags twice, first to get allowed-features (if specified), + // and then to read the remaining unstable flags. 
+ for flag in flags { + if flag.starts_with("allow-features=") { + self.add(flag, &mut warnings)?; + } + } for flag in flags { self.add(flag, &mut warnings)?; } @@ -535,6 +710,7 @@ fn parse_features(value: Option<&str>) -> Vec { match value { None => Vec::new(), + Some("") => Vec::new(), Some(v) => v.split(',').map(|s| s.to_string()).collect(), } } @@ -577,8 +753,19 @@ )) }; + if let Some(allowed) = &self.allow_features { + if k != "allow-features" && !allowed.contains(k) { + bail!( + "the feature `{}` is not in the list of allowed features: [{}]", + k, + iter_join(allowed, ", ") + ); + } + } + match k { "print-im-a-teapot" => self.print_im_a_teapot = parse_bool(k, v)?, + "allow-features" => self.allow_features = Some(parse_features(v).into_iter().collect()), "unstable-options" => self.unstable_options = parse_empty(k, v)?, "no-index-update" => self.no_index_update = parse_empty(k, v)?, "avoid-dev-deps" => self.avoid_dev_deps = parse_empty(k, v)?, @@ -596,8 +783,11 @@ "build-std-features" => self.build_std_features = Some(parse_features(v)), "timings" => self.timings = Some(parse_timings(v)), "doctest-xcompile" => self.doctest_xcompile = parse_empty(k, v)?, + "doctest-in-workspace" => self.doctest_in_workspace = parse_empty(k, v)?, "panic-abort-tests" => self.panic_abort_tests = parse_empty(k, v)?, "jobserver-per-rustc" => self.jobserver_per_rustc = parse_empty(k, v)?, + "configurable-env" => self.configurable_env = parse_empty(k, v)?, + "patch-in-config" => self.patch_in_config = parse_empty(k, v)?, "features" => { // For now this is still allowed (there are still some // unstable options like "compare"). 
This should be removed at @@ -627,6 +817,7 @@ "weak-dep-features" => self.weak_dep_features = parse_empty(k, v)?, "extra-link-arg" => self.extra_link_arg = parse_empty(k, v)?, "credential-process" => self.credential_process = parse_empty(k, v)?, + "skip-rustdoc-fingerprint" => self.skip_rustdoc_fingerprint = parse_empty(k, v)?, "compile-progress" => stabilized_warn(k, "1.30", STABILIZED_COMPILE_PROGRESS), "offline" => stabilized_err(k, "1.36", STABILIZED_OFFLINE)?, "cache-messages" => stabilized_warn(k, "1.40", STABILIZED_CACHE_MESSAGES), @@ -634,15 +825,15 @@ "config-profile" => stabilized_warn(k, "1.43", STABILIZED_CONFIG_PROFILE), "crate-versions" => stabilized_warn(k, "1.47", STABILIZED_CRATE_VERSIONS), "package-features" => stabilized_warn(k, "1.51", STABILIZED_PACKAGE_FEATURES), + "future-incompat-report" => self.future_incompat_report = parse_empty(k, v)?, _ => bail!("unknown `-Z` flag specified: {}", k), } Ok(()) } - /// Generates an error if `-Z unstable-options` was not used. - /// Intended to be used when a user passes a command-line flag that - /// requires `-Z unstable-options`. + /// Generates an error if `-Z unstable-options` was not used for a new, + /// unstable command-line flag. pub fn fail_if_stable_opt(&self, flag: &str, issue: u32) -> CargoResult<()> { if !self.unstable_options { let see = format!( @@ -650,7 +841,9 @@ information about the `{}` flag.", issue, flag ); - if nightly_features_allowed() { + // NOTE: a `config` isn't available here, check the channel directly + let channel = channel(); + if channel == "nightly" || channel == "dev" { bail!( "the `{}` flag is unstable, pass `-Z unstable-options` to enable it\n\ {}", @@ -664,7 +857,7 @@ {}\n\ {}", flag, - channel(), + channel, SEE_CHANNELS, see ); @@ -672,6 +865,43 @@ } Ok(()) } + + /// Generates an error if `-Z unstable-options` was not used for a new, + /// unstable subcommand. 
+ pub fn fail_if_stable_command( + &self, + config: &Config, + command: &str, + issue: u32, + ) -> CargoResult<()> { + if self.unstable_options { + return Ok(()); + } + let see = format!( + "See https://github.com/rust-lang/cargo/issues/{} for more \ + information about the `cargo {}` command.", + issue, command + ); + if config.nightly_features_allowed { + bail!( + "the `cargo {}` command is unstable, pass `-Z unstable-options` to enable it\n\ + {}", + command, + see + ); + } else { + bail!( + "the `cargo {}` command is unstable, and only available on the \ + nightly channel of Cargo, but this is the `{}` channel\n\ + {}\n\ + {}", + command, + channel(), + SEE_CHANNELS, + see + ); + } + } } /// Returns the current release channel ("stable", "beta", "nightly", "dev"). @@ -689,46 +919,3 @@ .map(|c| c.release_channel) .unwrap_or_else(|| String::from("dev")) } - -thread_local!( - static NIGHTLY_FEATURES_ALLOWED: Cell = Cell::new(false); - static ENABLE_NIGHTLY_FEATURES: Cell = Cell::new(false); -); - -/// This is a little complicated. -/// This should return false if: -/// - this is an artifact of the rustc distribution process for "stable" or for "beta" -/// - this is an `#[test]` that does not opt in with `enable_nightly_features` -/// - this is a integration test that uses `ProcessBuilder` -/// that does not opt in with `masquerade_as_nightly_cargo` -/// This should return true if: -/// - this is an artifact of the rustc distribution process for "nightly" -/// - this is being used in the rustc distribution process internally -/// - this is a cargo executable that was built from source -/// - this is an `#[test]` that called `enable_nightly_features` -/// - this is a integration test that uses `ProcessBuilder` -/// that called `masquerade_as_nightly_cargo` -pub fn nightly_features_allowed() -> bool { - if ENABLE_NIGHTLY_FEATURES.with(|c| c.get()) { - return true; - } - match &channel()[..] 
{ - "nightly" | "dev" => NIGHTLY_FEATURES_ALLOWED.with(|c| c.get()), - _ => false, - } -} - -/// Allows nightly features to be enabled for this thread, but only if the -/// development channel is nightly or dev. -/// -/// Used by cargo main to ensure that a cargo build from source has nightly features -pub fn maybe_allow_nightly_features() { - NIGHTLY_FEATURES_ALLOWED.with(|c| c.set(true)); -} - -/// Forcibly enables nightly features for this thread. -/// -/// Used by tests to allow the use of nightly features. -pub fn enable_nightly_features() { - ENABLE_NIGHTLY_FEATURES.with(|c| c.set(true)); -} diff -Nru cargo-0.52.0/src/cargo/core/manifest.rs cargo-0.54.0/src/cargo/core/manifest.rs --- cargo-0.52.0/src/cargo/core/manifest.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/manifest.rs 2021-04-27 14:35:53.000000000 +0000 @@ -5,12 +5,13 @@ use std::rc::Rc; use std::sync::Arc; +use anyhow::Context as _; use semver::Version; use serde::ser; use serde::Serialize; use url::Url; -use crate::core::compiler::CrateType; +use crate::core::compiler::{CompileKind, CrateType}; use crate::core::resolver::ResolveBehavior; use crate::core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary}; use crate::core::{Edition, Feature, Features, WorkspaceConfig}; @@ -31,6 +32,8 @@ pub struct Manifest { summary: Summary, targets: Vec, + default_kind: Option, + forced_kind: Option, links: Option, warnings: Warnings, exclude: Vec, @@ -285,7 +288,7 @@ edition: &self.edition().to_string(), required_features: self .required_features() - .map(|rf| rf.iter().map(|s| &**s).collect()), + .map(|rf| rf.iter().map(|s| s.as_str()).collect()), doc: self.documented(), doctest: self.doctested() && self.doctestable(), test: self.tested(), @@ -365,6 +368,8 @@ impl Manifest { pub fn new( summary: Summary, + default_kind: Option, + forced_kind: Option, targets: Vec, exclude: Vec, include: Vec, @@ -387,6 +392,8 @@ ) -> Manifest { Manifest { summary, + default_kind, + forced_kind, 
targets, warnings: Warnings::new(), exclude, @@ -413,6 +420,12 @@ pub fn dependencies(&self) -> &[Dependency] { self.summary.dependencies() } + pub fn default_kind(&self) -> Option { + self.default_kind + } + pub fn forced_kind(&self) -> Option { + self.forced_kind + } pub fn exclude(&self) -> &[String] { &self.exclude } @@ -496,11 +509,18 @@ if self.im_a_teapot.is_some() { self.unstable_features .require(Feature::test_dummy_unstable()) - .chain_err(|| { - anyhow::format_err!( - "the `im-a-teapot` manifest key is unstable and may \ - not work properly in England" - ) + .with_context(|| { + "the `im-a-teapot` manifest key is unstable and may \ + not work properly in England" + })?; + } + + if self.default_kind.is_some() || self.forced_kind.is_some() { + self.unstable_features + .require(Feature::per_package_target()) + .with_context(|| { + "the `package.default-target` and `package.forced-target` \ + manifest keys are unstable and may not work properly" })?; } diff -Nru cargo-0.52.0/src/cargo/core/mod.rs cargo-0.54.0/src/cargo/core/mod.rs --- cargo-0.52.0/src/cargo/core/mod.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,7 +1,4 @@ pub use self::dependency::Dependency; -pub use self::features::{ - enable_nightly_features, maybe_allow_nightly_features, nightly_features_allowed, -}; pub use self::features::{CliUnstable, Edition, Feature, Features}; pub use self::manifest::{EitherManifest, VirtualManifest}; pub use self::manifest::{Manifest, Target, TargetKind}; @@ -13,7 +10,7 @@ pub use self::shell::{Shell, Verbosity}; pub use self::source::{GitReference, Source, SourceId, SourceMap}; pub use self::summary::{FeatureMap, FeatureValue, Summary}; -pub use self::workspace::{Members, Workspace, WorkspaceConfig, WorkspaceRootConfig}; +pub use self::workspace::{MaybePackage, Members, Workspace, WorkspaceConfig, WorkspaceRootConfig}; pub mod compiler; pub mod dependency; diff -Nru 
cargo-0.52.0/src/cargo/core/package_id_spec.rs cargo-0.54.0/src/cargo/core/package_id_spec.rs --- cargo-0.52.0/src/cargo/core/package_id_spec.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/package_id_spec.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,13 +1,13 @@ use std::collections::HashMap; use std::fmt; -use anyhow::bail; +use anyhow::{bail, Context as _}; use semver::Version; use serde::{de, ser}; use url::Url; use crate::core::PackageId; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::lev_distance; use crate::util::{validate_package_name, IntoUrl, ToSemver}; @@ -38,12 +38,9 @@ /// use cargo::core::PackageIdSpec; /// /// let specs = vec![ + /// "https://crates.io/foo", /// "https://crates.io/foo#1.2.3", /// "https://crates.io/foo#bar:1.2.3", - /// "crates.io/foo", - /// "crates.io/foo#1.2.3", - /// "crates.io/foo#bar", - /// "crates.io/foo#bar:1.2.3", /// "foo", /// "foo:1.2.3", /// ]; @@ -51,14 +48,21 @@ /// assert!(PackageIdSpec::parse(spec).is_ok()); /// } pub fn parse(spec: &str) -> CargoResult { - if spec.contains('/') { + if spec.contains("://") { if let Ok(url) = spec.into_url() { return PackageIdSpec::from_url(url); } - if !spec.contains("://") { - if let Ok(url) = Url::parse(&format!("cargo://{}", spec)) { - return PackageIdSpec::from_url(url); - } + } else if spec.contains('/') || spec.contains('\\') { + let abs = std::env::current_dir().unwrap_or_default().join(spec); + if abs.exists() { + let maybe_url = Url::from_file_path(abs) + .map_or_else(|_| "a file:// URL".to_string(), |url| url.to_string()); + bail!( + "package ID specification `{}` looks like a file path, \ + maybe try {}", + spec, + maybe_url + ); } } let mut parts = spec.splitn(2, ':'); @@ -80,8 +84,11 @@ where I: IntoIterator, { - let spec = PackageIdSpec::parse(spec) - .chain_err(|| anyhow::format_err!("invalid package ID specification: `{}`", spec))?; + 
let i: Vec<_> = i.into_iter().collect(); + let spec = PackageIdSpec::parse(spec).with_context(|| { + let suggestion = lev_distance::closest_msg(spec, i.iter(), |id| id.name().as_str()); + format!("invalid package ID specification: `{}`{}", spec, suggestion) + })?; spec.query(i) } @@ -275,11 +282,7 @@ let mut printed_name = false; match self.url { Some(ref url) => { - if url.scheme() == "cargo" { - write!(f, "{}{}", url.host().unwrap(), url.path())?; - } else { - write!(f, "{}", url)?; - } + write!(f, "{}", url)?; if url.path_segments().unwrap().next_back().unwrap() != &*self.name { printed_name = true; write!(f, "#{}", self.name)?; @@ -333,51 +336,27 @@ } ok( - "https://crates.io/foo#1.2.3", - PackageIdSpec { - name: InternedString::new("foo"), - version: Some("1.2.3".to_semver().unwrap()), - url: Some(Url::parse("https://crates.io/foo").unwrap()), - }, - ); - ok( - "https://crates.io/foo#bar:1.2.3", - PackageIdSpec { - name: InternedString::new("bar"), - version: Some("1.2.3".to_semver().unwrap()), - url: Some(Url::parse("https://crates.io/foo").unwrap()), - }, - ); - ok( - "crates.io/foo", + "https://crates.io/foo", PackageIdSpec { name: InternedString::new("foo"), version: None, - url: Some(Url::parse("cargo://crates.io/foo").unwrap()), + url: Some(Url::parse("https://crates.io/foo").unwrap()), }, ); ok( - "crates.io/foo#1.2.3", + "https://crates.io/foo#1.2.3", PackageIdSpec { name: InternedString::new("foo"), version: Some("1.2.3".to_semver().unwrap()), - url: Some(Url::parse("cargo://crates.io/foo").unwrap()), - }, - ); - ok( - "crates.io/foo#bar", - PackageIdSpec { - name: InternedString::new("bar"), - version: None, - url: Some(Url::parse("cargo://crates.io/foo").unwrap()), + url: Some(Url::parse("https://crates.io/foo").unwrap()), }, ); ok( - "crates.io/foo#bar:1.2.3", + "https://crates.io/foo#bar:1.2.3", PackageIdSpec { name: InternedString::new("bar"), version: Some("1.2.3".to_semver().unwrap()), - url: Some(Url::parse("cargo://crates.io/foo").unwrap()), 
+ url: Some(Url::parse("https://crates.io/foo").unwrap()), }, ); ok( diff -Nru cargo-0.52.0/src/cargo/core/package.rs cargo-0.54.0/src/cargo/core/package.rs --- cargo-0.52.0/src/cargo/core/package.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/package.rs 2021-04-27 14:35:53.000000000 +0000 @@ -26,7 +26,7 @@ use crate::core::{SourceMap, Summary, Workspace}; use crate::ops; use crate::util::config::PackageCacheLock; -use crate::util::errors::{CargoResult, CargoResultExt, HttpNot200}; +use crate::util::errors::{CargoResult, HttpNot200}; use crate::util::interning::InternedString; use crate::util::network::Retry; use crate::util::{self, internal, Config, Progress, ProgressStyle}; @@ -151,6 +151,10 @@ pub fn targets(&self) -> &[Target] { self.manifest().targets() } + /// Gets the library crate for this package, if it exists. + pub fn library(&self) -> Option<&Target> { + self.targets().iter().find(|t| t.is_lib()) + } /// Gets the current package version. pub fn version(&self) -> &Version { self.package_id().version() @@ -416,7 +420,7 @@ let multiplexing = config.http_config()?.multiplexing.unwrap_or(true); multi .pipelining(false, multiplexing) - .chain_err(|| "failed to enable multiplexing/pipelining in curl")?; + .with_context(|| "failed to enable multiplexing/pipelining in curl")?; // let's not flood crates.io with connections multi.set_max_host_connections(2)?; @@ -434,11 +438,11 @@ }) } - pub fn package_ids<'a>(&'a self) -> impl Iterator + 'a { + pub fn package_ids(&self) -> impl Iterator + '_ { self.packages.keys().cloned() } - pub fn packages<'a>(&'a self) -> impl Iterator + 'a { + pub fn packages(&self) -> impl Iterator { self.packages.values().filter_map(|p| p.borrow()) } @@ -496,7 +500,7 @@ root_ids: &[PackageId], has_dev_units: HasDevUnits, requested_kinds: &[CompileKind], - target_data: &RustcTargetData, + target_data: &RustcTargetData<'cfg>, force_all_targets: ForceAllTargets, ) -> CargoResult<()> { fn collect_used_deps( @@ -505,7 
+509,7 @@ pkg_id: PackageId, has_dev_units: HasDevUnits, requested_kinds: &[CompileKind], - target_data: &RustcTargetData, + target_data: &RustcTargetData<'_>, force_all_targets: ForceAllTargets, ) -> CargoResult<()> { if !used.insert(pkg_id) { @@ -612,7 +616,7 @@ /// the package is ready and doesn't need to be downloaded. pub fn start(&mut self, id: PackageId) -> CargoResult> { self.start_inner(id) - .chain_err(|| format!("failed to download `{}`", id)) + .with_context(|| format!("failed to download `{}`", id)) } fn start_inner(&mut self, id: PackageId) -> CargoResult> { @@ -636,7 +640,7 @@ .ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?; let pkg = source .download(id) - .chain_err(|| anyhow::format_err!("unable to get packages from source"))?; + .with_context(|| "unable to get packages from source")?; let (url, descriptor) = match pkg { MaybePackage::Ready(pkg) => { debug!("{} doesn't need a download", id); @@ -810,7 +814,7 @@ } Ok(()) }) - .chain_err(|| format!("failed to download from `{}`", dl.url))? + .with_context(|| format!("failed to download from `{}`", dl.url))? 
}; match ret { Some(()) => break (dl, data), @@ -908,7 +912,7 @@ self.set .multi .perform() - .chain_err(|| "failed to perform http requests") + .with_context(|| "failed to perform http requests") })?; debug!("handles remaining: {}", n); let results = &mut self.results; @@ -935,7 +939,7 @@ self.set .multi .wait(&mut [], timeout) - .chain_err(|| "failed to wait on curl `Multi`")?; + .with_context(|| "failed to wait on curl `Multi`")?; } } diff -Nru cargo-0.52.0/src/cargo/core/profiles.rs cargo-0.54.0/src/cargo/core/profiles.rs --- cargo-0.52.0/src/cargo/core/profiles.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/profiles.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,11 +1,10 @@ -use crate::core::compiler::{CompileMode, Unit}; +use crate::core::compiler::{CompileKind, CompileMode, Unit}; use crate::core::resolver::features::FeaturesFor; use crate::core::{Feature, PackageId, PackageIdSpec, Resolve, Shell, Target, Workspace}; -use crate::util::errors::CargoResultExt; use crate::util::interning::InternedString; use crate::util::toml::{ProfilePackageSpec, StringOrBool, TomlProfile, TomlProfiles, U32OrBool}; use crate::util::{closest_msg, config, CargoResult, Config}; -use anyhow::bail; +use anyhow::{bail, Context as _}; use std::collections::{BTreeMap, HashMap, HashSet}; use std::hash::Hash; use std::{cmp, env, fmt, hash}; @@ -291,6 +290,7 @@ is_local: bool, unit_for: UnitFor, mode: CompileMode, + kind: CompileKind, ) -> Profile { let (profile_name, inherits) = if !self.named_profiles_enabled { // With the feature disabled, we degrade `--profile` back to the @@ -346,6 +346,23 @@ } } + // Default macOS debug information to being stored in the "unpacked" + // split-debuginfo format. At the time of this writing that's the only + // platform which has a stable `-Csplit-debuginfo` option for rustc, + // and it's typically much faster than running `dsymutil` on all builds + // in incremental cases. 
+ if let Some(debug) = profile.debuginfo { + if profile.split_debuginfo.is_none() && debug > 0 { + let target = match &kind { + CompileKind::Host => self.rustc_host.as_str(), + CompileKind::Target(target) => target.short_name(), + }; + if target.contains("-apple-") { + profile.split_debuginfo = Some(InternedString::new("unpacked")); + } + } + } + // Incremental can be globally overridden. if let Some(v) = self.incremental { profile.incremental = v; @@ -552,9 +569,7 @@ } match toml.lto { Some(StringOrBool::Bool(b)) => profile.lto = Lto::Bool(b), - Some(StringOrBool::String(ref n)) if matches!(n.as_str(), "off" | "n" | "no") => { - profile.lto = Lto::Off - } + Some(StringOrBool::String(ref n)) if is_off(n.as_str()) => profile.lto = Lto::Off, Some(StringOrBool::String(ref n)) => profile.lto = Lto::Named(InternedString::new(n)), None => {} } @@ -590,9 +605,12 @@ if let Some(incremental) = toml.incremental { profile.incremental = incremental; } - if let Some(strip) = toml.strip { - profile.strip = strip; - } + profile.strip = match toml.strip { + Some(StringOrBool::Bool(true)) => Strip::Named(InternedString::new("symbols")), + None | Some(StringOrBool::Bool(false)) => Strip::None, + Some(StringOrBool::String(ref n)) if is_off(n.as_str()) => Strip::None, + Some(StringOrBool::String(ref n)) => Strip::Named(InternedString::new(n)), + }; } /// The root profile (dev/release). 
@@ -809,24 +827,22 @@ )] #[serde(rename_all = "lowercase")] pub enum Strip { - /// Only strip debugging symbols - DebugInfo, /// Don't remove any symbols None, - /// Strip all non-exported symbols from the final binary - Symbols, + /// Named Strip settings + Named(InternedString), } impl fmt::Display for Strip { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { - Strip::DebugInfo => "debuginfo", Strip::None => "none", - Strip::Symbols => "symbols", + Strip::Named(s) => s.as_str(), } .fmt(f) } } + /// Flags used in creating `Unit`s to indicate the purpose for the target, and /// to ensure the target's dependencies have the correct settings. #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] @@ -1125,11 +1141,10 @@ profile .val .validate(name, ws.unstable_features(), &mut warnings) - .chain_err(|| { - anyhow::format_err!( + .with_context(|| { + format!( "config profile `{}` is not valid (defined in `{}`)", - name, - profile.definition + name, profile.definition ) })?; for warning in warnings { @@ -1249,3 +1264,8 @@ } Ok(()) } + +/// Returns `true` if a string is a toggle that turns an option off. 
+fn is_off(s: &str) -> bool { + matches!(s, "off" | "n" | "no" | "none") +} diff -Nru cargo-0.52.0/src/cargo/core/registry.rs cargo-0.54.0/src/cargo/core/registry.rs --- cargo-0.52.0/src/cargo/core/registry.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/registry.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,10 +3,10 @@ use crate::core::PackageSet; use crate::core::{Dependency, PackageId, Source, SourceId, SourceMap, Summary}; use crate::sources::config::SourceConfigMap; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::{profile, CanonicalUrl, Config}; -use anyhow::bail; +use anyhow::{bail, Context as _}; use log::{debug, trace}; use semver::VersionReq; use url::Url; @@ -107,6 +107,21 @@ Normal, } +/// Argument to `PackageRegistry::patch` which is information about a `[patch]` +/// directive that we found in a lockfile, if present. +pub struct LockedPatchDependency { + /// The original `Dependency` directive, except "locked" so it's version + /// requirement is `=foo` and its `SourceId` has a "precise" listed. + pub dependency: Dependency, + /// The `PackageId` that was previously found in a lock file which + /// `dependency` matches. + pub package_id: PackageId, + /// Something only used for backwards compatibility with the v2 lock file + /// format where `branch=master` is considered the same as `DefaultBranch`. + /// For more comments on this see the code in `ops/resolve.rs`. + pub alt_package_id: Option, +} + impl<'cfg> PackageRegistry<'cfg> { pub fn new(config: &'cfg Config) -> CargoResult> { let source_config = SourceConfigMap::new(config)?; @@ -240,7 +255,7 @@ pub fn patch( &mut self, url: &Url, - deps: &[(&Dependency, Option<(Dependency, PackageId)>)], + deps: &[(&Dependency, Option)], ) -> CargoResult> { // NOTE: None of this code is aware of required features. 
If a patch // is missing a required feature, you end up with an "unused patch" @@ -268,7 +283,7 @@ let orig_patch = *orig_patch; // Use the locked patch if it exists, otherwise use the original. let dep = match locked { - Some((locked_patch, _locked_id)) => locked_patch, + Some(lock) => &lock.dependency, None => orig_patch, }; debug!( @@ -281,8 +296,8 @@ // normally would and then ask it directly for the list of summaries // corresponding to this `dep`. self.ensure_loaded(dep.source_id(), Kind::Normal) - .chain_err(|| { - anyhow::format_err!( + .with_context(|| { + format!( "failed to load source for dependency `{}`", dep.package_name() ) @@ -293,14 +308,16 @@ .get_mut(dep.source_id()) .expect("loaded source not present"); let summaries = source.query_vec(dep)?; - let (summary, should_unlock) = - summary_for_patch(orig_patch, locked, summaries, source).chain_err(|| { - format!( - "patch for `{}` in `{}` failed to resolve", - orig_patch.package_name(), - url, - ) - })?; + let (summary, should_unlock) = summary_for_patch( + orig_patch, locked, summaries, source, + ) + .with_context(|| { + format!( + "patch for `{}` in `{}` failed to resolve", + orig_patch.package_name(), + url, + ) + })?; debug!( "patch summary is {:?} should_unlock={:?}", summary, should_unlock @@ -320,7 +337,7 @@ Ok(summary) }) .collect::>>() - .chain_err(|| anyhow::format_err!("failed to resolve patches for `{}`", url))?; + .with_context(|| format!("failed to resolve patches for `{}`", url))?; let mut name_and_version = HashSet::new(); for summary in unlocked_summaries.iter() { @@ -336,13 +353,36 @@ } } + // Calculate a list of all patches available for this source which is + // then used later during calls to `lock` to rewrite summaries to point + // directly at these patched entries. + // + // Note that this is somewhat subtle where the list of `ids` for a + // canonical URL is extend with possibly two ids per summary. 
This is done + // to handle the transition from the v2->v3 lock file format where in + // v2 DefeaultBranch was either DefaultBranch or Branch("master") for + // git dependencies. In this case if `summary.package_id()` is + // Branch("master") then alt_package_id will be DefaultBranch. This + // signifies that there's a patch available for either of those + // dependency directives if we see them in the dependency graph. + // + // This is a bit complicated and hopefully an edge case we can remove + // in the future, but for now it hopefully doesn't cause too much + // harm... + let mut ids = Vec::new(); + for (summary, (_, lock)) in unlocked_summaries.iter().zip(deps) { + ids.push(summary.package_id()); + if let Some(lock) = lock { + ids.extend(lock.alt_package_id); + } + } + self.patches_available.insert(canonical.clone(), ids); + // Note that we do not use `lock` here to lock summaries! That step // happens later once `lock_patches` is invoked. In the meantime though // we want to fill in the `patches_available` map (later used in the // `lock` method) and otherwise store the unlocked summaries in // `patches` to get locked in a future call to `lock_patches`. 
- let ids = unlocked_summaries.iter().map(|s| s.package_id()).collect(); - self.patches_available.insert(canonical.clone(), ids); self.patches.insert(canonical, unlocked_summaries); Ok(unlock_patches) @@ -388,7 +428,7 @@ let _p = profile::start(format!("updating: {}", source_id)); self.sources.get_mut(source_id).unwrap().update() })() - .chain_err(|| anyhow::format_err!("Unable to update {}", source_id))?; + .with_context(|| format!("Unable to update {}", source_id))?; Ok(()) } @@ -539,8 +579,8 @@ // Ensure the requested source_id is loaded self.ensure_loaded(dep.source_id(), Kind::Normal) - .chain_err(|| { - anyhow::format_err!( + .with_context(|| { + format!( "failed to load source for dependency `{}`", dep.package_name() ) @@ -745,7 +785,7 @@ /// This is a helper for selecting the summary, or generating a helpful error message. fn summary_for_patch( orig_patch: &Dependency, - locked: &Option<(Dependency, PackageId)>, + locked: &Option, mut summaries: Vec, source: &mut dyn Source, ) -> CargoResult<(Summary, Option)> { @@ -777,7 +817,7 @@ } assert!(summaries.is_empty()); // No summaries found, try to help the user figure out what is wrong. - if let Some((_locked_patch, locked_id)) = locked { + if let Some(locked) = locked { // Since the locked patch did not match anything, try the unlocked one. let orig_matches = source.query_vec(orig_patch).unwrap_or_else(|e| { log::warn!( @@ -790,7 +830,7 @@ let (summary, _) = summary_for_patch(orig_patch, &None, orig_matches, source)?; // The unlocked version found a match. This returns a value to // indicate that this entry should be unlocked. - return Ok((summary, Some(*locked_id))); + return Ok((summary, Some(locked.package_id))); } // Try checking if there are *any* packages that match this by name. 
let name_only_dep = Dependency::new_override(orig_patch.package_name(), orig_patch.source_id()); diff -Nru cargo-0.52.0/src/cargo/core/resolver/context.rs cargo-0.54.0/src/cargo/core/resolver/context.rs --- cargo-0.52.0/src/cargo/core/resolver/context.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/resolver/context.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,6 +1,7 @@ use super::dep_cache::RegistryQueryer; use super::errors::ActivateResult; use super::types::{ConflictMap, ConflictReason, FeaturesSet, ResolveOpts}; +use super::RequestedFeatures; use crate::core::{Dependency, PackageId, SourceId, Summary}; use crate::util::interning::InternedString; use crate::util::Graph; @@ -160,23 +161,32 @@ } } debug!("checking if {} is already activated", summary.package_id()); - if opts.features.all_features { - return Ok(false); - } - - let has_default_feature = summary.features().contains_key("default"); - Ok(match self.resolve_features.get(&id) { - Some(prev) => { - opts.features.features.is_subset(prev) - && (!opts.features.uses_default_features - || prev.contains("default") - || !has_default_feature) - } - None => { - opts.features.features.is_empty() - && (!opts.features.uses_default_features || !has_default_feature) + match &opts.features { + // This returns `false` for CliFeatures just for simplicity. It + // would take a bit of work to compare since they are not in the + // same format as DepFeatures (and that may be expensive + // performance-wise). Also, it should only occur once for a root + // package. The only drawback is that it may re-activate a root + // package again, which should only affect performance, but that + // should be rare. Cycles should still be detected since those + // will have `DepFeatures` edges. 
+ RequestedFeatures::CliFeatures(_) => Ok(false), + RequestedFeatures::DepFeatures { + features, + uses_default_features, + } => { + let has_default_feature = summary.features().contains_key("default"); + Ok(match self.resolve_features.get(&id) { + Some(prev) => { + features.is_subset(prev) + && (!uses_default_features + || prev.contains("default") + || !has_default_feature) + } + None => features.is_empty() && (!uses_default_features || !has_default_feature), + }) } - }) + } } /// If the package is active returns the `ContextAge` when it was added diff -Nru cargo-0.52.0/src/cargo/core/resolver/dep_cache.rs cargo-0.54.0/src/cargo/core/resolver/dep_cache.rs --- cargo-0.52.0/src/cargo/core/resolver/dep_cache.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/resolver/dep_cache.rs 2021-04-27 14:35:53.000000000 +0000 @@ -12,12 +12,14 @@ use crate::core::resolver::context::Context; use crate::core::resolver::errors::describe_path; use crate::core::resolver::types::{ConflictReason, DepInfo, FeaturesSet}; -use crate::core::resolver::{ActivateError, ActivateResult, ResolveOpts}; +use crate::core::resolver::{ + ActivateError, ActivateResult, CliFeatures, RequestedFeatures, ResolveOpts, +}; use crate::core::{Dependency, FeatureValue, PackageId, PackageIdSpec, Registry, Summary}; -use crate::core::{GitReference, SourceId}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; -use crate::util::Config; + +use anyhow::Context as _; use log::debug; use std::cmp::Ordering; use std::collections::{BTreeSet, HashMap, HashSet}; @@ -40,10 +42,6 @@ >, /// all the cases we ended up using a supplied replacement used_replacements: HashMap, - /// Where to print warnings, if configured. - config: Option<&'a Config>, - /// Sources that we've already wared about possibly colliding in the future. 
- warned_git_collisions: HashSet, } impl<'a> RegistryQueryer<'a> { @@ -52,7 +50,6 @@ replacements: &'a [(PackageIdSpec, Dependency)], try_to_use: &'a HashSet, minimal_versions: bool, - config: Option<&'a Config>, ) -> Self { RegistryQueryer { registry, @@ -62,8 +59,6 @@ registry_cache: HashMap::new(), summary_cache: HashMap::new(), used_replacements: HashMap::new(), - config, - warned_git_collisions: HashSet::new(), } } @@ -75,44 +70,6 @@ self.used_replacements.get(&p) } - /// Issues a future-compatible warning targeted at removing reliance on - /// unifying behavior between these two dependency directives: - /// - /// ```toml - /// [dependencies] - /// a = { git = 'https://example.org/foo' } - /// a = { git = 'https://example.org/foo', branch = 'master } - /// ``` - /// - /// Historical versions of Cargo considered these equivalent but going - /// forward we'd like to fix this. For more details see the comments in - /// src/cargo/sources/git/utils.rs - fn warn_colliding_git_sources(&mut self, id: SourceId) -> CargoResult<()> { - let config = match self.config { - Some(config) => config, - None => return Ok(()), - }; - let prev = match self.warned_git_collisions.replace(id) { - Some(prev) => prev, - None => return Ok(()), - }; - match (id.git_reference(), prev.git_reference()) { - (Some(GitReference::DefaultBranch), Some(GitReference::Branch(b))) - | (Some(GitReference::Branch(b)), Some(GitReference::DefaultBranch)) - if b == "master" => {} - _ => return Ok(()), - } - - config.shell().warn(&format!( - "two git dependencies found for `{}` \ - where one uses `branch = \"master\"` and the other doesn't; \ - this will break in a future version of Cargo, so please \ - ensure the dependency forms are consistent", - id.url(), - ))?; - Ok(()) - } - /// Queries the `registry` to return a list of candidates for `dep`. /// /// This method is the location where overrides are taken into account. 
If @@ -120,7 +77,6 @@ /// applied by performing a second query for what the override should /// return. pub fn query(&mut self, dep: &Dependency) -> CargoResult>> { - self.warn_colliding_git_sources(dep.source_id())?; if let Some(out) = self.registry_cache.get(dep).cloned() { return Ok(out); } @@ -268,8 +224,8 @@ let mut deps = deps .into_iter() .map(|(dep, features)| { - let candidates = self.query(&dep).chain_err(|| { - anyhow::format_err!( + let candidates = self.query(&dep).with_context(|| { + format!( "failed to get `{}` as a dependency of {}", dep.package_name(), describe_path(&cx.parents.path_to_bottom(&candidate.package_id())), @@ -329,15 +285,6 @@ .unwrap_or(&default_dep) .clone(); base.extend(dep.features().iter()); - for feature in base.iter() { - if feature.contains('/') { - return Err(anyhow::format_err!( - "feature names may not contain slashes: `{}`", - feature - ) - .into()); - } - } ret.push((dep.clone(), Rc::new(base))); } @@ -365,30 +312,46 @@ ) -> ActivateResult> { let mut reqs = Requirements::new(s); - if opts.features.all_features { - for key in s.features().keys() { - if let Err(e) = reqs.require_feature(*key) { + let handle_default = |uses_default_features, reqs: &mut Requirements<'_>| { + if uses_default_features && s.features().contains_key("default") { + if let Err(e) = reqs.require_feature(InternedString::new("default")) { return Err(e.into_activate_error(parent, s)); } } - } else { - for &f in opts.features.features.iter() { - let fv = FeatureValue::new(f); - if fv.has_dep_prefix() { - return Err(ActivateError::Fatal(anyhow::format_err!( - "feature value `{}` is not allowed to use explicit `dep:` syntax", - fv - ))); - } - if let Err(e) = reqs.require_value(&fv) { - return Err(e.into_activate_error(parent, s)); + Ok(()) + }; + + match &opts.features { + RequestedFeatures::CliFeatures(CliFeatures { + features, + all_features, + uses_default_features, + }) => { + if *all_features { + for key in s.features().keys() { + if let Err(e) = 
reqs.require_feature(*key) { + return Err(e.into_activate_error(parent, s)); + } + } + } else { + for fv in features.iter() { + if let Err(e) = reqs.require_value(fv) { + return Err(e.into_activate_error(parent, s)); + } + } + handle_default(*uses_default_features, &mut reqs)?; } } - } - - if opts.features.uses_default_features && s.features().contains_key("default") { - if let Err(e) = reqs.require_feature(InternedString::new("default")) { - return Err(e.into_activate_error(parent, s)); + RequestedFeatures::DepFeatures { + features, + uses_default_features, + } => { + for feature in features.iter() { + if let Err(e) = reqs.require_feature(*feature) { + return Err(e.into_activate_error(parent, s)); + } + } + handle_default(*uses_default_features, &mut reqs)?; } } diff -Nru cargo-0.52.0/src/cargo/core/resolver/encode.rs cargo-0.54.0/src/cargo/core/resolver/encode.rs --- cargo-0.52.0/src/cargo/core/resolver/encode.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/resolver/encode.rs 2021-04-27 14:35:53.000000000 +0000 @@ -113,10 +113,10 @@ use super::{Resolve, ResolveVersion}; use crate::core::{Dependency, GitReference, Package, PackageId, SourceId, Workspace}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::{internal, Graph}; -use anyhow::bail; +use anyhow::{bail, Context as _}; use log::debug; use serde::de; use serde::ser; @@ -154,7 +154,7 @@ /// primary uses is to be used with `resolve_with_previous` to guide the /// resolver to create a complete Resolve. 
pub fn into_resolve(self, original: &str, ws: &Workspace<'_>) -> CargoResult { - let path_deps = build_path_deps(ws); + let path_deps = build_path_deps(ws)?; let mut checksums = HashMap::new(); let mut version = match self.version { @@ -333,7 +333,7 @@ let k = &k[prefix.len()..]; let enc_id: EncodablePackageId = k .parse() - .chain_err(|| internal("invalid encoding of checksum in lockfile"))?; + .with_context(|| internal("invalid encoding of checksum in lockfile"))?; let id = match lookup_id(&enc_id) { Some(id) => id, _ => continue, @@ -402,7 +402,7 @@ } } -fn build_path_deps(ws: &Workspace<'_>) -> HashMap { +fn build_path_deps(ws: &Workspace<'_>) -> CargoResult> { // If a crate is **not** a path source, then we're probably in a situation // such as `cargo install` with a lock file from a remote dependency. In // that case we don't need to fixup any path dependencies (as they're not @@ -424,7 +424,7 @@ for member in members.iter() { build_pkg(member, ws, &mut ret, &mut visited); } - for deps in ws.root_patch().values() { + for deps in ws.root_patch()?.values() { for dep in deps { build_dep(dep, ws, &mut ret, &mut visited); } @@ -433,7 +433,7 @@ build_dep(dep, ws, &mut ret, &mut visited); } - return ret; + return Ok(ret); fn build_pkg( pkg: &Package, diff -Nru cargo-0.52.0/src/cargo/core/resolver/features.rs cargo-0.54.0/src/cargo/core/resolver/features.rs --- cargo-0.52.0/src/cargo/core/resolver/features.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/resolver/features.rs 2021-04-27 14:35:53.000000000 +0000 @@ -42,7 +42,8 @@ use crate::core::{FeatureValue, PackageId, PackageIdSpec, PackageSet, Workspace}; use crate::util::interning::InternedString; use crate::util::CargoResult; -use std::collections::{BTreeSet, HashMap, HashSet}; +use anyhow::bail; +use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::rc::Rc; /// Map of activated features. @@ -71,7 +72,7 @@ /// Options for how the feature resolver works. 
#[derive(Default)] -struct FeatureOpts { +pub struct FeatureOpts { /// Use the new resolver instead of the old one. new_resolver: bool, /// Build deps and proc-macros will not share share features with other dep kinds. @@ -123,7 +124,7 @@ } impl FeatureOpts { - fn new( + pub fn new( ws: &Workspace<'_>, has_dev_units: HasDevUnits, force_all_targets: ForceAllTargets, @@ -144,7 +145,7 @@ } "compare" => opts.compare = true, "ws" => unimplemented!(), - s => anyhow::bail!("-Zfeatures flag `{}` is not supported", s), + s => bail!("-Zfeatures flag `{}` is not supported", s), } } Ok(()) @@ -180,47 +181,110 @@ } Ok(opts) } + + /// Creates a new FeatureOpts for the given behavior. + pub fn new_behavior(behavior: ResolveBehavior, has_dev_units: HasDevUnits) -> FeatureOpts { + match behavior { + ResolveBehavior::V1 => FeatureOpts::default(), + ResolveBehavior::V2 => FeatureOpts { + new_resolver: true, + decouple_host_deps: true, + decouple_dev_deps: has_dev_units == HasDevUnits::No, + ignore_inactive_targets: true, + compare: false, + }, + } + } } /// Features flags requested for a package. +/// +/// This should be cheap and fast to clone, it is used in the resolver for +/// various caches. +/// +/// This is split into enum variants because the resolver needs to handle +/// features coming from different places (command-line and dependency +/// declarations), but those different places have different constraints on +/// which syntax is allowed. This helps ensure that every place dealing with +/// features is properly handling those syntax restrictions. +#[derive(Debug, Clone, Eq, PartialEq, Hash)] +pub enum RequestedFeatures { + /// Features requested on the command-line with flags. + CliFeatures(CliFeatures), + /// Features specified in a dependency declaration. + DepFeatures { + /// The `features` dependency field. + features: FeaturesSet, + /// The `default-features` dependency field. + uses_default_features: bool, + }, +} + +/// Features specified on the command-line. 
#[derive(Debug, Clone, Eq, PartialEq, Hash)] -pub struct RequestedFeatures { - pub features: FeaturesSet, +pub struct CliFeatures { + /// Features from the `--features` flag. + pub features: Rc>, + /// The `--all-features` flag. pub all_features: bool, + /// Inverse of `--no-default-features` flag. pub uses_default_features: bool, } -impl RequestedFeatures { - /// Creates a new RequestedFeatures from the given command-line flags. +impl CliFeatures { + /// Creates a new CliFeatures from the given command-line flags. pub fn from_command_line( features: &[String], all_features: bool, uses_default_features: bool, - ) -> RequestedFeatures { - RequestedFeatures { - features: Rc::new(RequestedFeatures::split_features(features)), + ) -> CargoResult { + let features = Rc::new(CliFeatures::split_features(features)); + // Some early validation to ensure correct syntax. + for feature in features.iter() { + match feature { + // Maybe call validate_feature_name here once it is an error? + FeatureValue::Feature(_) => {} + FeatureValue::Dep { .. } + | FeatureValue::DepFeature { + dep_prefix: true, .. + } => { + bail!( + "feature `{}` is not allowed to use explicit `dep:` syntax", + feature + ); + } + FeatureValue::DepFeature { dep_feature, .. } => { + if dep_feature.contains('/') { + bail!("multiple slashes in feature `{}` is not allowed", feature); + } + } + } + } + Ok(CliFeatures { + features, all_features, uses_default_features, - } + }) } - /// Creates a new RequestedFeatures with the given `all_features` setting. - pub fn new_all(all_features: bool) -> RequestedFeatures { - RequestedFeatures { + /// Creates a new CliFeatures with the given `all_features` setting. 
+ pub fn new_all(all_features: bool) -> CliFeatures { + CliFeatures { features: Rc::new(BTreeSet::new()), all_features, uses_default_features: true, } } - fn split_features(features: &[String]) -> BTreeSet { + fn split_features(features: &[String]) -> BTreeSet { features .iter() .flat_map(|s| s.split_whitespace()) .flat_map(|s| s.split(',')) .filter(|s| !s.is_empty()) .map(InternedString::new) - .collect::>() + .map(FeatureValue::new) + .collect() } } @@ -282,15 +346,75 @@ if let Some(fs) = self.activated_features.get(&(pkg_id, is_build)) { Ok(fs.iter().cloned().collect()) } else { - anyhow::bail!("features did not find {:?} {:?}", pkg_id, is_build) + bail!("features did not find {:?} {:?}", pkg_id, is_build) } } } + + /// Compares the result against the original resolver behavior. + /// + /// Used by `cargo fix --edition` to display any differences. + pub fn compare_legacy(&self, legacy: &ResolvedFeatures) -> FeatureDifferences { + let legacy_features = legacy.legacy_features.as_ref().unwrap(); + let features = self + .activated_features + .iter() + .filter_map(|((pkg_id, for_host), new_features)| { + let old_features = match legacy_features.get(pkg_id) { + Some(feats) => feats.iter().cloned().collect(), + None => BTreeSet::new(), + }; + // The new resolver should never add features. + assert_eq!(new_features.difference(&old_features).next(), None); + let removed_features: BTreeSet<_> = + old_features.difference(new_features).cloned().collect(); + if removed_features.is_empty() { + None + } else { + Some(((*pkg_id, *for_host), removed_features)) + } + }) + .collect(); + let legacy_deps = legacy.legacy_dependencies.as_ref().unwrap(); + let optional_deps = self + .activated_dependencies + .iter() + .filter_map(|((pkg_id, for_host), new_deps)| { + let old_deps = match legacy_deps.get(pkg_id) { + Some(deps) => deps.iter().cloned().collect(), + None => BTreeSet::new(), + }; + // The new resolver should never add dependencies. 
+ assert_eq!(new_deps.difference(&old_deps).next(), None); + let removed_deps: BTreeSet<_> = old_deps.difference(new_deps).cloned().collect(); + if removed_deps.is_empty() { + None + } else { + Some(((*pkg_id, *for_host), removed_deps)) + } + }) + .collect(); + FeatureDifferences { + features, + optional_deps, + } + } +} + +/// Map of differences. +/// +/// Key is `(pkg_id, for_host)`. Value is a set of features or dependencies removed. +pub type DiffMap = BTreeMap<(PackageId, bool), BTreeSet>; + +/// Differences between resolvers. +pub struct FeatureDifferences { + pub features: DiffMap, + pub optional_deps: DiffMap, } pub struct FeatureResolver<'a, 'cfg> { ws: &'a Workspace<'cfg>, - target_data: &'a RustcTargetData, + target_data: &'a RustcTargetData<'cfg>, /// The platforms to build for, requested by the user. requested_targets: &'a [CompileKind], resolve: &'a Resolve, @@ -328,19 +452,17 @@ /// with the result. pub fn resolve( ws: &Workspace<'cfg>, - target_data: &RustcTargetData, + target_data: &RustcTargetData<'cfg>, resolve: &Resolve, package_set: &'a PackageSet<'cfg>, - requested_features: &RequestedFeatures, + cli_features: &CliFeatures, specs: &[PackageIdSpec], requested_targets: &[CompileKind], - has_dev_units: HasDevUnits, - force_all_targets: ForceAllTargets, + opts: FeatureOpts, ) -> CargoResult { use crate::util::profile; let _p = profile::start("resolve features"); - let opts = FeatureOpts::new(ws, has_dev_units, force_all_targets)?; if !opts.new_resolver { // Legacy mode. 
return Ok(ResolvedFeatures { @@ -365,7 +487,7 @@ track_for_host, deferred_weak_dependencies: HashMap::new(), }; - r.do_resolve(specs, requested_features)?; + r.do_resolve(specs, cli_features)?; log::debug!("features={:#?}", r.activated_features); if r.opts.compare { r.compare(); @@ -383,11 +505,11 @@ fn do_resolve( &mut self, specs: &[PackageIdSpec], - requested_features: &RequestedFeatures, + cli_features: &CliFeatures, ) -> CargoResult<()> { - let member_features = self.ws.members_with_features(specs, requested_features)?; - for (member, requested_features) in &member_features { - let fvs = self.fvs_from_requested(member.package_id(), requested_features); + let member_features = self.ws.members_with_features(specs, cli_features)?; + for (member, cli_features) in &member_features { + let fvs = self.fvs_from_requested(member.package_id(), cli_features); let for_host = self.track_for_host && self.is_proc_macro(member.package_id()); self.activate_pkg(member.package_id(), for_host, &fvs)?; if for_host { @@ -653,24 +775,19 @@ fn fvs_from_requested( &self, pkg_id: PackageId, - requested_features: &RequestedFeatures, + cli_features: &CliFeatures, ) -> Vec { let summary = self.resolve.summary(pkg_id); let feature_map = summary.features(); - if requested_features.all_features { + if cli_features.all_features { feature_map .keys() .map(|k| FeatureValue::Feature(*k)) .collect() } else { - let mut result: Vec = requested_features - .features - .as_ref() - .iter() - .map(|f| FeatureValue::new(*f)) - .collect(); + let mut result: Vec = cli_features.features.iter().cloned().collect(); let default = InternedString::new("default"); - if requested_features.uses_default_features && feature_map.contains_key(&default) { + if cli_features.uses_default_features && feature_map.contains_key(&default) { result.push(FeatureValue::Feature(default)); } result diff -Nru cargo-0.52.0/src/cargo/core/resolver/mod.rs cargo-0.54.0/src/cargo/core/resolver/mod.rs --- 
cargo-0.52.0/src/cargo/core/resolver/mod.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/resolver/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -69,7 +69,7 @@ pub use self::encode::Metadata; pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; pub use self::errors::{ActivateError, ActivateResult, ResolveError}; -pub use self::features::{ForceAllTargets, HasDevUnits}; +pub use self::features::{CliFeatures, ForceAllTargets, HasDevUnits}; pub use self::resolve::{Resolve, ResolveVersion}; pub use self::types::{ResolveBehavior, ResolveOpts}; @@ -133,8 +133,7 @@ Some(config) => config.cli_unstable().minimal_versions, None => false, }; - let mut registry = - RegistryQueryer::new(registry, replacements, try_to_use, minimal_versions, config); + let mut registry = RegistryQueryer::new(registry, replacements, try_to_use, minimal_versions); let cx = activate_deps_loop(cx, &mut registry, summaries, config)?; let mut cksums = HashMap::new(); @@ -193,7 +192,7 @@ // Activate all the initial summaries to kick off some work. for &(ref summary, ref opts) in summaries { debug!("initial activation: {}", summary.package_id()); - let res = activate(&mut cx, registry, None, summary.clone(), opts.clone()); + let res = activate(&mut cx, registry, None, summary.clone(), opts); match res { Ok(Some((frame, _))) => remaining_deps.push(frame), Ok(None) => (), @@ -379,9 +378,8 @@ let pid = candidate.package_id(); let opts = ResolveOpts { dev_deps: false, - features: RequestedFeatures { + features: RequestedFeatures::DepFeatures { features: Rc::clone(&features), - all_features: false, uses_default_features: dep.uses_default_features(), }, }; @@ -392,7 +390,7 @@ dep.package_name(), candidate.version() ); - let res = activate(&mut cx, registry, Some((&parent, &dep)), candidate, opts); + let res = activate(&mut cx, registry, Some((&parent, &dep)), candidate, &opts); let successfully_activated = match res { // Success! 
We've now activated our `candidate` in our context @@ -604,7 +602,7 @@ registry: &mut RegistryQueryer<'_>, parent: Option<(&Summary, &Dependency)>, candidate: Summary, - opts: ResolveOpts, + opts: &ResolveOpts, ) -> ActivateResult> { let candidate_pid = candidate.package_id(); cx.age += 1; @@ -626,7 +624,7 @@ } } - let activated = cx.flag_activated(&candidate, &opts, parent)?; + let activated = cx.flag_activated(&candidate, opts, parent)?; let candidate = match registry.replacement_summary(candidate_pid) { Some(replace) => { @@ -635,7 +633,7 @@ // does. TBH it basically cause panics in the test suite if // `parent` is passed through here and `[replace]` is otherwise // on life support so it's not critical to fix bugs anyway per se. - if cx.flag_activated(replace, &opts, None)? && activated { + if cx.flag_activated(replace, opts, None)? && activated { return Ok(None); } trace!( @@ -656,7 +654,7 @@ let now = Instant::now(); let (used_features, deps) = - &*registry.build_deps(cx, parent.map(|p| p.0.package_id()), &candidate, &opts)?; + &*registry.build_deps(cx, parent.map(|p| p.0.package_id()), &candidate, opts)?; // Record what list of features is active for this package. if !used_features.is_empty() { diff -Nru cargo-0.52.0/src/cargo/core/resolver/resolve.rs cargo-0.54.0/src/cargo/core/resolver/resolve.rs --- cargo-0.52.0/src/cargo/core/resolver/resolve.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/resolver/resolve.rs 2021-04-27 14:35:53.000000000 +0000 @@ -236,7 +236,7 @@ self.graph.sort() } - pub fn iter<'a>(&'a self) -> impl Iterator + 'a { + pub fn iter(&self) -> impl Iterator + '_ { self.graph.iter().cloned() } @@ -409,6 +409,6 @@ /// file anyway so it takes the opportunity to bump the lock file version /// forward. 
fn default() -> ResolveVersion { - ResolveVersion::V2 + ResolveVersion::V3 } } diff -Nru cargo-0.52.0/src/cargo/core/resolver/types.rs cargo-0.54.0/src/cargo/core/resolver/types.rs --- cargo-0.52.0/src/cargo/core/resolver/types.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/resolver/types.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,4 +1,4 @@ -use super::features::RequestedFeatures; +use super::features::{CliFeatures, RequestedFeatures}; use crate::core::{Dependency, PackageId, Summary}; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; @@ -133,6 +133,7 @@ /// Whether or not dev-dependencies should be included. /// /// This may be set to `false` by things like `cargo install` or `-Z avoid-dev-deps`. + /// It also gets set to `false` when activating dependencies in the resolver. pub dev_deps: bool, /// Set of features requested on the command-line. pub features: RequestedFeatures, @@ -143,7 +144,7 @@ pub fn everything() -> ResolveOpts { ResolveOpts { dev_deps: true, - features: RequestedFeatures::new_all(true), + features: RequestedFeatures::CliFeatures(CliFeatures::new_all(true)), } } @@ -173,7 +174,7 @@ .unwrap_or(0) } - pub fn flatten<'a>(&'a self) -> impl Iterator + 'a { + pub fn flatten(&self) -> impl Iterator + '_ { self.remaining_siblings .clone() .map(move |(d, _, _)| (self.parent.package_id(), d)) @@ -247,7 +248,7 @@ } None } - pub fn iter<'a>(&'a mut self) -> impl Iterator + 'a { + pub fn iter(&mut self) -> impl Iterator + '_ { self.data.iter().flat_map(|(other, _)| other.flatten()) } } diff -Nru cargo-0.52.0/src/cargo/core/shell.rs cargo-0.54.0/src/cargo/core/shell.rs --- cargo-0.52.0/src/cargo/core/shell.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/shell.rs 2021-04-27 14:35:53.000000000 +0000 @@ -185,10 +185,7 @@ /// Erase from cursor to end of line. pub fn err_erase_line(&mut self) { - if let ShellOut::Stream { - stderr_tty: true, .. 
- } = self.output - { + if self.err_supports_color() { imp::err_erase_line(self); self.needs_clear = false; } @@ -342,9 +339,12 @@ Ok(()) } - pub fn print_json(&mut self, obj: &T) { - let encoded = serde_json::to_string(&obj).unwrap(); + pub fn print_json(&mut self, obj: &T) -> CargoResult<()> { + // Path may fail to serialize to JSON ... + let encoded = serde_json::to_string(&obj)?; + // ... but don't fail due to a closed pipe. drop(writeln!(self.out(), "{}", encoded)); + Ok(()) } } diff -Nru cargo-0.52.0/src/cargo/core/source/mod.rs cargo-0.54.0/src/cargo/core/source/mod.rs --- cargo-0.52.0/src/cargo/core/source/mod.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/source/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -271,20 +271,12 @@ /// Like `HashMap::get`. pub fn get(&self, id: SourceId) -> Option<&(dyn Source + 'src)> { - let source = self.map.get(&id); - - source.map(|s| { - let s: &(dyn Source + 'src) = &**s; - s - }) + self.map.get(&id).map(|s| s.as_ref()) } /// Like `HashMap::get_mut`. pub fn get_mut(&mut self, id: SourceId) -> Option<&mut (dyn Source + 'src)> { - self.map.get_mut(&id).map(|s| { - let s: &mut (dyn Source + 'src) = &mut **s; - s - }) + self.map.get_mut(&id).map(|s| s.as_mut()) } /// Like `HashMap::get`, but first calculates the `SourceId` from a `PackageId`. diff -Nru cargo-0.52.0/src/cargo/core/source/source_id.rs cargo-0.54.0/src/cargo/core/source/source_id.rs --- cargo-0.52.0/src/cargo/core/source/source_id.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/source/source_id.rs 2021-04-27 14:35:53.000000000 +0000 @@ -42,11 +42,11 @@ /// The possible kinds of code source. Along with `SourceIdInner`, this fully defines the /// source. -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] enum SourceKind { /// A git repository. Git(GitReference), - /// A local path.. + /// A local path. Path, /// A remote registry. 
Registry, @@ -394,45 +394,9 @@ // Sort first based on `kind`, deferring to the URL comparison below if // the kinds are equal. - match (&self.inner.kind, &other.inner.kind) { - (SourceKind::Path, SourceKind::Path) => {} - (SourceKind::Path, _) => return Ordering::Less, - (_, SourceKind::Path) => return Ordering::Greater, - - (SourceKind::Registry, SourceKind::Registry) => {} - (SourceKind::Registry, _) => return Ordering::Less, - (_, SourceKind::Registry) => return Ordering::Greater, - - (SourceKind::LocalRegistry, SourceKind::LocalRegistry) => {} - (SourceKind::LocalRegistry, _) => return Ordering::Less, - (_, SourceKind::LocalRegistry) => return Ordering::Greater, - - (SourceKind::Directory, SourceKind::Directory) => {} - (SourceKind::Directory, _) => return Ordering::Less, - (_, SourceKind::Directory) => return Ordering::Greater, - - (SourceKind::Git(a), SourceKind::Git(b)) => { - use GitReference::*; - let ord = match (a, b) { - (Tag(a), Tag(b)) => a.cmp(b), - (Tag(_), _) => Ordering::Less, - (_, Tag(_)) => Ordering::Greater, - - (Rev(a), Rev(b)) => a.cmp(b), - (Rev(_), _) => Ordering::Less, - (_, Rev(_)) => Ordering::Greater, - - // See module comments in src/cargo/sources/git/utils.rs - // for why `DefaultBranch` is treated specially here. - (Branch(a), DefaultBranch) => a.as_str().cmp("master"), - (DefaultBranch, Branch(b)) => "master".cmp(b), - (Branch(a), Branch(b)) => a.cmp(b), - (DefaultBranch, DefaultBranch) => Ordering::Equal, - }; - if ord != Ordering::Equal { - return ord; - } - } + match self.inner.kind.cmp(&other.inner.kind) { + Ordering::Equal => {} + other => return other, } // If the `kind` and the `url` are equal, then for git sources we also @@ -509,43 +473,9 @@ // The hash of SourceId is used in the name of some Cargo folders, so shouldn't // vary. `as_str` gives the serialisation of a url (which has a spec) and so // insulates against possible changes in how the url crate does hashing. 
-// -// Note that the semi-funky hashing here is done to handle `DefaultBranch` -// hashing the same as `"master"`, and also to hash the same as previous -// versions of Cargo while it's somewhat convenient to do so (that way all -// versions of Cargo use the same checkout). impl Hash for SourceId { fn hash(&self, into: &mut S) { - match &self.inner.kind { - SourceKind::Git(GitReference::Tag(a)) => { - 0usize.hash(into); - 0usize.hash(into); - a.hash(into); - } - SourceKind::Git(GitReference::Branch(a)) => { - 0usize.hash(into); - 1usize.hash(into); - a.hash(into); - } - // For now hash `DefaultBranch` the same way as `Branch("master")`, - // and for more details see module comments in - // src/cargo/sources/git/utils.rs for why `DefaultBranch` - SourceKind::Git(GitReference::DefaultBranch) => { - 0usize.hash(into); - 1usize.hash(into); - "master".hash(into); - } - SourceKind::Git(GitReference::Rev(a)) => { - 0usize.hash(into); - 2usize.hash(into); - a.hash(into); - } - - SourceKind::Path => 1usize.hash(into), - SourceKind::Registry => 2usize.hash(into), - SourceKind::LocalRegistry => 3usize.hash(into), - SourceKind::Directory => 4usize.hash(into), - } + self.inner.kind.hash(into); match self.inner.kind { SourceKind::Git(_) => self.inner.canonical_url.hash(into), _ => self.inner.url.as_str().hash(into), @@ -553,6 +483,110 @@ } } +// forward to `Ord` +impl PartialOrd for SourceKind { + fn partial_cmp(&self, other: &SourceKind) -> Option { + Some(self.cmp(other)) + } +} + +// Note that this is specifically not derived on `SourceKind` although the +// implementation here is very similar to what it might look like if it were +// otherwise derived. +// +// The reason for this is somewhat obtuse. First of all the hash value of +// `SourceKind` makes its way into `~/.cargo/registry/index/github.com-XXXX` +// which means that changes to the hash means that all Rust users need to +// redownload the crates.io index and all their crates. 
If possible we strive to +// not change this to make this redownloading behavior happen as little as +// possible. How is this connected to `Ord` you might ask? That's a good +// question! +// +// Since the beginning of time `SourceKind` has had `#[derive(Hash)]`. It for +// the longest time *also* derived the `Ord` and `PartialOrd` traits. In #8522, +// however, the implementation of `Ord` changed. This handwritten implementation +// forgot to sync itself with the originally derived implementation, namely +// placing git dependencies as sorted after all other dependencies instead of +// first as before. +// +// This regression in #8522 (Rust 1.47) went unnoticed. When we switched back +// to a derived implementation in #9133 (Rust 1.52 beta) we only then ironically +// saw an issue (#9334). In #9334 it was observed that stable Rust at the time +// (1.51) was sorting git dependencies last, whereas Rust 1.52 beta would sort +// git dependencies first. This is because the `PartialOrd` implementation in +// 1.51 used #8522, the buggy implementation, which put git deps last. In 1.52 +// it was (unknowingly) restored to the pre-1.47 behavior with git dependencies +// first. +// +// Because the breakage was only witnessed after the original breakage, this +// trait implementation is preserving the "broken" behavior. Put a different way: +// +// * Rust pre-1.47 sorted git deps first. +// * Rust 1.47 to Rust 1.51 sorted git deps last, a breaking change (#8522) that +// was never noticed. +// * Rust 1.52 restored the pre-1.47 behavior (#9133, without knowing it did +// so), and breakage was witnessed by actual users due to difference with +// 1.51. +// * Rust 1.52 (the source as it lives now) was fixed to match the 1.47-1.51 +// behavior (#9383), which is now considered intentionally breaking from the +// pre-1.47 behavior. +// +// Note that this was all discovered when Rust 1.53 was in nightly and 1.52 was +// in beta. 
#9133 was in both beta and nightly at the time of discovery. For +// 1.52 #9383 reverted #9133, meaning 1.52 is the same as 1.51. On nightly +// (1.53) #9397 was created to fix the regression introduced by #9133 relative +// to the current stable (1.51). +// +// That's all a long winded way of saying "it's wierd that git deps hash first +// and are sorted last, but it's the way it is right now". The author of this +// comment chose to handwrite the `Ord` implementation instead of the `Hash` +// implementation, but it's only required that at most one of them is +// hand-written because the other can be derived. Perhaps one day in +// the future someone can figure out how to remove this behavior. +impl Ord for SourceKind { + fn cmp(&self, other: &SourceKind) -> Ordering { + match (self, other) { + (SourceKind::Path, SourceKind::Path) => Ordering::Equal, + (SourceKind::Path, _) => Ordering::Less, + (_, SourceKind::Path) => Ordering::Greater, + + (SourceKind::Registry, SourceKind::Registry) => Ordering::Equal, + (SourceKind::Registry, _) => Ordering::Less, + (_, SourceKind::Registry) => Ordering::Greater, + + (SourceKind::LocalRegistry, SourceKind::LocalRegistry) => Ordering::Equal, + (SourceKind::LocalRegistry, _) => Ordering::Less, + (_, SourceKind::LocalRegistry) => Ordering::Greater, + + (SourceKind::Directory, SourceKind::Directory) => Ordering::Equal, + (SourceKind::Directory, _) => Ordering::Less, + (_, SourceKind::Directory) => Ordering::Greater, + + (SourceKind::Git(a), SourceKind::Git(b)) => a.cmp(b), + } + } +} + +// This is a test that the hash of the `SourceId` for crates.io is a well-known +// value. +// +// Note that the hash value matches what the crates.io source id has hashed +// since long before Rust 1.30. We strive to keep this value the same across +// versions of Cargo because changing it means that users will need to +// redownload the index and all crates they use when using a new Cargo version. 
+// +// This isn't to say that this hash can *never* change, only that when changing +// this it should be explicitly done. If this hash changes accidentally and +// you're able to restore the hash to its original value, please do so! +// Otherwise please just leave a comment in your PR as to why the hash value is +// changing and why the old value can't be easily preserved. +#[test] +fn test_cratesio_hash() { + let config = Config::default().unwrap(); + let crates_io = SourceId::crates_io(&config).unwrap(); + assert_eq!(crate::util::hex::short_hash(&crates_io), "1ecc6299db9ec823"); +} + /// A `Display`able view into a `SourceId` that will write it as a url pub struct SourceIdAsUrl<'a> { inner: &'a SourceIdInner, diff -Nru cargo-0.52.0/src/cargo/core/summary.rs cargo-0.54.0/src/cargo/core/summary.rs --- cargo-0.52.0/src/cargo/core/summary.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/summary.rs 2021-04-27 14:35:53.000000000 +0000 @@ -37,6 +37,9 @@ features: &BTreeMap>, links: Option>, ) -> CargoResult { + // ****CAUTION**** If you change anything here than may raise a new + // error, be sure to coordinate that change with either the index + // schema field or the SummariesCache version. let mut has_overlapping_features = None; for dep in dependencies.iter() { let dep_name = dep.name_in_toml(); @@ -247,6 +250,12 @@ feature ); } + if feature.contains('/') { + bail!( + "feature named `{}` is not allowed to contain slashes", + feature + ); + } validate_feature_name(config, pkg_id, feature)?; for fv in fvs { // Find data for the referenced dependency... @@ -313,7 +322,20 @@ ); } } - DepFeature { dep_name, weak, .. } => { + DepFeature { + dep_name, + dep_feature, + weak, + .. + } => { + // Early check for some unlikely syntax. + if dep_feature.contains('/') { + bail!( + "multiple slashes in feature `{}` (included by feature `{}`) are not allowed", + fv, + feature + ); + } // Validation of the feature name will be performed in the resolver. 
if !is_any_dep { bail!( @@ -359,7 +381,7 @@ } /// FeatureValue represents the types of dependencies a feature can have. -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] pub enum FeatureValue { /// A feature enabling another feature. Feature(InternedString), diff -Nru cargo-0.52.0/src/cargo/core/workspace.rs cargo-0.54.0/src/cargo/core/workspace.rs --- cargo-0.52.0/src/cargo/core/workspace.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/core/workspace.rs 2021-04-27 14:35:53.000000000 +0000 @@ -5,23 +5,24 @@ use std::rc::Rc; use std::slice; +use anyhow::{bail, Context as _}; use glob::glob; use log::debug; use url::Url; use crate::core::features::Features; use crate::core::registry::PackageRegistry; -use crate::core::resolver::features::RequestedFeatures; +use crate::core::resolver::features::CliFeatures; use crate::core::resolver::ResolveBehavior; -use crate::core::{Dependency, PackageId, PackageIdSpec}; +use crate::core::{Dependency, Edition, FeatureValue, PackageId, PackageIdSpec}; use crate::core::{EitherManifest, Package, SourceId, VirtualManifest}; use crate::ops; -use crate::sources::PathSource; -use crate::util::errors::{CargoResult, CargoResultExt, ManifestError}; +use crate::sources::{PathSource, CRATES_IO_INDEX, CRATES_IO_REGISTRY}; +use crate::util::errors::{CargoResult, ManifestError}; use crate::util::interning::InternedString; -use crate::util::paths; -use crate::util::toml::{read_manifest, TomlProfiles}; -use crate::util::{Config, Filesystem}; +use crate::util::toml::{read_manifest, TomlDependency, TomlProfiles}; +use crate::util::{config::ConfigRelativePath, Config, Filesystem, IntoUrl}; +use cargo_util::paths; /// The core abstraction in Cargo for working with a workspace of crates. /// @@ -88,7 +89,7 @@ ignore_lock: bool, /// The resolver behavior specified with the `resolver` field. 
- resolve_behavior: Option, + resolve_behavior: ResolveBehavior, /// Workspace-level custom metadata custom_metadata: Option, @@ -103,7 +104,7 @@ } #[derive(Debug)] -enum MaybePackage { +pub enum MaybePackage { Package(Package), Virtual(VirtualManifest), } @@ -152,7 +153,7 @@ ws.target_dir = config.target_dir()?; if manifest_path.is_relative() { - anyhow::bail!( + bail!( "manifest_path:{:?} is not an absolute path. Please provide an absolute path.", manifest_path ) @@ -164,10 +165,7 @@ .load_workspace_config()? .and_then(|cfg| cfg.custom_metadata); ws.find_members()?; - ws.resolve_behavior = match ws.root_maybe() { - MaybePackage::Package(p) => p.manifest().resolve_behavior(), - MaybePackage::Virtual(vm) => vm.resolve_behavior(), - }; + ws.set_resolve_behavior(); ws.validate()?; Ok(ws) } @@ -189,7 +187,7 @@ require_optional_deps: true, loaded_packages: RefCell::new(HashMap::new()), ignore_lock: false, - resolve_behavior: None, + resolve_behavior: ResolveBehavior::V1, custom_metadata: None, } } @@ -203,11 +201,11 @@ let mut ws = Workspace::new_default(current_manifest, config); ws.root_manifest = Some(root_path.join("Cargo.toml")); ws.target_dir = config.target_dir()?; - ws.resolve_behavior = manifest.resolve_behavior(); ws.packages .packages .insert(root_path, MaybePackage::Virtual(manifest)); ws.find_members()?; + ws.set_resolve_behavior(); // TODO: validation does not work because it walks up the directory // tree looking for the root which is a fake file that doesn't exist. 
Ok(ws) @@ -231,7 +229,6 @@ let mut ws = Workspace::new_default(package.manifest_path().to_path_buf(), config); ws.is_ephemeral = true; ws.require_optional_deps = require_optional_deps; - ws.resolve_behavior = package.manifest().resolve_behavior(); let key = ws.current_manifest.parent().unwrap(); let id = package.package_id(); let package = MaybePackage::Package(package); @@ -244,9 +241,28 @@ ws.members.push(ws.current_manifest.clone()); ws.member_ids.insert(id); ws.default_members.push(ws.current_manifest.clone()); + ws.set_resolve_behavior(); Ok(ws) } + fn set_resolve_behavior(&mut self) { + // - If resolver is specified in the workspace definition, use that. + // - If the root package specifies the resolver, use that. + // - If the root package specifies edition 2021, use v2. + // - Otherwise, use the default v1. + self.resolve_behavior = match self.root_maybe() { + MaybePackage::Package(p) => p.manifest().resolve_behavior().or_else(|| { + if p.manifest().edition() >= Edition::Edition2021 { + Some(ResolveBehavior::V2) + } else { + None + } + }), + MaybePackage::Virtual(vm) => vm.resolve_behavior(), + } + .unwrap_or(ResolveBehavior::V1); + } + /// Returns the current package of this workspace. /// /// Note that this can return an error if it the current manifest is @@ -327,7 +343,7 @@ } /// Returns the root Package or VirtualManifest. 
- fn root_maybe(&self) -> &MaybePackage { + pub fn root_maybe(&self) -> &MaybePackage { self.packages.get(self.root_manifest()) } @@ -347,14 +363,105 @@ } } + fn config_patch(&self) -> CargoResult>> { + let config_patch: Option< + BTreeMap>>, + > = self.config.get("patch")?; + + if config_patch.is_some() && !self.config.cli_unstable().patch_in_config { + self.config.shell().warn("`[patch]` in cargo config was ignored, the -Zpatch-in-config command-line flag is required".to_owned())?; + return Ok(HashMap::new()); + } + + let source = SourceId::for_path(self.root())?; + + let mut warnings = Vec::new(); + let mut nested_paths = Vec::new(); + + let mut patch = HashMap::new(); + for (url, deps) in config_patch.into_iter().flatten() { + let url = match &url[..] { + CRATES_IO_REGISTRY => CRATES_IO_INDEX.parse().unwrap(), + url => self + .config + .get_registry_index(url) + .or_else(|_| url.into_url()) + .with_context(|| { + format!("[patch] entry `{}` should be a URL or registry name", url) + })?, + }; + patch.insert( + url, + deps.iter() + .map(|(name, dep)| { + dep.to_dependency_split( + name, + /* pkg_id */ None, + source, + &mut nested_paths, + self.config, + &mut warnings, + /* platform */ None, + // NOTE: Since we use ConfigRelativePath, this root isn't used as + // any relative paths are resolved before they'd be joined with root. + Path::new("unused-relative-path"), + self.unstable_features(), + /* kind */ None, + ) + }) + .collect::>>()?, + ); + } + + for message in warnings { + self.config + .shell() + .warn(format!("[patch] in cargo config: {}", message))? + } + + Ok(patch) + } + /// Returns the root `[patch]` section of this workspace. /// /// This may be from a virtual crate or an actual crate. 
- pub fn root_patch(&self) -> &HashMap> { - match self.root_maybe() { + pub fn root_patch(&self) -> CargoResult>> { + let from_manifest = match self.root_maybe() { MaybePackage::Package(p) => p.manifest().patch(), MaybePackage::Virtual(vm) => vm.patch(), + }; + + let from_config = self.config_patch()?; + if from_config.is_empty() { + return Ok(from_manifest.clone()); + } + if from_manifest.is_empty() { + return Ok(from_config); + } + + // We could just chain from_manifest and from_config, + // but that's not quite right as it won't deal with overlaps. + let mut combined = from_manifest.clone(); + for (url, cdeps) in from_config { + if let Some(deps) = combined.get_mut(&url) { + // We want from_manifest to take precedence for each patched name. + // NOTE: This is inefficient if the number of patches is large! + let mut left = cdeps.clone(); + for dep in &mut *deps { + if let Some(i) = left.iter().position(|cdep| { + // XXX: should this also take into account version numbers? + dep.name_in_toml() == cdep.name_in_toml() + }) { + left.swap_remove(i); + } + } + // Whatever is left does not exist in manifest dependencies. 
+ deps.extend(left); + } else { + combined.insert(url.clone(), cdeps.clone()); + } } + Ok(combined) } /// Returns an iterator over all packages in this workspace @@ -417,7 +524,7 @@ return Ok(Some(root_config.clone())); } - _ => anyhow::bail!( + _ => bail!( "root of a workspace inferred but wasn't a root: {}", root_path.display() ), @@ -461,7 +568,7 @@ } } - for path in paths::ancestors(manifest_path).skip(2) { + for path in paths::ancestors(manifest_path, None).skip(2) { if path.ends_with("target/package") { break; } @@ -538,7 +645,13 @@ }; for path in &members_paths { - self.find_path_deps(&path.join("Cargo.toml"), &root_manifest_path, false)?; + self.find_path_deps(&path.join("Cargo.toml"), &root_manifest_path, false) + .with_context(|| { + format!( + "failed to load manifest for workspace member `{}`", + path.display() + ) + })?; } if let Some(default) = default_members_paths { @@ -557,7 +670,7 @@ if exclude { continue; } - anyhow::bail!( + bail!( "package `{}` is listed in workspace’s default-members \ but is not a member.", path.display() @@ -612,14 +725,15 @@ self.member_ids.insert(pkg.package_id()); pkg.dependencies() .iter() - .map(|d| d.source_id()) - .filter(|d| d.is_path()) - .filter_map(|d| d.url().to_file_path().ok()) - .map(|p| p.join("Cargo.toml")) + .map(|d| (d.source_id(), d.package_name())) + .filter(|(s, _)| s.is_path()) + .filter_map(|(s, n)| s.url().to_file_path().ok().map(|p| (p, n))) + .map(|(p, n)| (p.join("Cargo.toml"), n)) .collect::>() }; - for candidate in candidates { - self.find_path_deps(&candidate, root_manifest, true) + for (path, name) in candidates { + self.find_path_deps(&path, root_manifest, true) + .with_context(|| format!("failed to load manifest for dependency `{}`", name)) .map_err(|err| ManifestError::new(err, manifest_path.clone()))?; } Ok(()) @@ -634,7 +748,7 @@ } pub fn resolve_behavior(&self) -> ResolveBehavior { - self.resolve_behavior.unwrap_or(ResolveBehavior::V1) + self.resolve_behavior } /// Returns `true` if this 
workspace uses the new CLI features behavior. @@ -679,7 +793,7 @@ MaybePackage::Virtual(_) => continue, }; if let Some(prev) = names.insert(name, member) { - anyhow::bail!( + bail!( "two packages named `{}` in this workspace:\n\ - {}\n\ - {}", @@ -704,7 +818,7 @@ .collect(); match roots.len() { 1 => Ok(()), - 0 => anyhow::bail!( + 0 => bail!( "`package.workspace` configuration points to a crate \ which is not configured with [workspace]: \n\ configuration at: {}\n\ @@ -713,7 +827,7 @@ self.root_manifest.as_ref().unwrap().display() ), _ => { - anyhow::bail!( + bail!( "multiple workspace roots found in the same workspace:\n{}", roots .iter() @@ -734,7 +848,7 @@ match root { Some(root) => { - anyhow::bail!( + bail!( "package `{}` is a member of the wrong workspace\n\ expected: {}\n\ actual: {}", @@ -744,7 +858,7 @@ ); } None => { - anyhow::bail!( + bail!( "workspace member `{}` is not hierarchically below \ the workspace root `{}`", member.display(), @@ -801,7 +915,7 @@ } } }; - anyhow::bail!( + bail!( "current package believes it's in a workspace when it's not:\n\ current: {}\n\ workspace: {}\n\n{}\n\ @@ -843,11 +957,11 @@ if !manifest.patch().is_empty() { emit_warning("patch")?; } - if manifest.resolve_behavior().is_some() - && manifest.resolve_behavior() != self.resolve_behavior - { - // Only warn if they don't match. - emit_warning("resolver")?; + if let Some(behavior) = manifest.resolve_behavior() { + if behavior != self.resolve_behavior { + // Only warn if they don't match. 
+ emit_warning("resolver")?; + } } } } @@ -857,7 +971,7 @@ pub fn load(&self, manifest_path: &Path) -> CargoResult { match self.packages.maybe_get(manifest_path) { Some(&MaybePackage::Package(ref p)) => return Ok(p.clone()), - Some(&MaybePackage::Virtual(_)) => anyhow::bail!("cannot load workspace root"), + Some(&MaybePackage::Virtual(_)) => bail!("cannot load workspace root"), None => {} } @@ -940,10 +1054,10 @@ pub fn members_with_features( &self, specs: &[PackageIdSpec], - requested_features: &RequestedFeatures, - ) -> CargoResult> { + cli_features: &CliFeatures, + ) -> CargoResult> { assert!( - !specs.is_empty() || requested_features.all_features, + !specs.is_empty() || cli_features.all_features, "no specs requires all_features" ); if specs.is_empty() { @@ -951,13 +1065,13 @@ // all features enabled. return Ok(self .members() - .map(|m| (m, RequestedFeatures::new_all(true))) + .map(|m| (m, CliFeatures::new_all(true))) .collect()); } if self.allows_new_cli_feature_behavior() { - self.members_with_features_new(specs, requested_features) + self.members_with_features_new(specs, cli_features) } else { - Ok(self.members_with_features_old(specs, requested_features)) + Ok(self.members_with_features_old(specs, cli_features)) } } @@ -966,17 +1080,17 @@ fn members_with_features_new( &self, specs: &[PackageIdSpec], - requested_features: &RequestedFeatures, - ) -> CargoResult> { + cli_features: &CliFeatures, + ) -> CargoResult> { // Keep track of which features matched *any* member, to produce an error // if any of them did not match anywhere. - let mut found: BTreeSet = BTreeSet::new(); + let mut found: BTreeSet = BTreeSet::new(); // Returns the requested features for the given member. // This filters out any named features that the member does not have. 
- let mut matching_features = |member: &Package| -> RequestedFeatures { - if requested_features.features.is_empty() || requested_features.all_features { - return requested_features.clone(); + let mut matching_features = |member: &Package| -> CliFeatures { + if cli_features.features.is_empty() || cli_features.all_features { + return cli_features.clone(); } // Only include features this member defines. let summary = member.summary(); @@ -992,40 +1106,54 @@ .any(|dep| dep.is_optional() && dep.name_in_toml() == feature) }; - for feature in requested_features.features.iter() { - let mut split = feature.splitn(2, '/'); - let split = (split.next().unwrap(), split.next()); - if let (pkg, Some(pkg_feature)) = split { - let pkg = InternedString::new(pkg); - let pkg_feature = InternedString::new(pkg_feature); - if summary - .dependencies() - .iter() - .any(|dep| dep.name_in_toml() == pkg) - { - // pkg/feat for a dependency. - // Will rely on the dependency resolver to validate `feat`. - features.insert(*feature); - found.insert(*feature); - } else if pkg == member.name() && contains(pkg_feature) { - // member/feat where "feat" is a feature in member. - features.insert(pkg_feature); - found.insert(*feature); + for feature in cli_features.features.iter() { + match feature { + FeatureValue::Feature(f) => { + if contains(*f) { + // feature exists in this member. + features.insert(feature.clone()); + found.insert(feature.clone()); + } + } + // This should be enforced by CliFeatures. + FeatureValue::Dep { .. } + | FeatureValue::DepFeature { + dep_prefix: true, .. + } => panic!("unexpected dep: syntax {}", feature), + FeatureValue::DepFeature { + dep_name, + dep_feature, + dep_prefix: _, + weak: _, + } => { + if summary + .dependencies() + .iter() + .any(|dep| dep.name_in_toml() == *dep_name) + { + // pkg/feat for a dependency. + // Will rely on the dependency resolver to validate `dep_feature`. 
+ features.insert(feature.clone()); + found.insert(feature.clone()); + } else if *dep_name == member.name() && contains(*dep_feature) { + // member/feat where "feat" is a feature in member. + // + // `weak` can be ignored here, because the member + // either is or isn't being built. + features.insert(FeatureValue::Feature(*dep_feature)); + found.insert(feature.clone()); + } } - } else if contains(*feature) { - // feature exists in this member. - features.insert(*feature); - found.insert(*feature); } } - RequestedFeatures { + CliFeatures { features: Rc::new(features), all_features: false, - uses_default_features: requested_features.uses_default_features, + uses_default_features: cli_features.uses_default_features, } }; - let members: Vec<(&Package, RequestedFeatures)> = self + let members: Vec<(&Package, CliFeatures)> = self .members() .filter(|m| specs.iter().any(|spec| spec.matches(m.package_id()))) .map(|m| (m, matching_features(m))) @@ -1033,27 +1161,28 @@ if members.is_empty() { // `cargo build -p foo`, where `foo` is not a member. // Do not allow any command-line flags (defaults only). - if !(requested_features.features.is_empty() - && !requested_features.all_features - && requested_features.uses_default_features) + if !(cli_features.features.is_empty() + && !cli_features.all_features + && cli_features.uses_default_features) { - anyhow::bail!("cannot specify features for packages outside of workspace"); + bail!("cannot specify features for packages outside of workspace"); } // Add all members from the workspace so we can ensure `-p nonmember` // is in the resolve graph. 
return Ok(self .members() - .map(|m| (m, RequestedFeatures::new_all(false))) + .map(|m| (m, CliFeatures::new_all(false))) .collect()); } - if *requested_features.features != found { - let missing: Vec<_> = requested_features + if *cli_features.features != found { + let mut missing: Vec<_> = cli_features .features .difference(&found) - .copied() + .map(|fv| fv.to_string()) .collect(); + missing.sort(); // TODO: typo suggestions would be good here. - anyhow::bail!( + bail!( "none of the selected packages contains these features: {}", missing.join(", ") ); @@ -1066,28 +1195,46 @@ fn members_with_features_old( &self, specs: &[PackageIdSpec], - requested_features: &RequestedFeatures, - ) -> Vec<(&Package, RequestedFeatures)> { + cli_features: &CliFeatures, + ) -> Vec<(&Package, CliFeatures)> { // Split off any features with the syntax `member-name/feature-name` into a map // so that those features can be applied directly to those workspace-members. - let mut member_specific_features: HashMap<&str, BTreeSet> = HashMap::new(); + let mut member_specific_features: HashMap> = + HashMap::new(); // Features for the member in the current directory. let mut cwd_features = BTreeSet::new(); - for feature in requested_features.features.iter() { - if let Some(index) = feature.find('/') { - let name = &feature[..index]; - let is_member = self.members().any(|member| member.name() == name); - if is_member && specs.iter().any(|spec| spec.name() == name) { - member_specific_features - .entry(name) - .or_default() - .insert(InternedString::new(&feature[index + 1..])); - } else { - cwd_features.insert(*feature); + for feature in cli_features.features.iter() { + match feature { + FeatureValue::Feature(_) => { + cwd_features.insert(feature.clone()); } - } else { - cwd_features.insert(*feature); - }; + // This should be enforced by CliFeatures. + FeatureValue::Dep { .. } + | FeatureValue::DepFeature { + dep_prefix: true, .. 
+ } => panic!("unexpected dep: syntax {}", feature), + FeatureValue::DepFeature { + dep_name, + dep_feature, + dep_prefix: _, + weak: _, + } => { + // I think weak can be ignored here. + // * With `--features member?/feat -p member`, the ? doesn't + // really mean anything (either the member is built or it isn't). + // * With `--features nonmember?/feat`, cwd_features will + // handle processing it correctly. + let is_member = self.members().any(|member| member.name() == *dep_name); + if is_member && specs.iter().any(|spec| spec.name() == *dep_name) { + member_specific_features + .entry(*dep_name) + .or_default() + .insert(FeatureValue::Feature(*dep_feature)); + } else { + cwd_features.insert(feature.clone()); + } + } + } } let ms = self.members().filter_map(|member| { @@ -1096,10 +1243,10 @@ // The features passed on the command-line only apply to // the "current" package (determined by the cwd). Some(current) if member_id == current.package_id() => { - let feats = RequestedFeatures { + let feats = CliFeatures { features: Rc::new(cwd_features.clone()), - all_features: requested_features.all_features, - uses_default_features: requested_features.uses_default_features, + all_features: cli_features.all_features, + uses_default_features: cli_features.uses_default_features, }; Some((member, feats)) } @@ -1115,14 +1262,14 @@ // "current" package. As an extension, this allows // member-name/feature-name to set member-specific // features, which should be backwards-compatible. 
- let feats = RequestedFeatures { + let feats = CliFeatures { features: Rc::new( member_specific_features .remove(member.name().as_str()) .unwrap_or_default(), ), uses_default_features: true, - all_features: requested_features.all_features, + all_features: cli_features.all_features, }; Some((member, feats)) } else { @@ -1275,12 +1422,9 @@ Some(p) => p, None => return Ok(Vec::new()), }; - let res = - glob(path).chain_err(|| anyhow::format_err!("could not parse pattern `{}`", &path))?; + let res = glob(path).with_context(|| format!("could not parse pattern `{}`", &path))?; let res = res - .map(|p| { - p.chain_err(|| anyhow::format_err!("unable to match path to pattern `{}`", &path)) - }) + .map(|p| p.with_context(|| format!("unable to match path to pattern `{}`", &path))) .collect::, _>>()?; Ok(res) } diff -Nru cargo-0.52.0/src/cargo/lib.rs cargo-0.54.0/src/cargo/lib.rs --- cargo-0.52.0/src/cargo/lib.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/lib.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,34 +1,12 @@ -#![cfg_attr(test, deny(warnings))] -// While we're getting used to 2018: +// For various reasons, some idioms are still allow'ed, but we would like to +// test and enforce them. #![warn(rust_2018_idioms)] -// Clippy isn't enforced by CI (@alexcrichton isn't a fan). 
-#![allow(clippy::blacklisted_name)] // frequently used in tests -#![allow(clippy::cognitive_complexity)] // large project -#![allow(clippy::derive_hash_xor_eq)] // there's an intentional incoherence -#![allow(clippy::explicit_into_iter_loop)] // explicit loops are clearer -#![allow(clippy::explicit_iter_loop)] // explicit loops are clearer -#![allow(clippy::identity_op)] // used for vertical alignment -#![allow(clippy::implicit_hasher)] // large project -#![allow(clippy::large_enum_variant)] // large project -#![allow(clippy::new_without_default)] // explicit is maybe clearer -#![allow(clippy::redundant_closure)] // closures can be less verbose -#![allow(clippy::redundant_closure_call)] // closures over try catch blocks -#![allow(clippy::too_many_arguments)] // large project -#![allow(clippy::type_complexity)] // there's an exceptionally complex type -#![allow(clippy::wrong_self_convention)] // perhaps `Rc` should be special-cased in Clippy? -#![allow(clippy::write_with_newline)] // too pedantic -#![allow(clippy::inefficient_to_string)] // this causes suggestions that result in `(*s).to_string()` -#![allow(clippy::collapsible_if)] // too pedantic +#![cfg_attr(test, deny(warnings))] +// Due to some of the default clippy lints being somewhat subjective and not +// necessarily an improvement, we prefer to not use them at this time. +#![allow(clippy::all)] #![warn(clippy::needless_borrow)] -// Unit is now interned, and would probably be better as pass-by-copy, but -// doing so causes a lot of & and * shenanigans that makes the code arguably -// less clear and harder to read. 
-#![allow(clippy::trivially_copy_pass_by_ref)] -// exhaustively destructuring ensures future fields are handled -#![allow(clippy::unneeded_field_pattern)] -// false positives in target-specific code, for details see -// https://github.com/rust-lang/cargo/pull/7251#pullrequestreview-274914270 -#![allow(clippy::useless_conversion)] +#![warn(clippy::redundant_clone)] use crate::core::shell::Verbosity::Verbose; use crate::core::Shell; diff -Nru cargo-0.52.0/src/cargo/ops/cargo_clean.rs cargo-0.54.0/src/cargo/ops/cargo_clean.rs --- cargo-0.52.0/src/cargo/ops/cargo_clean.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_clean.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2,11 +2,13 @@ use crate::core::profiles::Profiles; use crate::core::{PackageIdSpec, TargetKind, Workspace}; use crate::ops; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::lev_distance; -use crate::util::paths; use crate::util::Config; + +use anyhow::Context as _; +use cargo_util::paths; use std::fs; use std::path::Path; @@ -222,14 +224,12 @@ config .shell() .verbose(|shell| shell.status("Removing", path.display()))?; - paths::remove_dir_all(path) - .chain_err(|| anyhow::format_err!("could not remove build directory"))?; + paths::remove_dir_all(path).with_context(|| "could not remove build directory")?; } else if m.is_ok() { config .shell() .verbose(|shell| shell.status("Removing", path.display()))?; - paths::remove_file(path) - .chain_err(|| anyhow::format_err!("failed to remove build artifact"))?; + paths::remove_file(path).with_context(|| "failed to remove build artifact")?; } Ok(()) } diff -Nru cargo-0.52.0/src/cargo/ops/cargo_compile.rs cargo-0.54.0/src/cargo/ops/cargo_compile.rs --- cargo-0.52.0/src/cargo/ops/cargo_compile.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_compile.rs 2021-04-27 14:35:53.000000000 +0000 @@ -26,17 +26,18 @@ use 
std::hash::{Hash, Hasher}; use std::sync::Arc; -use crate::core::compiler::standard_lib; use crate::core::compiler::unit_dependencies::build_unit_dependencies; use crate::core::compiler::unit_graph::{self, UnitDep, UnitGraph}; +use crate::core::compiler::{standard_lib, TargetInfo}; use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context}; use crate::core::compiler::{CompileKind, CompileMode, CompileTarget, RustcTargetData, Unit}; use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner}; use crate::core::profiles::{Profiles, UnitFor}; -use crate::core::resolver::features::{self, FeaturesFor, RequestedFeatures}; -use crate::core::resolver::{HasDevUnits, Resolve, ResolveOpts}; +use crate::core::resolver::features::{self, CliFeatures, FeaturesFor}; +use crate::core::resolver::{HasDevUnits, Resolve}; use crate::core::{FeatureValue, Package, PackageSet, Shell, Summary, Target}; use crate::core::{PackageId, PackageIdSpec, SourceId, TargetKind, Workspace}; +use crate::drop_println; use crate::ops; use crate::ops::resolve::WorkspaceResolve; use crate::util::config::Config; @@ -58,12 +59,8 @@ pub struct CompileOptions { /// Configuration information for a rustc build pub build_config: BuildConfig, - /// Extra features to build for the root package - pub features: Vec, - /// Flag whether all available features should be built for the root package - pub all_features: bool, - /// Flag if the default feature should be built for the root package - pub no_default_features: bool, + /// Feature flags requested by the user. + pub cli_features: CliFeatures, /// A set of packages to build. 
pub spec: Packages, /// Filter to apply to the root package to select which targets will be @@ -88,9 +85,7 @@ pub fn new(config: &Config, mode: CompileMode) -> CargoResult { Ok(CompileOptions { build_config: BuildConfig::new(config, None, &[], mode)?, - features: Vec::new(), - all_features: false, - no_default_features: false, + cli_features: CliFeatures::new_all(false), spec: ops::Packages::Packages(Vec::new()), filter: CompileFilter::Default { required_features_filterable: false, @@ -286,15 +281,45 @@ let interner = UnitInterner::new(); let bcx = create_bcx(ws, options, &interner)?; if options.build_config.unit_graph { - unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph)?; + unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.config())?; return Compilation::new(&bcx); } - let _p = profile::start("compiling"); let cx = Context::new(&bcx)?; cx.compile(exec) } +pub fn print<'a>( + ws: &Workspace<'a>, + options: &CompileOptions, + print_opt_value: &str, +) -> CargoResult<()> { + let CompileOptions { + ref build_config, + ref target_rustc_args, + .. 
+ } = *options; + let config = ws.config(); + let rustc = config.load_global_rustc(Some(ws))?; + for (index, kind) in build_config.requested_kinds.iter().enumerate() { + if index != 0 { + drop_println!(config); + } + let target_info = TargetInfo::new(config, &build_config.requested_kinds, &rustc, *kind)?; + let mut process = rustc.process(); + process.args(&target_info.rustflags); + if let Some(args) = target_rustc_args { + process.args(args); + } + if let CompileKind::Target(t) = kind { + process.arg("--target").arg(t.short_name()); + } + process.arg("--print").arg(print_opt_value); + process.exec()?; + } + Ok(()) +} + pub fn create_bcx<'a, 'cfg>( ws: &'a Workspace<'cfg>, options: &'a CompileOptions, @@ -303,9 +328,7 @@ let CompileOptions { ref build_config, ref spec, - ref features, - all_features, - no_default_features, + ref cli_features, ref filter, ref target_rustdoc_args, ref target_rustc_args, @@ -341,11 +364,6 @@ let target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?; let specs = spec.to_package_id_specs(ws)?; - let dev_deps = ws.require_optional_deps() || filter.need_dev_deps(build_config.mode); - let opts = ResolveOpts::new( - dev_deps, - RequestedFeatures::from_command_line(features, all_features, !no_default_features), - ); let has_dev_units = if filter.need_dev_deps(build_config.mode) { HasDevUnits::Yes } else { @@ -355,7 +373,7 @@ ws, &target_data, &build_config.requested_kinds, - &opts, + cli_features, &specs, has_dev_units, crate::core::resolver::features::ForceAllTargets::No, @@ -449,11 +467,17 @@ }) .collect(); + // Passing `build_config.requested_kinds` instead of + // `explicit_host_kinds` here so that `generate_targets` can do + // its own special handling of `CompileKind::Host`. It will + // internally replace the host kind by the `explicit_host_kind` + // before setting as a unit. 
let mut units = generate_targets( ws, &to_builds, filter, - &explicit_host_kinds, + &build_config.requested_kinds, + explicit_host_kind, build_config.mode, &resolve, &workspace_resolve, @@ -772,6 +796,16 @@ } } + pub fn is_all_targets(&self) -> bool { + matches!( + *self, + CompileFilter::Only { + all_targets: true, + .. + } + ) + } + pub(crate) fn contains_glob_patterns(&self) -> bool { match self { CompileFilter::Default { .. } => false, @@ -814,6 +848,7 @@ packages: &[&Package], filter: &CompileFilter, requested_kinds: &[CompileKind], + explicit_host_kind: CompileKind, mode: CompileMode, resolve: &Resolve, workspace_resolve: &Option, @@ -882,19 +917,40 @@ }; let is_local = pkg.package_id().source_id().is_path(); - let profile = profiles.get_profile( - pkg.package_id(), - ws.is_member(pkg), - is_local, - unit_for, - target_mode, - ); // No need to worry about build-dependencies, roots are never build dependencies. let features_for = FeaturesFor::from_for_host(target.proc_macro()); let features = resolved_features.activated_features(pkg.package_id(), features_for); - for kind in requested_kinds { + // If `--target` has not been specified, then the unit + // graph is built almost like if `--target $HOST` was + // specified. See `rebuild_unit_graph_shared` for more on + // why this is done. 
However, if the package has its own + // `package.target` key, then this gets used instead of + // `$HOST` + let explicit_kinds = if let Some(k) = pkg.manifest().forced_kind() { + vec![k] + } else { + requested_kinds + .iter() + .map(|kind| match kind { + CompileKind::Host => { + pkg.manifest().default_kind().unwrap_or(explicit_host_kind) + } + CompileKind::Target(t) => CompileKind::Target(*t), + }) + .collect() + }; + + for kind in explicit_kinds.iter() { + let profile = profiles.get_profile( + pkg.package_id(), + ws.is_member(pkg), + is_local, + unit_for, + target_mode, + *kind, + ); let unit = interner.intern( pkg, target, diff -Nru cargo-0.52.0/src/cargo/ops/cargo_config.rs cargo-0.54.0/src/cargo/ops/cargo_config.rs --- cargo-0.52.0/src/cargo/ops/cargo_config.rs 1970-01-01 00:00:00.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_config.rs 2021-04-27 14:35:53.000000000 +0000 @@ -0,0 +1,308 @@ +//! Implementation of `cargo config` subcommand. + +use crate::util::config::{Config, ConfigKey, ConfigValue as CV, Definition}; +use crate::util::errors::CargoResult; +use crate::{drop_eprintln, drop_println}; +use anyhow::{bail, format_err, Error}; +use serde_json::json; +use std::borrow::Cow; +use std::fmt; +use std::str::FromStr; + +pub enum ConfigFormat { + Toml, + Json, + JsonValue, +} + +impl ConfigFormat { + /// For clap. 
+ pub const POSSIBLE_VALUES: &'static [&'static str] = &["toml", "json", "json-value"]; +} + +impl FromStr for ConfigFormat { + type Err = Error; + fn from_str(s: &str) -> CargoResult { + match s { + "toml" => Ok(ConfigFormat::Toml), + "json" => Ok(ConfigFormat::Json), + "json-value" => Ok(ConfigFormat::JsonValue), + f => bail!("unknown config format `{}`", f), + } + } +} + +impl fmt::Display for ConfigFormat { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + ConfigFormat::Toml => write!(f, "toml"), + ConfigFormat::Json => write!(f, "json"), + ConfigFormat::JsonValue => write!(f, "json-value"), + } + } +} + +/// Options for `cargo config get`. +pub struct GetOptions<'a> { + pub key: Option<&'a str>, + pub format: ConfigFormat, + pub show_origin: bool, + pub merged: bool, +} + +pub fn get(config: &Config, opts: &GetOptions<'_>) -> CargoResult<()> { + if opts.show_origin { + if !matches!(opts.format, ConfigFormat::Toml) { + bail!( + "the `{}` format does not support --show-origin, try the `toml` format instead", + opts.format + ); + } + } + let key = match opts.key { + Some(key) => ConfigKey::from_str(key), + None => ConfigKey::new(), + }; + if opts.merged { + let cv = config + .get_cv_with_env(&key)? 
+ .ok_or_else(|| format_err!("config value `{}` is not set", key))?; + match opts.format { + ConfigFormat::Toml => print_toml(config, opts, &key, &cv), + ConfigFormat::Json => print_json(config, &key, &cv, true), + ConfigFormat::JsonValue => print_json(config, &key, &cv, false), + } + if let Some(env) = maybe_env(config, &key, &cv) { + match opts.format { + ConfigFormat::Toml => print_toml_env(config, &env), + ConfigFormat::Json | ConfigFormat::JsonValue => print_json_env(config, &env), + } + } + } else { + match &opts.format { + ConfigFormat::Toml => print_toml_unmerged(config, opts, &key)?, + format => bail!( + "the `{}` format does not support --merged=no, try the `toml` format instead", + format + ), + } + } + Ok(()) +} + +/// Checks for environment variables that might be used. +fn maybe_env<'config>( + config: &'config Config, + key: &ConfigKey, + cv: &CV, +) -> Option> { + // Only fetching a table is unable to load env values. Leaf entries should + // work properly. + match cv { + CV::Table(_map, _def) => {} + _ => return None, + } + let mut env: Vec<_> = config + .env() + .iter() + .filter(|(env_key, _val)| env_key.starts_with(&format!("{}_", key.as_env_key()))) + .collect(); + env.sort_by_key(|x| x.0); + if env.is_empty() { + None + } else { + Some(env) + } +} + +fn print_toml(config: &Config, opts: &GetOptions<'_>, key: &ConfigKey, cv: &CV) { + let origin = |def: &Definition| -> String { + if !opts.show_origin { + return "".to_string(); + } + format!(" # {}", def) + }; + match cv { + CV::Boolean(val, def) => drop_println!(config, "{} = {}{}", key, val, origin(def)), + CV::Integer(val, def) => drop_println!(config, "{} = {}{}", key, val, origin(def)), + CV::String(val, def) => drop_println!( + config, + "{} = {}{}", + key, + toml::to_string(&val).unwrap(), + origin(def) + ), + CV::List(vals, _def) => { + if opts.show_origin { + drop_println!(config, "{} = [", key); + for (val, def) in vals { + drop_println!(config, " {}, # {}", 
toml::to_string(&val).unwrap(), def); + } + drop_println!(config, "]"); + } else { + let vals: Vec<&String> = vals.iter().map(|x| &x.0).collect(); + drop_println!(config, "{} = {}", key, toml::to_string(&vals).unwrap()); + } + } + CV::Table(table, _def) => { + let mut key_vals: Vec<_> = table.iter().collect(); + key_vals.sort_by(|a, b| a.0.cmp(b.0)); + for (table_key, val) in key_vals { + let mut subkey = key.clone(); + // push or push_sensitive shouldn't matter here, since this is + // not dealing with environment variables. + subkey.push(table_key); + print_toml(config, opts, &subkey, val); + } + } + } +} + +fn print_toml_env(config: &Config, env: &[(&String, &String)]) { + drop_println!( + config, + "# The following environment variables may affect the loaded values." + ); + for (env_key, env_value) in env { + let val = shell_escape::escape(Cow::Borrowed(env_value)); + drop_println!(config, "# {}={}", env_key, val); + } +} + +fn print_json_env(config: &Config, env: &[(&String, &String)]) { + drop_eprintln!( + config, + "note: The following environment variables may affect the loaded values." + ); + for (env_key, env_value) in env { + let val = shell_escape::escape(Cow::Borrowed(env_value)); + drop_eprintln!(config, "{}={}", env_key, val); + } +} + +fn print_json(config: &Config, key: &ConfigKey, cv: &CV, include_key: bool) { + let json_value = if key.is_root() || !include_key { + cv_to_json(cv) + } else { + let mut parts: Vec<_> = key.parts().collect(); + let last_part = parts.pop().unwrap(); + let mut root_table = json!({}); + // Create a JSON object with nested keys up to the value being displayed. + let mut table = &mut root_table; + for part in parts { + table[part] = json!({}); + table = table.get_mut(part).unwrap(); + } + table[last_part] = cv_to_json(cv); + root_table + }; + drop_println!(config, "{}", serde_json::to_string(&json_value).unwrap()); + + // Helper for recursively converting a CV to JSON. 
+ fn cv_to_json(cv: &CV) -> serde_json::Value { + match cv { + CV::Boolean(val, _def) => json!(val), + CV::Integer(val, _def) => json!(val), + CV::String(val, _def) => json!(val), + CV::List(vals, _def) => { + let jvals: Vec<_> = vals.iter().map(|(val, _def)| json!(val)).collect(); + json!(jvals) + } + CV::Table(map, _def) => { + let mut table = json!({}); + for (key, val) in map { + table[key] = cv_to_json(val); + } + table + } + } + } +} + +fn print_toml_unmerged(config: &Config, opts: &GetOptions<'_>, key: &ConfigKey) -> CargoResult<()> { + let print_table = |cv: &CV| { + drop_println!(config, "# {}", cv.definition()); + print_toml(config, opts, &ConfigKey::new(), cv); + drop_println!(config, ""); + }; + // This removes entries from the given CV so that all that remains is the + // given key. Returns false if no entries were found. + fn trim_cv(mut cv: &mut CV, key: &ConfigKey) -> CargoResult { + for (i, part) in key.parts().enumerate() { + match cv { + CV::Table(map, _def) => { + map.retain(|key, _value| key == part); + match map.get_mut(part) { + Some(val) => cv = val, + None => return Ok(false), + } + } + _ => { + let mut key_so_far = ConfigKey::new(); + for part in key.parts().take(i) { + key_so_far.push(part); + } + bail!( + "expected table for configuration key `{}`, \ + but found {} in {}", + key_so_far, + cv.desc(), + cv.definition() + ) + } + } + } + Ok(match cv { + CV::Table(map, _def) => !map.is_empty(), + _ => true, + }) + } + + let mut cli_args = config.cli_args_as_table()?; + if trim_cv(&mut cli_args, key)? { + print_table(&cli_args); + } + + // This slurps up some extra env vars that aren't technically part of the + // "config" (or are special-cased). I'm personally fine with just keeping + // them here, though it might be confusing. 
The vars I'm aware of: + // + // * CARGO + // * CARGO_HOME + // * CARGO_NAME + // * CARGO_EMAIL + // * CARGO_INCREMENTAL + // * CARGO_TARGET_DIR + // * CARGO_CACHE_RUSTC_INFO + // + // All of these except CARGO, CARGO_HOME, and CARGO_CACHE_RUSTC_INFO are + // actually part of the config, but they are special-cased in the code. + // + // TODO: It might be a good idea to teach the Config loader to support + // environment variable aliases so that these special cases are less + // special, and will just naturally get loaded as part of the config. + let mut env: Vec<_> = config + .env() + .iter() + .filter(|(env_key, _val)| env_key.starts_with(key.as_env_key())) + .collect(); + if !env.is_empty() { + env.sort_by_key(|x| x.0); + drop_println!(config, "# Environment variables"); + for (key, value) in env { + // Displaying this in "shell" syntax instead of TOML, since that + // somehow makes more sense to me. + let val = shell_escape::escape(Cow::Borrowed(value)); + drop_println!(config, "# {}={}", key, val); + } + drop_println!(config, ""); + } + + let unmerged = config.load_values_unmerged()?; + for mut cv in unmerged { + if trim_cv(&mut cv, key)? { + print_table(&cv); + } + } + Ok(()) +} diff -Nru cargo-0.52.0/src/cargo/ops/cargo_doc.rs cargo-0.54.0/src/cargo/ops/cargo_doc.rs --- cargo-0.52.0/src/cargo/ops/cargo_doc.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_doc.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,5 +1,5 @@ use crate::core::compiler::RustcTargetData; -use crate::core::resolver::{features::RequestedFeatures, HasDevUnits, ResolveOpts}; +use crate::core::resolver::HasDevUnits; use crate::core::{Shell, Workspace}; use crate::ops; use crate::util::CargoResult; @@ -19,20 +19,12 @@ /// Main method for `cargo doc`. 
pub fn doc(ws: &Workspace<'_>, options: &DocOptions) -> CargoResult<()> { let specs = options.compile_opts.spec.to_package_id_specs(ws)?; - let opts = ResolveOpts::new( - /*dev_deps*/ true, - RequestedFeatures::from_command_line( - &options.compile_opts.features, - options.compile_opts.all_features, - !options.compile_opts.no_default_features, - ), - ); let target_data = RustcTargetData::new(ws, &options.compile_opts.build_config.requested_kinds)?; let ws_resolve = ops::resolve_ws_with_opts( ws, &target_data, &options.compile_opts.build_config.requested_kinds, - &opts, + &options.compile_opts.cli_features, &specs, HasDevUnits::No, crate::core::resolver::features::ForceAllTargets::No, diff -Nru cargo-0.52.0/src/cargo/ops/cargo_generate_lockfile.rs cargo-0.54.0/src/cargo/ops/cargo_generate_lockfile.rs --- cargo-0.52.0/src/cargo/ops/cargo_generate_lockfile.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_generate_lockfile.rs 2021-04-27 14:35:53.000000000 +0000 @@ -4,7 +4,7 @@ use termcolor::Color::{self, Cyan, Green, Red}; use crate::core::registry::PackageRegistry; -use crate::core::resolver::ResolveOpts; +use crate::core::resolver::features::{CliFeatures, HasDevUnits}; use crate::core::{PackageId, PackageIdSpec}; use crate::core::{Resolve, SourceId, Workspace}; use crate::ops; @@ -25,7 +25,8 @@ let mut resolve = ops::resolve_with_previous( &mut registry, ws, - &ResolveOpts::everything(), + &CliFeatures::new_all(true), + HasDevUnits::Yes, None, None, &[], @@ -44,10 +45,6 @@ anyhow::bail!("you can't generate a lockfile for an empty workspace.") } - if opts.config.offline() { - anyhow::bail!("you can't update in the offline mode"); - } - // Updates often require a lot of modifications to the registry, so ensure // that we're synchronized against other Cargos. 
let _lock = ws.config().acquire_package_cache_lock()?; @@ -65,7 +62,8 @@ ops::resolve_with_previous( &mut registry, ws, - &ResolveOpts::everything(), + &CliFeatures::new_all(true), + HasDevUnits::Yes, None, None, &[], @@ -119,7 +117,8 @@ let mut resolve = ops::resolve_with_previous( &mut registry, ws, - &ResolveOpts::everything(), + &CliFeatures::new_all(true), + HasDevUnits::Yes, Some(&previous_resolve), Some(&to_avoid), &[], diff -Nru cargo-0.52.0/src/cargo/ops/cargo_install.rs cargo-0.54.0/src/cargo/ops/cargo_install.rs --- cargo-0.52.0/src/cargo/ops/cargo_install.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_install.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,18 +3,19 @@ use std::sync::Arc; use std::{env, fs}; -use anyhow::{bail, format_err}; -use semver::VersionReq; -use tempfile::Builder as TempFileBuilder; - use crate::core::compiler::{CompileKind, DefaultExecutor, Executor, Freshness, UnitOutput}; use crate::core::{Dependency, Edition, Package, PackageId, Source, SourceId, Workspace}; use crate::ops::common_for_install_and_uninstall::*; use crate::sources::{GitSource, PathSource, SourceConfigMap}; -use crate::util::errors::{CargoResult, CargoResultExt}; -use crate::util::{paths, Config, Filesystem, Rustc, ToSemver}; +use crate::util::errors::CargoResult; +use crate::util::{Config, Filesystem, Rustc, ToSemver}; use crate::{drop_println, ops}; +use anyhow::{bail, format_err, Context as _}; +use cargo_util::paths; +use semver::VersionReq; +use tempfile::Builder as TempFileBuilder; + struct Transaction { bins: Vec, } @@ -305,7 +306,12 @@ // *something* to install. Explicit `--bin` or `--example` flags will be // checked at the start of `compile_ws`. 
if !opts.filter.is_specific() && !pkg.targets().iter().any(|t| t.is_bin()) { - bail!("specified package `{}` has no binaries", pkg); + bail!( + "there is nothing to install in `{}`, because it has no binaries\n\ + `cargo install` is only for installing programs, and can't be used with libraries.\n\ + To use a library crate, add it as a dependency in a Cargo project instead.", + pkg + ); } // Helper for --no-track flag to make sure it doesn't overwrite anything. @@ -345,13 +351,13 @@ check_yanked_install(&ws)?; let exec: Arc = Arc::new(DefaultExecutor); - let compile = ops::compile_ws(&ws, opts, &exec).chain_err(|| { + let compile = ops::compile_ws(&ws, opts, &exec).with_context(|| { if let Some(td) = td_opt.take() { // preserve the temporary directory, so the user can inspect it td.into_path(); } - format_err!( + format!( "failed to compile `{}`, intermediate artifacts can be \ found at `{}`", pkg, @@ -415,8 +421,8 @@ let src = staging_dir.path().join(bin); let dst = dst.join(bin); config.shell().status("Installing", dst.display())?; - fs::rename(&src, &dst).chain_err(|| { - format_err!("failed to move `{}` to `{}`", src.display(), dst.display()) + fs::rename(&src, &dst).with_context(|| { + format!("failed to move `{}` to `{}`", src.display(), dst.display()) })?; installed.bins.push(dst); successful_bins.insert(bin.to_string()); @@ -430,8 +436,8 @@ let src = staging_dir.path().join(bin); let dst = dst.join(bin); config.shell().status("Replacing", dst.display())?; - fs::rename(&src, &dst).chain_err(|| { - format_err!("failed to move `{}` to `{}`", src.display(), dst.display()) + fs::rename(&src, &dst).with_context(|| { + format!("failed to move `{}` to `{}`", src.display(), dst.display()) })?; successful_bins.insert(bin.to_string()); } @@ -458,7 +464,7 @@ } match tracker.save() { - Err(err) => replace_result.chain_err(|| err)?, + Err(err) => replace_result.with_context(|| err)?, Ok(_) => replace_result?, } } @@ -698,21 +704,19 @@ let all_self_names = exe_names(pkg, 
&filter); let mut to_remove: HashMap> = HashMap::new(); // For each package that we stomped on. - for other_pkg in duplicates.values() { + for other_pkg in duplicates.values().flatten() { // Only for packages with the same name. - if let Some(other_pkg) = other_pkg { - if other_pkg.name() == pkg.name() { - // Check what the old package had installed. - if let Some(installed) = tracker.installed_bins(*other_pkg) { - // If the old install has any names that no longer exist, - // add them to the list to remove. - for installed_name in installed { - if !all_self_names.contains(installed_name.as_str()) { - to_remove - .entry(*other_pkg) - .or_default() - .insert(installed_name.clone()); - } + if other_pkg.name() == pkg.name() { + // Check what the old package had installed. + if let Some(installed) = tracker.installed_bins(*other_pkg) { + // If the old install has any names that no longer exist, + // add them to the list to remove. + for installed_name in installed { + if !all_self_names.contains(installed_name.as_str()) { + to_remove + .entry(*other_pkg) + .or_default() + .insert(installed_name.clone()); } } } @@ -733,7 +737,7 @@ ), )?; paths::remove_file(&full_path) - .chain_err(|| format!("failed to remove {:?}", full_path))?; + .with_context(|| format!("failed to remove {:?}", full_path))?; } } } diff -Nru cargo-0.52.0/src/cargo/ops/cargo_new.rs cargo-0.54.0/src/cargo/ops/cargo_new.rs --- cargo-0.52.0/src/cargo/ops/cargo_new.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_new.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,13 +1,12 @@ -use crate::core::{Shell, Workspace}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::core::{Edition, Shell, Workspace}; +use crate::util::errors::CargoResult; use crate::util::{existing_vcs_repo, FossilRepo, GitRepo, HgRepo, PijulRepo}; -use crate::util::{paths, restricted_names, Config}; -use git2::Config as GitConfig; -use git2::Repository as GitRepository; +use 
crate::util::{restricted_names, Config}; +use anyhow::Context as _; +use cargo_util::paths; use serde::de; use serde::Deserialize; use std::collections::BTreeMap; -use std::env; use std::fmt; use std::io::{BufRead, BufReader, ErrorKind}; use std::path::{Path, PathBuf}; @@ -128,8 +127,14 @@ #[derive(Deserialize)] struct CargoNewConfig { + #[deprecated = "cargo-new no longer supports adding the authors field"] + #[allow(dead_code)] name: Option, + + #[deprecated = "cargo-new no longer supports adding the authors field"] + #[allow(dead_code)] email: Option, + #[serde(rename = "vcs")] version_control: Option, } @@ -412,8 +417,8 @@ registry: opts.registry.as_deref(), }; - mk(config, &mkopts).chain_err(|| { - anyhow::format_err!( + mk(config, &mkopts).with_context(|| { + format!( "Failed to create package `{}` at `{}`", name, path.display() @@ -496,8 +501,8 @@ registry: opts.registry.as_deref(), }; - mk(config, &mkopts).chain_err(|| { - anyhow::format_err!( + mk(config, &mkopts).with_context(|| { + format!( "Failed to create package `{}` at `{}`", name, path.display() @@ -665,32 +670,6 @@ init_vcs(path, vcs, config)?; write_ignore_file(path, &ignore, vcs)?; - let (discovered_name, discovered_email) = discover_author(path); - - // "Name " or "Name" or "" or None if neither name nor email is obtained - // cfg takes priority over the discovered ones - let author_name = cfg.name.or(discovered_name); - let author_email = cfg.email.or(discovered_email); - - let author = match (author_name, author_email) { - (Some(name), Some(email)) => { - if email.is_empty() { - Some(name) - } else { - Some(format!("{} <{}>", name, email)) - } - } - (Some(name), None) => Some(name), - (None, Some(email)) => { - if email.is_empty() { - None - } else { - Some(format!("<{}>", email)) - } - } - (None, None) => None, - }; - let mut cargotoml_path_specifier = String::new(); // Calculate what `[lib]` and `[[bin]]`s we need to append to `Cargo.toml`. 
@@ -729,7 +708,6 @@ r#"[package] name = "{}" version = "0.1.0" -authors = [{}] edition = {} {} # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html @@ -737,13 +715,9 @@ [dependencies] {}"#, name, - match author { - Some(value) => format!("{}", toml::Value::String(value)), - None => format!(""), - }, match opts.edition { Some(edition) => toml::Value::String(edition.to_string()), - None => toml::Value::String("2018".to_string()), + None => toml::Value::String(Edition::LATEST_STABLE.to_string()), }, match opts.registry { Some(registry) => format!( @@ -810,79 +784,3 @@ Ok(()) } - -fn get_environment_variable(variables: &[&str]) -> Option { - variables.iter().filter_map(|var| env::var(var).ok()).next() -} - -fn discover_author(path: &Path) -> (Option, Option) { - let git_config = find_git_config(path); - let git_config = git_config.as_ref(); - - let name_variables = [ - "CARGO_NAME", - "GIT_AUTHOR_NAME", - "GIT_COMMITTER_NAME", - "USER", - "USERNAME", - "NAME", - ]; - let name = get_environment_variable(&name_variables[0..3]) - .or_else(|| git_config.and_then(|g| g.get_string("user.name").ok())) - .or_else(|| get_environment_variable(&name_variables[3..])); - - let name = match name { - Some(namestr) => Some(namestr.trim().to_string()), - None => None, - }; - - let email_variables = [ - "CARGO_EMAIL", - "GIT_AUTHOR_EMAIL", - "GIT_COMMITTER_EMAIL", - "EMAIL", - ]; - let email = get_environment_variable(&email_variables[0..3]) - .or_else(|| git_config.and_then(|g| g.get_string("user.email").ok())) - .or_else(|| get_environment_variable(&email_variables[3..])); - - let email = email.map(|s| { - let mut s = s.trim(); - - // In some cases emails will already have <> remove them since they - // are already added when needed. 
- if s.starts_with('<') && s.ends_with('>') { - s = &s[1..s.len() - 1]; - } - - s.to_string() - }); - - (name, email) -} - -fn find_git_config(path: &Path) -> Option { - match env::var("__CARGO_TEST_ROOT") { - Ok(_) => find_tests_git_config(path), - Err(_) => find_real_git_config(path), - } -} - -fn find_tests_git_config(path: &Path) -> Option { - // Don't escape the test sandbox when looking for a git repository. - // NOTE: libgit2 has support to define the path ceiling in - // git_repository_discover, but the git2 bindings do not expose that. - for path in paths::ancestors(path) { - if let Ok(repo) = GitRepository::open(path) { - return Some(repo.config().expect("test repo should have valid config")); - } - } - GitConfig::open_default().ok() -} - -fn find_real_git_config(path: &Path) -> Option { - GitRepository::discover(path) - .and_then(|repo| repo.config()) - .or_else(|_| GitConfig::open_default()) - .ok() -} diff -Nru cargo-0.52.0/src/cargo/ops/cargo_output_metadata.rs cargo-0.54.0/src/cargo/ops/cargo_output_metadata.rs --- cargo-0.52.0/src/cargo/ops/cargo_output_metadata.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_output_metadata.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,7 +1,7 @@ use crate::core::compiler::{CompileKind, RustcTargetData}; use crate::core::dependency::DepKind; use crate::core::package::SerializedPackage; -use crate::core::resolver::{features::RequestedFeatures, HasDevUnits, Resolve, ResolveOpts}; +use crate::core::resolver::{features::CliFeatures, HasDevUnits, Resolve}; use crate::core::{Dependency, Package, PackageId, Workspace}; use crate::ops::{self, Packages}; use crate::util::interning::InternedString; @@ -14,9 +14,7 @@ const VERSION: u32 = 1; pub struct OutputMetadataOptions { - pub features: Vec, - pub no_default_features: bool, - pub all_features: bool, + pub cli_features: CliFeatures, pub no_deps: bool, pub version: u32, pub filter_platforms: Vec, @@ -115,12 +113,6 @@ let target_data = 
RustcTargetData::new(ws, &requested_kinds)?; // Resolve entire workspace. let specs = Packages::All.to_package_id_specs(ws)?; - let requested_features = RequestedFeatures::from_command_line( - &metadata_opts.features, - metadata_opts.all_features, - !metadata_opts.no_default_features, - ); - let resolve_opts = ResolveOpts::new(/*dev_deps*/ true, requested_features); let force_all = if metadata_opts.filter_platforms.is_empty() { crate::core::resolver::features::ForceAllTargets::Yes } else { @@ -133,7 +125,7 @@ ws, &target_data, &requested_kinds, - &resolve_opts, + &metadata_opts.cli_features, &specs, HasDevUnits::Yes, force_all, @@ -179,7 +171,7 @@ pkg_id: PackageId, resolve: &Resolve, package_map: &BTreeMap, - target_data: &RustcTargetData, + target_data: &RustcTargetData<'_>, requested_kinds: &[CompileKind], ) { if node_map.contains_key(&pkg_id) { diff -Nru cargo-0.52.0/src/cargo/ops/cargo_package.rs cargo-0.54.0/src/cargo/ops/cargo_package.rs --- cargo-0.52.0/src/cargo/ops/cargo_package.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_package.rs 2021-04-27 14:35:53.000000000 +0000 @@ -6,20 +6,21 @@ use std::rc::Rc; use std::sync::Arc; -use flate2::read::GzDecoder; -use flate2::{Compression, GzBuilder}; -use log::debug; -use tar::{Archive, Builder, EntryType, Header, HeaderMode}; - use crate::core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor}; +use crate::core::resolver::CliFeatures; use crate::core::{Feature, Shell, Verbosity, Workspace}; use crate::core::{Package, PackageId, PackageSet, Resolve, Source, SourceId}; use crate::sources::PathSource; -use crate::util::errors::{CargoResult, CargoResultExt}; -use crate::util::paths; +use crate::util::errors::CargoResult; use crate::util::toml::TomlManifest; use crate::util::{self, restricted_names, Config, FileLock}; use crate::{drop_println, ops}; +use anyhow::Context as _; +use cargo_util::paths; +use flate2::read::GzDecoder; +use flate2::{Compression, GzBuilder}; +use 
log::debug; +use tar::{Archive, Builder, EntryType, Header, HeaderMode}; pub struct PackageOpts<'cfg> { pub config: &'cfg Config, @@ -29,9 +30,7 @@ pub verify: bool, pub jobs: Option, pub targets: Vec, - pub features: Vec, - pub all_features: bool, - pub no_default_features: bool, + pub cli_features: CliFeatures, } const VCS_INFO_FILE: &str = ".cargo_vcs_info.json"; @@ -124,17 +123,17 @@ .status("Packaging", pkg.package_id().to_string())?; dst.file().set_len(0)?; tar(ws, ar_files, dst.file(), &filename) - .chain_err(|| anyhow::format_err!("failed to prepare local package for uploading"))?; + .with_context(|| "failed to prepare local package for uploading")?; if opts.verify { dst.seek(SeekFrom::Start(0))?; - run_verify(ws, &dst, opts).chain_err(|| "failed to verify package tarball")? + run_verify(ws, &dst, opts).with_context(|| "failed to verify package tarball")? } dst.seek(SeekFrom::Start(0))?; { let src_path = dst.path(); let dst_path = dst.parent().join(&filename); fs::rename(&src_path, &dst_path) - .chain_err(|| "failed to move temporary tarball into final location")?; + .with_context(|| "failed to move temporary tarball into final location")?; } Ok(Some(dst)) } @@ -480,7 +479,7 @@ // Prepare the encoder and its header. let filename = Path::new(filename); let encoder = GzBuilder::new() - .filename(util::path2bytes(filename)?) + .filename(paths::path2bytes(filename)?) .write(dst, Compression::best()); // Put all package files into a compressed archive. 
@@ -503,16 +502,16 @@ let mut header = Header::new_gnu(); match contents { FileContents::OnDisk(disk_path) => { - let mut file = File::open(&disk_path).chain_err(|| { + let mut file = File::open(&disk_path).with_context(|| { format!("failed to open for archiving: `{}`", disk_path.display()) })?; - let metadata = file.metadata().chain_err(|| { + let metadata = file.metadata().with_context(|| { format!("could not learn metadata for: `{}`", disk_path.display()) })?; header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic); header.set_cksum(); ar.append_data(&mut header, &ar_path, &mut file) - .chain_err(|| { + .with_context(|| { format!("could not archive source file `{}`", disk_path.display()) })?; } @@ -527,7 +526,7 @@ header.set_size(contents.len() as u64); header.set_cksum(); ar.append_data(&mut header, &ar_path, contents.as_bytes()) - .chain_err(|| format!("could not archive source file `{}`", rel_str))?; + .with_context(|| format!("could not archive source file `{}`", rel_str))?; } } } @@ -690,9 +689,7 @@ &ws, &ops::CompileOptions { build_config: BuildConfig::new(config, opts.jobs, &opts.targets, CompileMode::Build)?, - features: opts.features.clone(), - no_default_features: opts.no_default_features, - all_features: opts.all_features, + cli_features: opts.cli_features.clone(), spec: ops::Packages::Packages(Vec::new()), filter: ops::CompileFilter::Default { required_features_filterable: true, @@ -743,7 +740,7 @@ } Ok(result) } - let result = wrap(path).chain_err(|| format!("failed to verify output at {:?}", path))?; + let result = wrap(path).with_context(|| format!("failed to verify output at {:?}", path))?; Ok(result) } diff -Nru cargo-0.52.0/src/cargo/ops/cargo_read_manifest.rs cargo-0.54.0/src/cargo/ops/cargo_read_manifest.rs --- cargo-0.52.0/src/cargo/ops/cargo_read_manifest.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_read_manifest.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,13 +3,13 @@ use std::io; use 
std::path::{Path, PathBuf}; -use log::{info, trace}; - use crate::core::{EitherManifest, Package, PackageId, SourceId}; use crate::util::errors::CargoResult; use crate::util::important_paths::find_project_manifest_exact; use crate::util::toml::read_manifest; -use crate::util::{self, Config}; +use crate::util::Config; +use cargo_util::paths; +use log::{info, trace}; pub fn read_package( path: &Path, @@ -192,7 +192,7 @@ // TODO: filesystem/symlink implications? if !source_id.is_registry() { for p in nested.iter() { - let path = util::normalize_path(&path.join(p)); + let path = paths::normalize_path(&path.join(p)); let result = read_nested_packages(&path, all_packages, source_id, config, visited, errors); // Ignore broken manifests found on git repositories. diff -Nru cargo-0.52.0/src/cargo/ops/cargo_test.rs cargo-0.54.0/src/cargo/ops/cargo_test.rs --- cargo-0.52.0/src/cargo/ops/cargo_test.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_test.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,11 +1,11 @@ -use std::ffi::OsString; - use crate::core::compiler::{Compilation, CompileKind, Doctest, UnitOutput}; use crate::core::shell::Verbosity; -use crate::core::Workspace; +use crate::core::{TargetKind, Workspace}; use crate::ops; use crate::util::errors::CargoResult; -use crate::util::{CargoTestError, Config, ProcessError, Test}; +use crate::util::{add_path_args, CargoTestError, Config, Test}; +use cargo_util::ProcessError; +use std::ffi::OsString; pub struct TestOptions { pub compile_opts: ops::CompileOptions, @@ -30,7 +30,7 @@ return Ok(Some(CargoTestError::new(test, errors))); } - let (doctest, docerrors) = run_doc_tests(ws.config(), options, test_args, &compilation)?; + let (doctest, docerrors) = run_doc_tests(ws, options, test_args, &compilation)?; let test = if docerrors.is_empty() { test } else { doctest }; errors.extend(docerrors); if errors.is_empty() { @@ -85,7 +85,24 @@ } in compilation.tests.iter() { let test = 
unit.target.name().to_string(); - let exe_display = path.strip_prefix(cwd).unwrap_or(path).display(); + + let test_path = unit.target.src_path().path().unwrap(); + let exe_display = if let TargetKind::Test = unit.target.kind() { + format!( + "{} ({})", + test_path + .strip_prefix(unit.pkg.root()) + .unwrap_or(test_path) + .display(), + path.strip_prefix(cwd).unwrap_or(path).display() + ) + } else { + format!( + "unittests ({})", + path.strip_prefix(cwd).unwrap_or(path).display() + ) + }; + let mut cmd = compilation.target_process(path, unit.kind, &unit.pkg, *script_meta)?; cmd.args(test_args); if unit.target.harness() && config.shell().verbosity() == Verbosity::Quiet { @@ -136,13 +153,15 @@ } fn run_doc_tests( - config: &Config, + ws: &Workspace<'_>, options: &TestOptions, test_args: &[&str], compilation: &Compilation<'_>, ) -> CargoResult<(Test, Vec)> { + let config = ws.config(); let mut errors = Vec::new(); let doctest_xcompile = config.cli_unstable().doctest_xcompile; + let doctest_in_workspace = config.cli_unstable().doctest_in_workspace; for doctest_info in &compilation.to_doc_test { let Doctest { @@ -167,10 +186,18 @@ config.shell().status("Doc-tests", unit.target.name())?; let mut p = compilation.rustdoc_process(unit, *script_meta)?; - p.arg("--test") - .arg(unit.target.src_path().path().unwrap()) - .arg("--crate-name") - .arg(&unit.target.crate_name()); + p.arg("--crate-name").arg(&unit.target.crate_name()); + p.arg("--test"); + + if doctest_in_workspace { + add_path_args(ws, unit, &mut p); + // FIXME(swatinem): remove the `unstable-options` once rustdoc stabilizes the `test-run-directory` option + p.arg("-Z").arg("unstable-options"); + p.arg("--test-run-directory") + .arg(unit.pkg.root().to_path_buf()); + } else { + p.arg(unit.target.src_path().path().unwrap()); + } if doctest_xcompile { if let CompileKind::Target(target) = unit.kind { diff -Nru cargo-0.52.0/src/cargo/ops/cargo_uninstall.rs cargo-0.54.0/src/cargo/ops/cargo_uninstall.rs --- 
cargo-0.52.0/src/cargo/ops/cargo_uninstall.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/cargo_uninstall.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,15 +1,14 @@ -use anyhow::bail; -use std::collections::BTreeSet; -use std::env; - use crate::core::PackageId; use crate::core::{PackageIdSpec, SourceId}; use crate::ops::common_for_install_and_uninstall::*; use crate::sources::PathSource; use crate::util::errors::CargoResult; -use crate::util::paths; use crate::util::Config; use crate::util::Filesystem; +use anyhow::bail; +use cargo_util::paths; +use std::collections::BTreeSet; +use std::env; pub fn uninstall( root: Option<&str>, diff -Nru cargo-0.52.0/src/cargo/ops/common_for_install_and_uninstall.rs cargo-0.54.0/src/cargo/ops/common_for_install_and_uninstall.rs --- cargo-0.52.0/src/cargo/ops/common_for_install_and_uninstall.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/common_for_install_and_uninstall.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,15 +3,16 @@ use std::io::prelude::*; use std::io::SeekFrom; use std::path::{Path, PathBuf}; +use std::rc::Rc; -use anyhow::{bail, format_err}; +use anyhow::{bail, format_err, Context as _}; use serde::{Deserialize, Serialize}; use crate::core::compiler::Freshness; -use crate::core::{Dependency, Package, PackageId, Source, SourceId}; +use crate::core::{Dependency, FeatureValue, Package, PackageId, Source, SourceId}; use crate::ops::{self, CompileFilter, CompileOptions}; use crate::sources::PathSource; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::Config; use crate::util::{FileLock, Filesystem}; @@ -100,12 +101,11 @@ if contents.is_empty() { Ok(CrateListingV1::default()) } else { - Ok(toml::from_str(&contents) - .chain_err(|| format_err!("invalid TOML found for metadata"))?) + Ok(toml::from_str(&contents).with_context(|| "invalid TOML found for metadata")?) 
} })() - .chain_err(|| { - format_err!( + .with_context(|| { + format!( "failed to parse crate metadata at `{}`", v1_lock.path().to_string_lossy() ) @@ -118,13 +118,13 @@ CrateListingV2::default() } else { serde_json::from_str(&contents) - .chain_err(|| format_err!("invalid JSON found for metadata"))? + .with_context(|| "invalid JSON found for metadata")? }; v2.sync_v1(&v1); Ok(v2) })() - .chain_err(|| { - format_err!( + .with_context(|| { + format!( "failed to parse crate metadata at `{}`", v2_lock.path().to_string_lossy() ) @@ -277,15 +277,15 @@ /// Save tracking information to disk. pub fn save(&self) -> CargoResult<()> { - self.v1.save(&self.v1_lock).chain_err(|| { - format_err!( + self.v1.save(&self.v1_lock).with_context(|| { + format!( "failed to write crate metadata at `{}`", self.v1_lock.path().to_string_lossy() ) })?; - self.v2.save(&self.v2_lock).chain_err(|| { - format_err!( + self.v2.save(&self.v2_lock).with_context(|| { + format!( "failed to write crate metadata at `{}`", self.v2_lock.path().to_string_lossy() ) @@ -422,9 +422,9 @@ if let Some(info) = self.installs.get_mut(&pkg.package_id()) { info.bins.append(&mut bins.clone()); info.version_req = version_req; - info.features = feature_set(&opts.features); - info.all_features = opts.all_features; - info.no_default_features = opts.no_default_features; + info.features = feature_set(&opts.cli_features.features); + info.all_features = opts.cli_features.all_features; + info.no_default_features = !opts.cli_features.uses_default_features; info.profile = opts.build_config.requested_profile.to_string(); info.target = Some(target.to_string()); info.rustc = Some(rustc.to_string()); @@ -434,9 +434,9 @@ InstallInfo { version_req, bins: bins.clone(), - features: feature_set(&opts.features), - all_features: opts.all_features, - no_default_features: opts.no_default_features, + features: feature_set(&opts.cli_features.features), + all_features: opts.cli_features.all_features, + no_default_features: 
!opts.cli_features.uses_default_features, profile: opts.build_config.requested_profile.to_string(), target: Some(target.to_string()), rustc: Some(rustc.to_string()), @@ -489,9 +489,9 @@ /// /// This does not do Package/Source/Version checking. fn is_up_to_date(&self, opts: &CompileOptions, target: &str, exes: &BTreeSet) -> bool { - self.features == feature_set(&opts.features) - && self.all_features == opts.all_features - && self.no_default_features == opts.no_default_features + self.features == feature_set(&opts.cli_features.features) + && self.all_features == opts.cli_features.all_features + && self.no_default_features != opts.cli_features.uses_default_features && self.profile.as_str() == opts.build_config.requested_profile.as_str() && (self.target.is_none() || self.target.as_deref() == Some(target)) && &self.bins == exes @@ -641,9 +641,9 @@ } } -/// Helper to convert features Vec to a BTreeSet. -fn feature_set(features: &[String]) -> BTreeSet { - features.iter().cloned().collect() +/// Helper to convert features to a BTreeSet. +fn feature_set(features: &Rc>) -> BTreeSet { + features.iter().map(|s| s.to_string()).collect() } /// Helper to get the executable names from a filter. 
diff -Nru cargo-0.52.0/src/cargo/ops/fix.rs cargo-0.54.0/src/cargo/ops/fix.rs --- cargo-0.52.0/src/cargo/ops/fix.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/fix.rs 2021-04-27 14:35:53.000000000 +0000 @@ -45,27 +45,30 @@ use std::process::{self, Command, ExitStatus}; use std::str; -use anyhow::{Context, Error}; +use anyhow::{bail, Context, Error}; +use cargo_util::{paths, ProcessBuilder}; use log::{debug, trace, warn}; use rustfix::diagnostics::Diagnostic; use rustfix::{self, CodeFix}; -use crate::core::{Edition, Workspace}; +use crate::core::compiler::RustcTargetData; +use crate::core::resolver::features::{FeatureOpts, FeatureResolver}; +use crate::core::resolver::{HasDevUnits, ResolveBehavior}; +use crate::core::{Edition, MaybePackage, Workspace}; use crate::ops::{self, CompileOptions}; use crate::util::diagnostic_server::{Message, RustfixDiagnosticServer}; use crate::util::errors::CargoResult; -use crate::util::{self, paths, Config, ProcessBuilder}; +use crate::util::Config; use crate::util::{existing_vcs_repo, LockServer, LockServerClient}; +use crate::{drop_eprint, drop_eprintln}; const FIX_ENV: &str = "__CARGO_FIX_PLZ"; const BROKEN_CODE_ENV: &str = "__CARGO_FIX_BROKEN_CODE"; -const PREPARE_FOR_ENV: &str = "__CARGO_FIX_PREPARE_FOR"; const EDITION_ENV: &str = "__CARGO_FIX_EDITION"; const IDIOMS_ENV: &str = "__CARGO_FIX_IDIOMS"; -pub struct FixOptions<'a> { +pub struct FixOptions { pub edition: bool, - pub prepare_for: Option<&'a str>, pub idioms: bool, pub compile_opts: CompileOptions, pub allow_dirty: bool, @@ -74,12 +77,15 @@ pub broken_code: bool, } -pub fn fix(ws: &Workspace<'_>, opts: &mut FixOptions<'_>) -> CargoResult<()> { +pub fn fix(ws: &Workspace<'_>, opts: &mut FixOptions) -> CargoResult<()> { check_version_control(ws.config(), opts)?; + if opts.edition { + check_resolver_change(ws, opts)?; + } // Spin up our lock server, which our subprocesses will use to synchronize fixes. 
let lock_server = LockServer::new()?; - let mut wrapper = util::process(env::current_exe()?); + let mut wrapper = ProcessBuilder::new(env::current_exe()?); wrapper.env(FIX_ENV, lock_server.addr().to_string()); let _started = lock_server.start()?; @@ -91,8 +97,6 @@ if opts.edition { wrapper.env(EDITION_ENV, "1"); - } else if let Some(edition) = opts.prepare_for { - wrapper.env(PREPARE_FOR_ENV, edition); } if opts.idioms { wrapper.env(IDIOMS_ENV, "1"); @@ -125,12 +129,12 @@ Ok(()) } -fn check_version_control(config: &Config, opts: &FixOptions<'_>) -> CargoResult<()> { +fn check_version_control(config: &Config, opts: &FixOptions) -> CargoResult<()> { if opts.allow_no_vcs { return Ok(()); } if !existing_vcs_repo(config.cwd(), config.cwd()) { - anyhow::bail!( + bail!( "no VCS found for this package and `cargo fix` can potentially \ perform destructive changes; if you'd like to suppress this \ error pass `--allow-no-vcs`" @@ -185,7 +189,7 @@ files_list.push_str(" (staged)\n"); } - anyhow::bail!( + bail!( "the working directory of this package has uncommitted changes, and \ `cargo fix` can potentially perform destructive changes; if you'd \ like to suppress this error pass `--allow-dirty`, `--allow-staged`, \ @@ -197,7 +201,109 @@ ); } -pub fn fix_maybe_exec_rustc() -> CargoResult { +fn check_resolver_change(ws: &Workspace<'_>, opts: &FixOptions) -> CargoResult<()> { + let root = ws.root_maybe(); + match root { + MaybePackage::Package(root_pkg) => { + if root_pkg.manifest().resolve_behavior().is_some() { + // If explicitly specified by the user, no need to check. + return Ok(()); + } + // Only trigger if updating the root package from 2018. + let pkgs = opts.compile_opts.spec.get_packages(ws)?; + if !pkgs.iter().any(|&pkg| pkg == root_pkg) { + // The root is not being migrated. + return Ok(()); + } + if root_pkg.manifest().edition() != Edition::Edition2018 { + // V1 to V2 only happens on 2018 to 2021. 
+ return Ok(()); + } + } + MaybePackage::Virtual(_vm) => { + // Virtual workspaces don't have a global edition to set (yet). + return Ok(()); + } + } + // 2018 without `resolver` set must be V1 + assert_eq!(ws.resolve_behavior(), ResolveBehavior::V1); + let specs = opts.compile_opts.spec.to_package_id_specs(ws)?; + let target_data = RustcTargetData::new(ws, &opts.compile_opts.build_config.requested_kinds)?; + // HasDevUnits::No because that may uncover more differences. + // This is not the same as what `cargo fix` is doing, since it is doing + // `--all-targets` which includes dev dependencies. + let ws_resolve = ops::resolve_ws_with_opts( + ws, + &target_data, + &opts.compile_opts.build_config.requested_kinds, + &opts.compile_opts.cli_features, + &specs, + HasDevUnits::No, + crate::core::resolver::features::ForceAllTargets::No, + )?; + + let feature_opts = FeatureOpts::new_behavior(ResolveBehavior::V2, HasDevUnits::No); + let v2_features = FeatureResolver::resolve( + ws, + &target_data, + &ws_resolve.targeted_resolve, + &ws_resolve.pkg_set, + &opts.compile_opts.cli_features, + &specs, + &opts.compile_opts.build_config.requested_kinds, + feature_opts, + )?; + + let differences = v2_features.compare_legacy(&ws_resolve.resolved_features); + if differences.features.is_empty() && differences.optional_deps.is_empty() { + // Nothing is different, nothing to report. + return Ok(()); + } + let config = ws.config(); + config.shell().note( + "Switching to Edition 2021 will enable the use of the version 2 feature resolver in Cargo.", + )?; + drop_eprintln!( + config, + "This may cause dependencies to resolve with a different set of features." 
+ ); + drop_eprintln!( + config, + "More information about the resolver changes may be found \ + at https://doc.rust-lang.org/cargo/reference/features.html#feature-resolver-version-2" + ); + drop_eprintln!( + config, + "The following differences were detected with the current configuration:\n" + ); + let report = |changes: crate::core::resolver::features::DiffMap, what| { + for ((pkg_id, for_host), removed) in changes { + drop_eprint!(config, " {}", pkg_id); + if for_host { + drop_eprint!(config, " (as build dependency)"); + } + if !removed.is_empty() { + let joined: Vec<_> = removed.iter().map(|s| s.as_str()).collect(); + drop_eprint!(config, " removed {} `{}`", what, joined.join(",")); + } + drop_eprint!(config, "\n"); + } + }; + report(differences.features, "features"); + report(differences.optional_deps, "optional dependency"); + drop_eprint!(config, "\n"); + Ok(()) +} + +/// Entry point for `cargo` running as a proxy for `rustc`. +/// +/// This is called every time `cargo` is run to check if it is in proxy mode. +/// +/// Returns `false` if `fix` is not being run (not in proxy mode). Returns +/// `true` if in `fix` proxy mode, and the fix was complete without any +/// warnings or errors. If there are warnings or errors, this does not return, +/// and the process exits with the corresponding `rustc` exit code. 
+pub fn fix_maybe_exec_rustc(config: &Config) -> CargoResult { let lock_addr = match env::var(FIX_ENV) { Ok(s) => s, Err(_) => return Ok(false), @@ -206,17 +312,13 @@ let args = FixArgs::get()?; trace!("cargo-fix as rustc got file {:?}", args.file); - let rustc = args.rustc.as_ref().expect("fix wrapper rustc was not set"); let workspace_rustc = std::env::var("RUSTC_WORKSPACE_WRAPPER") .map(PathBuf::from) .ok(); - let rustc = util::process(rustc).wrapped(workspace_rustc.as_ref()); + let rustc = ProcessBuilder::new(&args.rustc).wrapped(workspace_rustc.as_ref()); - let mut fixes = FixedCrate::default(); - if let Some(path) = &args.file { - trace!("start rustfixing {:?}", path); - fixes = rustfix_crate(&lock_addr, &rustc, path, &args)?; - } + trace!("start rustfixing {:?}", args.file); + let fixes = rustfix_crate(&lock_addr, &rustc, &args.file, &args, config)?; // Ok now we have our final goal of testing out the changes that we applied. // If these changes went awry and actually started to cause the crate to @@ -234,7 +336,7 @@ if output.status.success() { for (path, file) in fixes.files.iter() { - Message::Fixing { + Message::Fixed { file: path.clone(), fixes: file.fixes_applied, } @@ -287,13 +389,18 @@ original_code: String, } +/// Attempts to apply fixes to a single crate. +/// +/// This runs `rustc` (possibly multiple times) to gather suggestions from the +/// compiler and applies them to the files on disk. fn rustfix_crate( lock_addr: &str, rustc: &ProcessBuilder, filename: &Path, args: &FixArgs, + config: &Config, ) -> Result { - args.verify_not_preparing_for_enabled_edition()?; + args.check_edition_and_send_status(config)?; // First up, we want to make sure that each crate is only checked by one // process at a time. If two invocations concurrently check a crate then @@ -481,7 +588,7 @@ // Attempt to read the source code for this file. If this fails then // that'd be pretty surprising, so log a message and otherwise keep // going. 
- let code = match util::paths::read(file.as_ref()) { + let code = match paths::read(file.as_ref()) { Ok(s) => s, Err(e) => { warn!("failed to read `{}`: {}", file, e); @@ -578,129 +685,146 @@ Ok(()) } -#[derive(Default)] +/// Various command-line options and settings used when `cargo` is running as +/// a proxy for `rustc` during the fix operation. struct FixArgs { - file: Option, - prepare_for_edition: PrepareFor, + /// This is the `.rs` file that is being fixed. + file: PathBuf, + /// If `--edition` is used to migrate to the next edition, this is the + /// edition we are migrating towards. + prepare_for_edition: Option, + /// `true` if `--edition-idioms` is enabled. idioms: bool, + /// The current edition. + /// + /// `None` if on 2015. enabled_edition: Option, + /// Other command-line arguments not reflected by other fields in + /// `FixArgs`. other: Vec, - rustc: Option, + /// Path to the `rustc` executable. + rustc: PathBuf, + /// Console output flags (`--error-format`, `--json`, etc.). + /// + /// The normal fix procedure always uses `--json`, so it overrides what + /// Cargo normally passes when applying fixes. When displaying warnings or + /// errors, it will use these flags. 
format_args: Vec, } -enum PrepareFor { - Next, - Edition(Edition), - None, -} - -impl Default for PrepareFor { - fn default() -> PrepareFor { - PrepareFor::None - } -} - impl FixArgs { fn get() -> Result { - let mut ret = FixArgs::default(); - - ret.rustc = env::args_os().nth(1).map(PathBuf::from); + let rustc = env::args_os() + .nth(1) + .map(PathBuf::from) + .ok_or_else(|| anyhow::anyhow!("expected rustc as first argument"))?; + let mut file = None; + let mut enabled_edition = None; + let mut other = Vec::new(); + let mut format_args = Vec::new(); for arg in env::args_os().skip(2) { let path = PathBuf::from(arg); if path.extension().and_then(|s| s.to_str()) == Some("rs") && path.exists() { - ret.file = Some(path); + file = Some(path); continue; } if let Some(s) = path.to_str() { if let Some(edition) = s.strip_prefix("--edition=") { - ret.enabled_edition = Some(edition.parse()?); + enabled_edition = Some(edition.parse()?); continue; } if s.starts_with("--error-format=") || s.starts_with("--json=") { // Cargo may add error-format in some cases, but `cargo // fix` wants to add its own. 
- ret.format_args.push(s.to_string()); + format_args.push(s.to_string()); continue; } } - ret.other.push(path.into()); - } - if let Ok(s) = env::var(PREPARE_FOR_ENV) { - ret.prepare_for_edition = PrepareFor::Edition(s.parse()?); - } else if env::var(EDITION_ENV).is_ok() { - ret.prepare_for_edition = PrepareFor::Next; + other.push(path.into()); } + let file = file.ok_or_else(|| anyhow::anyhow!("could not find .rs file in rustc args"))?; + let idioms = env::var(IDIOMS_ENV).is_ok(); - ret.idioms = env::var(IDIOMS_ENV).is_ok(); - Ok(ret) + let prepare_for_edition = env::var(EDITION_ENV).ok().map(|_| { + enabled_edition + .unwrap_or(Edition::Edition2015) + .saturating_next() + }); + + Ok(FixArgs { + file, + prepare_for_edition, + idioms, + enabled_edition, + other, + rustc, + format_args, + }) } fn apply(&self, cmd: &mut Command) { - if let Some(path) = &self.file { - cmd.arg(path); - } - + cmd.arg(&self.file); cmd.args(&self.other).arg("--cap-lints=warn"); if let Some(edition) = self.enabled_edition { cmd.arg("--edition").arg(edition.to_string()); - if self.idioms && edition >= Edition::Edition2018 { - cmd.arg("-Wrust-2018-idioms"); + if self.idioms && edition.supports_idiom_lint() { + cmd.arg(format!("-Wrust-{}-idioms", edition)); } } - if let Some(edition) = self.prepare_for_edition_resolve() { - cmd.arg("-W").arg(format!("rust-{}-compatibility", edition)); + if let Some(edition) = self.prepare_for_edition { + if edition.supports_compat_lint() { + cmd.arg("-W").arg(format!("rust-{}-compatibility", edition)); + } } } - /// Verifies that we're not both preparing for an enabled edition and enabling - /// the edition. - /// - /// This indicates that `cargo fix --prepare-for` is being executed out of - /// order with enabling the edition itself, meaning that we wouldn't - /// actually be able to fix anything! If it looks like this is happening - /// then yield an error to the user, indicating that this is happening. 
- fn verify_not_preparing_for_enabled_edition(&self) -> CargoResult<()> { - let edition = match self.prepare_for_edition_resolve() { - Some(s) => s, - None => return Ok(()), - }; - let enabled = match self.enabled_edition { - Some(s) => s, - None => return Ok(()), - }; - if edition != enabled { - return Ok(()); - } - let path = match &self.file { + /// Validates the edition, and sends a message indicating what is being + /// done. + fn check_edition_and_send_status(&self, config: &Config) -> CargoResult<()> { + let to_edition = match self.prepare_for_edition { Some(s) => s, - None => return Ok(()), + None => { + return Message::Fixing { + file: self.file.display().to_string(), + } + .post(); + } }; - - Message::EditionAlreadyEnabled { - file: path.display().to_string(), - edition: edition.to_string(), - } - .post()?; - - process::exit(1); - } - - fn prepare_for_edition_resolve(&self) -> Option { - match self.prepare_for_edition { - PrepareFor::Edition(s) => Some(s), - PrepareFor::Next => Some(self.next_edition()), - PrepareFor::None => None, - } - } - - fn next_edition(&self) -> Edition { - match self.enabled_edition { - None | Some(Edition::Edition2015) => Edition::Edition2018, - Some(Edition::Edition2018) => Edition::Edition2018, // TODO: Change to 2021 when rustc is ready for it. - Some(Edition::Edition2021) => Edition::Edition2021, + // Unfortunately determining which cargo targets are being built + // isn't easy, and each target can be a different edition. The + // cargo-as-rustc fix wrapper doesn't know anything about the + // workspace, so it can't check for the `cargo-features` unstable + // opt-in. As a compromise, this just restricts to the nightly + // toolchain. + // + // Unfortunately this results in a pretty poor error message when + // multiple jobs run in parallel (the error appears multiple + // times). Hopefully this doesn't happen often in practice. 
+ if !to_edition.is_stable() && !config.nightly_features_allowed { + bail!( + "cannot migrate {} to edition {to_edition}\n\ + Edition {to_edition} is unstable and not allowed in this release, \ + consider trying the nightly release channel.", + self.file.display(), + to_edition = to_edition + ); + } + let from_edition = self.enabled_edition.unwrap_or(Edition::Edition2015); + if from_edition == to_edition { + Message::EditionAlreadyEnabled { + file: self.file.display().to_string(), + edition: to_edition, + } + .post() + } else { + Message::Migrating { + file: self.file.display().to_string(), + from_edition, + to_edition, + } + .post() } } } diff -Nru cargo-0.52.0/src/cargo/ops/lockfile.rs cargo-0.54.0/src/cargo/ops/lockfile.rs --- cargo-0.52.0/src/cargo/ops/lockfile.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/lockfile.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,10 +1,12 @@ use std::io::prelude::*; use crate::core::{resolver, Resolve, ResolveVersion, Workspace}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::toml as cargo_toml; use crate::util::Filesystem; +use anyhow::Context as _; + pub fn load_pkg_lockfile(ws: &Workspace<'_>) -> CargoResult> { if !ws.root().join("Cargo.lock").exists() { return Ok(None); @@ -15,14 +17,14 @@ let mut s = String::new(); f.read_to_string(&mut s) - .chain_err(|| format!("failed to read file: {}", f.path().display()))?; + .with_context(|| format!("failed to read file: {}", f.path().display()))?; let resolve = (|| -> CargoResult> { let resolve: toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?; let v: resolver::EncodableResolve = resolve.try_into()?; Ok(Some(v.into_resolve(&s, ws)?)) })() - .chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?; + .with_context(|| format!("failed to parse lock file at: {}", f.path().display()))?; Ok(resolve) } @@ -80,7 +82,7 @@ f.write_all(out.as_bytes())?; Ok(()) }) - 
.chain_err(|| format!("failed to write {}", ws.root().join("Cargo.lock").display()))?; + .with_context(|| format!("failed to write {}", ws.root().join("Cargo.lock").display()))?; Ok(()) } @@ -96,7 +98,7 @@ f.read_to_string(&mut s)?; Ok(s) }); - let out = serialize_resolve(resolve, orig.as_ref().ok().map(|s| &**s)); + let out = serialize_resolve(resolve, orig.as_deref().ok()); (orig.ok(), out, ws_root) } diff -Nru cargo-0.52.0/src/cargo/ops/mod.rs cargo-0.54.0/src/cargo/ops/mod.rs --- cargo-0.52.0/src/cargo/ops/mod.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,6 +1,6 @@ pub use self::cargo_clean::{clean, CleanOptions}; pub use self::cargo_compile::{ - compile, compile_with_exec, compile_ws, create_bcx, resolve_all_features, CompileOptions, + compile, compile_with_exec, compile_ws, create_bcx, print, resolve_all_features, CompileOptions, }; pub use self::cargo_compile::{CompileFilter, FilterRule, LibRule, Packages}; pub use self::cargo_doc::{doc, DocOptions}; @@ -31,6 +31,7 @@ mod cargo_clean; mod cargo_compile; +pub mod cargo_config; mod cargo_doc; mod cargo_fetch; mod cargo_generate_lockfile; diff -Nru cargo-0.52.0/src/cargo/ops/registry/auth.rs cargo-0.54.0/src/cargo/ops/registry/auth.rs --- cargo-0.52.0/src/cargo/ops/registry/auth.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/registry/auth.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,9 +1,9 @@ //! Registry authentication support. 
use crate::sources::CRATES_IO_REGISTRY; -use crate::util::{config, process_error, CargoResult, CargoResultExt, Config}; -use anyhow::bail; -use anyhow::format_err; +use crate::util::{config, CargoResult, Config}; +use anyhow::{bail, format_err, Context as _}; +use cargo_util::ProcessError; use std::io::{Read, Write}; use std::path::PathBuf; use std::process::{Command, Stdio}; @@ -134,7 +134,7 @@ } Action::Erase => {} } - let mut child = cmd.spawn().chain_err(|| { + let mut child = cmd.spawn().with_context(|| { let verb = match action { Action::Get => "fetch", Action::Store(_) => "store", @@ -157,7 +157,7 @@ .as_mut() .unwrap() .read_to_string(&mut buffer) - .chain_err(|| { + .with_context(|| { format!( "failed to read token from registry credential process `{}`", exe.display() @@ -176,7 +176,7 @@ token = Some(buffer); } Action::Store(token) => { - writeln!(child.stdin.as_ref().unwrap(), "{}", token).chain_err(|| { + writeln!(child.stdin.as_ref().unwrap(), "{}", token).with_context(|| { format!( "failed to send token to registry credential process `{}`", exe.display() @@ -185,7 +185,7 @@ } Action::Erase => {} } - let status = child.wait().chain_err(|| { + let status = child.wait().with_context(|| { format!( "registry credential process `{}` exit failure", exe.display() @@ -197,7 +197,7 @@ Action::Store(_) => "failed to store token to registry", Action::Erase => "failed to erase token from registry", }; - return Err(process_error( + return Err(ProcessError::new( &format!( "registry credential process `{}` {} `{}`", exe.display(), diff -Nru cargo-0.52.0/src/cargo/ops/registry.rs cargo-0.54.0/src/cargo/ops/registry.rs --- cargo-0.52.0/src/cargo/ops/registry.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/registry.rs 2021-04-27 14:35:53.000000000 +0000 @@ -7,7 +7,8 @@ use std::time::Duration; use std::{cmp, env}; -use anyhow::{bail, format_err}; +use anyhow::{bail, format_err, Context as _}; +use cargo_util::paths; use crates_io::{self, NewCrate, 
NewCrateDependency, Registry}; use curl::easy::{Easy, InfoType, SslOpt, SslVersion}; use log::{log, Level}; @@ -15,15 +16,16 @@ use crate::core::dependency::DepKind; use crate::core::manifest::ManifestMetadata; +use crate::core::resolver::CliFeatures; use crate::core::source::Source; use crate::core::{Package, SourceId, Workspace}; use crate::ops; use crate::sources::{RegistrySource, SourceConfigMap, CRATES_IO_REGISTRY}; use crate::util::config::{self, Config, SslVersionConfig, SslVersionConfigRange}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::important_paths::find_root_manifest_for_wd; +use crate::util::validate_package_name; use crate::util::IntoUrl; -use crate::util::{paths, validate_package_name}; use crate::{drop_print, drop_println, version}; mod auth; @@ -51,9 +53,7 @@ pub targets: Vec, pub dry_run: bool, pub registry: Option, - pub features: Vec, - pub all_features: bool, - pub no_default_features: bool, + pub cli_features: CliFeatures, } pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> { @@ -111,9 +111,7 @@ allow_dirty: opts.allow_dirty, targets: opts.targets.clone(), jobs: opts.jobs, - features: opts.features.clone(), - all_features: opts.all_features, - no_default_features: opts.no_default_features, + cli_features: opts.cli_features.clone(), }, )? 
.unwrap(); @@ -258,7 +256,7 @@ .as_ref() .map(|readme| { paths::read(&pkg.root().join(readme)) - .chain_err(|| format!("failed to read `readme` file for package `{}`", pkg)) + .with_context(|| format!("failed to read `readme` file for package `{}`", pkg)) }) .transpose()?; if let Some(ref file) = *license_file { @@ -286,65 +284,62 @@ None => BTreeMap::new(), }; - let publish = registry.publish( - &NewCrate { - name: pkg.name().to_string(), - vers: pkg.version().to_string(), - deps, - features: string_features, - authors: authors.clone(), - description: description.clone(), - homepage: homepage.clone(), - documentation: documentation.clone(), - keywords: keywords.clone(), - categories: categories.clone(), - readme: readme_content, - readme_file: readme.clone(), - repository: repository.clone(), - license: license.clone(), - license_file: license_file.clone(), - badges: badges.clone(), - links: links.clone(), - v: None, - }, - tarball, - ); - - match publish { - Ok(warnings) => { - if !warnings.invalid_categories.is_empty() { - let msg = format!( - "the following are not valid category slugs and were \ - ignored: {}. Please see https://crates.io/category_slugs \ - for the list of all category slugs. 
\ - ", - warnings.invalid_categories.join(", ") - ); - config.shell().warn(&msg)?; - } + let warnings = registry + .publish( + &NewCrate { + name: pkg.name().to_string(), + vers: pkg.version().to_string(), + deps, + features: string_features, + authors: authors.clone(), + description: description.clone(), + homepage: homepage.clone(), + documentation: documentation.clone(), + keywords: keywords.clone(), + categories: categories.clone(), + readme: readme_content, + readme_file: readme.clone(), + repository: repository.clone(), + license: license.clone(), + license_file: license_file.clone(), + badges: badges.clone(), + links: links.clone(), + v: None, + }, + tarball, + ) + .with_context(|| format!("failed to publish to registry at {}", registry.host()))?; - if !warnings.invalid_badges.is_empty() { - let msg = format!( - "the following are not valid badges and were ignored: {}. \ - Either the badge type specified is unknown or a required \ - attribute is missing. Please see \ - https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata \ - for valid badge types and their required attributes.", - warnings.invalid_badges.join(", ") - ); - config.shell().warn(&msg)?; - } + if !warnings.invalid_categories.is_empty() { + let msg = format!( + "the following are not valid category slugs and were \ + ignored: {}. Please see https://crates.io/category_slugs \ + for the list of all category slugs. \ + ", + warnings.invalid_categories.join(", ") + ); + config.shell().warn(&msg)?; + } - if !warnings.other.is_empty() { - for msg in warnings.other { - config.shell().warn(&msg)?; - } - } + if !warnings.invalid_badges.is_empty() { + let msg = format!( + "the following are not valid badges and were ignored: {}. \ + Either the badge type specified is unknown or a required \ + attribute is missing. 
Please see \ + https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata \ + for valid badge types and their required attributes.", + warnings.invalid_badges.join(", ") + ); + config.shell().warn(&msg)?; + } - Ok(()) + if !warnings.other.is_empty() { + for msg in warnings.other { + config.shell().warn(&msg)?; } - Err(e) => Err(e), } + + Ok(()) } /// Returns the index and token from the config file for the given registry. @@ -457,7 +452,7 @@ let cfg = src.config(); let mut updated_cfg = || { src.update() - .chain_err(|| format!("failed to update {}", sid))?; + .with_context(|| format!("failed to update {}", sid))?; src.config() }; @@ -731,9 +726,9 @@ input .lock() .read_line(&mut line) - .chain_err(|| "failed to read stdin") - .map_err(anyhow::Error::from)?; - // Automatically remove `cargo login` from an inputted token to allow direct pastes from `registry.host()`/me. + .with_context(|| "failed to read stdin")?; + // Automatically remove `cargo login` from an inputted token to + // allow direct pastes from `registry.host()`/me. 
line.replace("cargo login", "").trim().to_string() } }; @@ -820,9 +815,13 @@ if let Some(ref v) = opts.to_add { let v = v.iter().map(|s| &s[..]).collect::>(); - let msg = registry - .add_owners(&name, &v) - .map_err(|e| format_err!("failed to invite owners to crate {}: {}", name, e))?; + let msg = registry.add_owners(&name, &v).with_context(|| { + format!( + "failed to invite owners to crate `{}` on registry at {}", + name, + registry.host() + ) + })?; config.shell().status("Owner", msg)?; } @@ -832,15 +831,23 @@ config .shell() .status("Owner", format!("removing {:?} from crate {}", v, name))?; - registry - .remove_owners(&name, &v) - .chain_err(|| format!("failed to remove owners from crate {}", name))?; + registry.remove_owners(&name, &v).with_context(|| { + format!( + "failed to remove owners from crate `{}` on registry at {}", + name, + registry.host() + ) + })?; } if opts.list { - let owners = registry - .list_owners(&name) - .chain_err(|| format!("failed to list owners of crate {}", name))?; + let owners = registry.list_owners(&name).with_context(|| { + format!( + "failed to list owners of crate `{}` on registry at {}", + name, + registry.host() + ) + })?; for owner in owners.iter() { drop_print!(config, "{}", owner.login); match (owner.name.as_ref(), owner.email.as_ref()) { @@ -882,16 +889,19 @@ config .shell() .status("Unyank", format!("{}:{}", name, version))?; - registry - .unyank(&name, &version) - .chain_err(|| "failed to undo a yank")?; + registry.unyank(&name, &version).with_context(|| { + format!( + "failed to undo a yank from the registry at {}", + registry.host() + ) + })?; } else { config .shell() .status("Yank", format!("{}:{}", name, version))?; registry .yank(&name, &version) - .chain_err(|| "failed to yank")?; + .with_context(|| format!("failed to yank from the registry at {}", registry.host()))?; } Ok(()) @@ -937,9 +947,12 @@ } let (mut registry, _, source_id) = registry(config, None, index, reg, false, false)?; - let (crates, total_crates) 
= registry - .search(query, limit) - .chain_err(|| "failed to retrieve search results from the registry")?; + let (crates, total_crates) = registry.search(query, limit).with_context(|| { + format!( + "failed to retrieve search results from the registry at {}", + registry.host() + ) + })?; let names = crates .iter() diff -Nru cargo-0.52.0/src/cargo/ops/resolve.rs cargo-0.54.0/src/cargo/ops/resolve.rs --- cargo-0.52.0/src/cargo/ops/resolve.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/resolve.rs 2021-04-27 14:35:53.000000000 +0000 @@ -11,16 +11,21 @@ //! providing the most power and flexibility. use crate::core::compiler::{CompileKind, RustcTargetData}; -use crate::core::registry::PackageRegistry; -use crate::core::resolver::features::{FeatureResolver, ForceAllTargets, ResolvedFeatures}; -use crate::core::resolver::{self, HasDevUnits, Resolve, ResolveOpts}; +use crate::core::registry::{LockedPatchDependency, PackageRegistry}; +use crate::core::resolver::features::{ + CliFeatures, FeatureOpts, FeatureResolver, ForceAllTargets, RequestedFeatures, ResolvedFeatures, +}; +use crate::core::resolver::{self, HasDevUnits, Resolve, ResolveOpts, ResolveVersion}; use crate::core::summary::Summary; use crate::core::Feature; -use crate::core::{PackageId, PackageIdSpec, PackageSet, Source, SourceId, Workspace}; +use crate::core::{ + GitReference, PackageId, PackageIdSpec, PackageSet, Source, SourceId, Workspace, +}; use crate::ops; use crate::sources::PathSource; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::{profile, CanonicalUrl}; +use anyhow::Context as _; use log::{debug, trace}; use std::collections::HashSet; @@ -74,9 +79,9 @@ /// members. In this case, `opts.all_features` must be `true`. 
pub fn resolve_ws_with_opts<'cfg>( ws: &Workspace<'cfg>, - target_data: &RustcTargetData, + target_data: &RustcTargetData<'cfg>, requested_targets: &[CompileKind], - opts: &ResolveOpts, + cli_features: &CliFeatures, specs: &[PackageIdSpec], has_dev_units: HasDevUnits, force_all_targets: ForceAllTargets, @@ -118,7 +123,8 @@ let resolved_with_overrides = resolve_with_previous( &mut registry, ws, - opts, + cli_features, + has_dev_units, resolve.as_ref(), None, specs, @@ -128,7 +134,7 @@ let pkg_set = get_resolved_packages(&resolved_with_overrides, registry)?; let member_ids = ws - .members_with_features(specs, &opts.features)? + .members_with_features(specs, cli_features)? .into_iter() .map(|(p, _fts)| p.package_id()) .collect::>(); @@ -141,16 +147,16 @@ force_all_targets, )?; + let feature_opts = FeatureOpts::new(ws, has_dev_units, force_all_targets)?; let resolved_features = FeatureResolver::resolve( ws, target_data, &resolved_with_overrides, &pkg_set, - &opts.features, + cli_features, specs, requested_targets, - has_dev_units, - force_all_targets, + feature_opts, )?; Ok(WorkspaceResolve { @@ -169,7 +175,8 @@ let mut resolve = resolve_with_previous( registry, ws, - &ResolveOpts::everything(), + &CliFeatures::new_all(true), + HasDevUnits::Yes, prev.as_ref(), None, &[], @@ -200,7 +207,8 @@ pub fn resolve_with_previous<'cfg>( registry: &mut PackageRegistry<'cfg>, ws: &Workspace<'cfg>, - opts: &ResolveOpts, + cli_features: &CliFeatures, + has_dev_units: HasDevUnits, previous: Option<&Resolve>, to_avoid: Option<&HashSet>, specs: &[PackageIdSpec], @@ -238,7 +246,7 @@ // locked. 
let mut avoid_patch_ids = HashSet::new(); if register_patches { - for (url, patches) in ws.root_patch() { + for (url, patches) in ws.root_patch()?.iter() { let previous = match previous { Some(r) => r, None => { @@ -249,26 +257,91 @@ continue; } }; - let patches = patches - .iter() - .map(|dep| { - let unused = previous.unused_patches().iter().cloned(); - let candidates = previous.iter().chain(unused); - match candidates - .filter(pre_patch_keep) - .find(|&id| dep.matches_id(id)) - { - Some(id) => { - let mut locked_dep = dep.clone(); - locked_dep.lock_to(id); - (dep, Some((locked_dep, id))) + + // This is a list of pairs where the first element of the pair is + // the raw `Dependency` which matches what's listed in `Cargo.toml`. + // The second element is, if present, the "locked" version of + // the `Dependency` as well as the `PackageId` that it previously + // resolved to. This second element is calculated by looking at the + // previous resolve graph, which is primarily what's done here to + // build the `registrations` list. + let mut registrations = Vec::new(); + for dep in patches { + let candidates = || { + previous + .iter() + .chain(previous.unused_patches().iter().cloned()) + .filter(&pre_patch_keep) + }; + + let lock = match candidates().find(|id| dep.matches_id(*id)) { + // If we found an exactly matching candidate in our list of + // candidates, then that's the one to use. + Some(package_id) => { + let mut locked_dep = dep.clone(); + locked_dep.lock_to(package_id); + Some(LockedPatchDependency { + dependency: locked_dep, + package_id, + alt_package_id: None, + }) + } + None => { + // If the candidate does not have a matching source id + // then we may still have a lock candidate. If we're + // loading a v2-encoded resolve graph and `dep` is a + // git dep with `branch = 'master'`, then this should + // also match candidates without `branch = 'master'` + // (which is now treated separately in Cargo). 
+ // + // In this scenario we try to convert candidates located + // in the resolve graph to explicitly having the + // `master` branch (if they otherwise point to + // `DefaultBranch`). If this works and our `dep` + // matches that then this is something we'll lock to. + match candidates().find(|&id| { + match master_branch_git_source(id, previous) { + Some(id) => dep.matches_id(id), + None => false, + } + }) { + Some(id_using_default) => { + let id_using_master = id_using_default.with_source_id( + dep.source_id().with_precise( + id_using_default + .source_id() + .precise() + .map(|s| s.to_string()), + ), + ); + + let mut locked_dep = dep.clone(); + locked_dep.lock_to(id_using_master); + Some(LockedPatchDependency { + dependency: locked_dep, + package_id: id_using_master, + // Note that this is where the magic + // happens, where the resolve graph + // probably has locks pointing to + // DefaultBranch sources, and by including + // this here those will get transparently + // rewritten to Branch("master") which we + // have a lock entry for. + alt_package_id: Some(id_using_default), + }) + } + + // No locked candidate was found + None => None, } - None => (dep, None), } - }) - .collect::>(); + }; + + registrations.push((dep, lock)); + } + let canonical = CanonicalUrl::new(url)?; - for (orig_patch, unlock_id) in registry.patch(url, &patches)? { + for (orig_patch, unlock_id) in registry.patch(url, ®istrations)? { // Avoid the locked patch ID. avoid_patch_ids.insert(unlock_id); // Also avoid the thing it is patching. @@ -283,12 +356,13 @@ let keep = |p: &PackageId| pre_patch_keep(p) && !avoid_patch_ids.contains(p); + let dev_deps = ws.require_optional_deps() || has_dev_units == HasDevUnits::Yes; // In the case where a previous instance of resolve is available, we // want to lock as many packages as possible to the previous version // without disturbing the graph structure. 
if let Some(r) = previous { trace!("previous: {:?}", r); - register_previous_locks(ws, registry, r, &keep); + register_previous_locks(ws, registry, r, &keep, dev_deps); } // Everything in the previous lock file we want to keep is prioritized // in dependency selection if it comes up, aka we want to have @@ -313,15 +387,15 @@ } let summaries: Vec<(Summary, ResolveOpts)> = ws - .members_with_features(specs, &opts.features)? + .members_with_features(specs, cli_features)? .into_iter() .map(|(member, features)| { let summary = registry.lock(member.summary().clone()); ( summary, ResolveOpts { - dev_deps: opts.dev_deps, - features, + dev_deps, + features: RequestedFeatures::CliFeatures(features), }, ) }) @@ -404,7 +478,7 @@ for (path, definition) in paths { let id = SourceId::for_path(&path)?; let mut source = PathSource::new_recursive(&path, id, ws.config()); - source.update().chain_err(|| { + source.update().with_context(|| { format!( "failed to update path override `{}` \ (defined in `{}`)", @@ -446,6 +520,7 @@ registry: &mut PackageRegistry<'_>, resolve: &Resolve, keep: &dyn Fn(&PackageId) -> bool, + dev_deps: bool, ) { let path_pkg = |id: SourceId| { if !id.is_path() { @@ -555,6 +630,11 @@ continue; } + // If dev-dependencies aren't being resolved, skip them. + if !dep.is_transitive() && !dev_deps { + continue; + } + // If this is a path dependency, then try to push it onto our // worklist. if let Some(pkg) = path_pkg(dep.source_id()) { @@ -597,7 +677,22 @@ .deps_not_replaced(node) .map(|p| p.0) .filter(keep) - .collect(); + .collect::>(); + + // In the v2 lockfile format and prior the `branch=master` dependency + // directive was serialized the same way as the no-branch-listed + // directive. Nowadays in Cargo, however, these two directives are + // considered distinct and are no longer represented the same way. To + // maintain compatibility with older lock files we register locked nodes + // for *both* the master branch and the default branch. 
+ // + // Note that this is only applicable for loading older resolves now at + // this point. All new lock files are encoded as v3-or-later, so this is + // just compat for loading an old lock file successfully. + if let Some(node) = master_branch_git_source(node, resolve) { + registry.register_lock(node, deps.clone()); + } + registry.register_lock(node, deps); } @@ -612,3 +707,17 @@ } } } + +fn master_branch_git_source(id: PackageId, resolve: &Resolve) -> Option { + if resolve.version() <= ResolveVersion::V2 { + let source = id.source_id(); + if let Some(GitReference::DefaultBranch) = source.git_reference() { + let new_source = + SourceId::for_git(source.url(), GitReference::Branch("master".to_string())) + .unwrap() + .with_precise(source.precise().map(|s| s.to_string())); + return Some(id.with_source_id(new_source)); + } + } + None +} diff -Nru cargo-0.52.0/src/cargo/ops/tree/graph.rs cargo-0.54.0/src/cargo/ops/tree/graph.rs --- cargo-0.52.0/src/cargo/ops/tree/graph.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/tree/graph.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,7 +3,7 @@ use super::TreeOptions; use crate::core::compiler::{CompileKind, RustcTargetData}; use crate::core::dependency::DepKind; -use crate::core::resolver::features::{FeaturesFor, RequestedFeatures, ResolvedFeatures}; +use crate::core::resolver::features::{CliFeatures, FeaturesFor, ResolvedFeatures}; use crate::core::resolver::Resolve; use crate::core::{FeatureMap, FeatureValue, Package, PackageId, PackageIdSpec, Workspace}; use crate::util::interning::InternedString; @@ -248,16 +248,16 @@ resolve: &Resolve, resolved_features: &ResolvedFeatures, specs: &[PackageIdSpec], - requested_features: &RequestedFeatures, - target_data: &RustcTargetData, + cli_features: &CliFeatures, + target_data: &RustcTargetData<'_>, requested_kinds: &[CompileKind], package_map: HashMap, opts: &TreeOptions, ) -> CargoResult> { let mut graph = Graph::new(package_map); - let mut members_with_features 
= ws.members_with_features(specs, requested_features)?; + let mut members_with_features = ws.members_with_features(specs, cli_features)?; members_with_features.sort_unstable_by_key(|e| e.0.package_id()); - for (member, requested_features) in members_with_features { + for (member, cli_features) in members_with_features { let member_id = member.package_id(); let features_for = FeaturesFor::from_for_host(member.proc_macro()); for kind in requested_kinds { @@ -273,7 +273,7 @@ ); if opts.graph_features { let fmap = resolve.summary(member_id).features(); - add_cli_features(&mut graph, member_index, &requested_features, fmap); + add_cli_features(&mut graph, member_index, &cli_features, fmap); } } } @@ -294,7 +294,7 @@ resolved_features: &ResolvedFeatures, package_id: PackageId, features_for: FeaturesFor, - target_data: &RustcTargetData, + target_data: &RustcTargetData<'_>, requested_kind: CompileKind, opts: &TreeOptions, ) -> usize { @@ -392,7 +392,7 @@ EdgeKind::Dep(dep.kind()), ); } - for feature in dep.features() { + for feature in dep.features().iter() { add_feature( graph, *feature, @@ -459,48 +459,66 @@ fn add_cli_features( graph: &mut Graph<'_>, package_index: usize, - requested_features: &RequestedFeatures, + cli_features: &CliFeatures, feature_map: &FeatureMap, ) { // NOTE: Recursive enabling of features will be handled by // add_internal_features. - // Create a list of feature names requested on the command-line. - let mut to_add: Vec = Vec::new(); - if requested_features.all_features { - to_add.extend(feature_map.keys().copied()); - // Add optional deps. - for (dep_name, deps) in &graph.dep_name_map[&package_index] { - if deps.iter().any(|(_idx, is_optional)| *is_optional) { - to_add.push(*dep_name); - } - } + // Create a set of feature names requested on the command-line. 
+ let mut to_add: HashSet = HashSet::new(); + if cli_features.all_features { + to_add.extend(feature_map.keys().map(|feat| FeatureValue::Feature(*feat))); } else { - if requested_features.uses_default_features { - to_add.push(InternedString::new("default")); + if cli_features.uses_default_features { + to_add.insert(FeatureValue::Feature(InternedString::new("default"))); } - to_add.extend(requested_features.features.iter().copied()); + to_add.extend(cli_features.features.iter().cloned()); }; // Add each feature as a node, and mark as "from command-line" in graph.cli_features. - for name in to_add { - if name.contains('/') { - let mut parts = name.splitn(2, '/'); - let dep_name = InternedString::new(parts.next().unwrap()); - let feat_name = InternedString::new(parts.next().unwrap()); - for (dep_index, is_optional) in graph.dep_name_map[&package_index][&dep_name].clone() { - if is_optional { - // Activate the optional dep on self. - let index = - add_feature(graph, dep_name, None, package_index, EdgeKind::Feature); + for fv in to_add { + match fv { + FeatureValue::Feature(feature) => { + let index = add_feature(graph, feature, None, package_index, EdgeKind::Feature); + graph.cli_features.insert(index); + } + // This is enforced by CliFeatures. + FeatureValue::Dep { .. } => panic!("unexpected cli dep feature {}", fv), + FeatureValue::DepFeature { + dep_name, + dep_feature, + dep_prefix: _, + weak, + } => { + let dep_connections = match graph.dep_name_map[&package_index].get(&dep_name) { + // Clone to deal with immutable borrow of `graph`. :( + Some(dep_connections) => dep_connections.clone(), + None => { + // --features bar?/feat where `bar` is not activated should be ignored. + // If this wasn't weak, then this is a bug. 
+ if weak { + continue; + } + panic!( + "missing dep graph connection for CLI feature `{}` for member {:?}\n\ + Please file a bug report at https://github.com/rust-lang/cargo/issues", + fv, + graph.nodes.get(package_index) + ); + } + }; + for (dep_index, is_optional) in dep_connections { + if is_optional { + // Activate the optional dep on self. + let index = + add_feature(graph, dep_name, None, package_index, EdgeKind::Feature); + graph.cli_features.insert(index); + } + let index = add_feature(graph, dep_feature, None, dep_index, EdgeKind::Feature); graph.cli_features.insert(index); } - let index = add_feature(graph, feat_name, None, dep_index, EdgeKind::Feature); - graph.cli_features.insert(index); } - } else { - let index = add_feature(graph, name, None, package_index, EdgeKind::Feature); - graph.cli_features.insert(index); } } } @@ -570,6 +588,10 @@ package_index, ); } + // Dependencies are already shown in the graph as dep edges. I'm + // uncertain whether or not this might be confusing in some cases + // (like feature `"somefeat" = ["dep:somedep"]`), so maybe in the + // future consider explicitly showing this? FeatureValue::Dep { .. 
} => {} FeatureValue::DepFeature { dep_name, diff -Nru cargo-0.52.0/src/cargo/ops/tree/mod.rs cargo-0.54.0/src/cargo/ops/tree/mod.rs --- cargo-0.52.0/src/cargo/ops/tree/mod.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/tree/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,9 +3,7 @@ use self::format::Pattern; use crate::core::compiler::{CompileKind, RustcTargetData}; use crate::core::dependency::DepKind; -use crate::core::resolver::{ - features::RequestedFeatures, ForceAllTargets, HasDevUnits, ResolveOpts, -}; +use crate::core::resolver::{features::CliFeatures, ForceAllTargets, HasDevUnits}; use crate::core::{Package, PackageId, PackageIdSpec, Workspace}; use crate::ops::{self, Packages}; use crate::util::{CargoResult, Config}; @@ -21,9 +19,7 @@ pub use {graph::EdgeKind, graph::Node}; pub struct TreeOptions { - pub features: Vec, - pub no_default_features: bool, - pub all_features: bool, + pub cli_features: CliFeatures, /// The packages to display the tree for. pub packages: Packages, /// The platform to filter for. 
@@ -138,12 +134,6 @@ let requested_kinds = CompileKind::from_requested_targets(ws.config(), &requested_targets)?; let target_data = RustcTargetData::new(ws, &requested_kinds)?; let specs = opts.packages.to_package_id_specs(ws)?; - let requested_features = RequestedFeatures::from_command_line( - &opts.features, - opts.all_features, - !opts.no_default_features, - ); - let resolve_opts = ResolveOpts::new(/*dev_deps*/ true, requested_features); let has_dev = if opts .edge_kinds .contains(&EdgeKind::Dep(DepKind::Development)) @@ -161,7 +151,7 @@ ws, &target_data, &requested_kinds, - &resolve_opts, + &opts.cli_features, &specs, has_dev, force_all, @@ -178,7 +168,7 @@ &ws_resolve.targeted_resolve, &ws_resolve.resolved_features, &specs, - &resolve_opts.features, + &opts.cli_features, &target_data, &requested_kinds, package_map, diff -Nru cargo-0.52.0/src/cargo/ops/vendor.rs cargo-0.54.0/src/cargo/ops/vendor.rs --- cargo-0.52.0/src/cargo/ops/vendor.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/ops/vendor.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2,9 +2,9 @@ use crate::core::{GitReference, Workspace}; use crate::ops; use crate::sources::path::PathSource; -use crate::util::Sha256; -use crate::util::{paths, CargoResult, CargoResultExt, Config}; -use anyhow::bail; +use crate::util::{CargoResult, Config}; +use anyhow::{bail, Context as _}; +use cargo_util::{paths, Sha256}; use serde::Serialize; use std::collections::HashSet; use std::collections::{BTreeMap, BTreeSet, HashMap}; @@ -28,8 +28,7 @@ extra_workspaces.push(ws); } let workspaces = extra_workspaces.iter().chain(Some(ws)).collect::>(); - let vendor_config = - sync(config, &workspaces, opts).chain_err(|| "failed to sync".to_string())?; + let vendor_config = sync(config, &workspaces, opts).with_context(|| "failed to sync")?; if config.shell().verbosity() != Verbosity::Quiet { crate::drop_eprint!( @@ -74,10 +73,7 @@ opts: &VendorOptions<'_>, ) -> CargoResult { let canonical_destination = 
opts.destination.canonicalize(); - let canonical_destination = canonical_destination - .as_ref() - .map(|p| &**p) - .unwrap_or(opts.destination); + let canonical_destination = canonical_destination.as_deref().unwrap_or(opts.destination); paths::create_dir_all(&canonical_destination)?; let mut to_remove = HashSet::new(); @@ -107,11 +103,11 @@ // crate to work with. for ws in workspaces { let (packages, resolve) = - ops::resolve_ws(ws).chain_err(|| "failed to load pkg lockfile")?; + ops::resolve_ws(ws).with_context(|| "failed to load pkg lockfile")?; packages .get_many(resolve.iter()) - .chain_err(|| "failed to download packages")?; + .with_context(|| "failed to download packages")?; for pkg in resolve.iter() { // Don't delete actual source code! @@ -139,11 +135,11 @@ // tables about them. for ws in workspaces { let (packages, resolve) = - ops::resolve_ws(ws).chain_err(|| "failed to load pkg lockfile")?; + ops::resolve_ws(ws).with_context(|| "failed to load pkg lockfile")?; packages .get_many(resolve.iter()) - .chain_err(|| "failed to download packages")?; + .with_context(|| "failed to download packages")?; for pkg in resolve.iter() { // No need to vendor path crates since they're already in the @@ -155,7 +151,7 @@ pkg, packages .get_one(pkg) - .chain_err(|| "failed to fetch package")? + .with_context(|| "failed to fetch package")? 
.clone(), ); @@ -219,7 +215,7 @@ let paths = pathsource.list_files(pkg)?; let mut map = BTreeMap::new(); cp_sources(src, &paths, &dst, &mut map, &mut tmp_buf) - .chain_err(|| format!("failed to copy over vendored sources for: {}", id))?; + .with_context(|| format!("failed to copy over vendored sources for: {}", id))?; // Finally, emit the metadata about this package let json = serde_json::json!({ @@ -344,7 +340,7 @@ } fn copy_and_checksum(src_path: &Path, dst_path: &Path, buf: &mut [u8]) -> CargoResult { - let mut src = File::open(src_path).chain_err(|| format!("failed to open {:?}", src_path))?; + let mut src = File::open(src_path).with_context(|| format!("failed to open {:?}", src_path))?; let mut dst_opts = OpenOptions::new(); dst_opts.write(true).create(true).truncate(true); #[cfg(unix)] @@ -352,25 +348,25 @@ use std::os::unix::fs::{MetadataExt, OpenOptionsExt}; let src_metadata = src .metadata() - .chain_err(|| format!("failed to stat {:?}", src_path))?; + .with_context(|| format!("failed to stat {:?}", src_path))?; dst_opts.mode(src_metadata.mode()); } let mut dst = dst_opts .open(dst_path) - .chain_err(|| format!("failed to create {:?}", dst_path))?; + .with_context(|| format!("failed to create {:?}", dst_path))?; // Not going to bother setting mode on pre-existing files, since there // shouldn't be any under normal conditions. 
let mut cksum = Sha256::new(); loop { let n = src .read(buf) - .chain_err(|| format!("failed to read from {:?}", src_path))?; + .with_context(|| format!("failed to read from {:?}", src_path))?; if n == 0 { break Ok(cksum.finish_hex()); } let data = &buf[..n]; cksum.update(data); dst.write_all(data) - .chain_err(|| format!("failed to write to {:?}", dst_path))?; + .with_context(|| format!("failed to write to {:?}", dst_path))?; } } diff -Nru cargo-0.52.0/src/cargo/sources/config.rs cargo-0.54.0/src/cargo/sources/config.rs --- cargo-0.52.0/src/cargo/sources/config.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/config.rs 2021-04-27 14:35:53.000000000 +0000 @@ -7,9 +7,9 @@ use crate::core::{GitReference, PackageId, Source, SourceId}; use crate::sources::{ReplacedSource, CRATES_IO_REGISTRY}; use crate::util::config::{self, ConfigRelativePath, OptValue}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::{Config, IntoUrl}; -use anyhow::bail; +use anyhow::{bail, Context as _}; use log::debug; use std::collections::{HashMap, HashSet}; use url::Url; @@ -280,7 +280,7 @@ return Ok(()); fn url(val: &config::Value, key: &str) -> CargoResult { - let url = val.val.into_url().chain_err(|| { + let url = val.val.into_url().with_context(|| { format!( "configuration key `{}` specified an invalid \ URL (in {})", diff -Nru cargo-0.52.0/src/cargo/sources/directory.rs cargo-0.54.0/src/cargo/sources/directory.rs --- cargo-0.52.0/src/cargo/sources/directory.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/directory.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2,14 +2,15 @@ use std::fmt::{self, Debug, Formatter}; use std::path::{Path, PathBuf}; -use serde::Deserialize; - use crate::core::source::MaybePackage; use crate::core::{Dependency, Package, PackageId, Source, SourceId, Summary}; use crate::sources::PathSource; -use crate::util::errors::{CargoResult, CargoResultExt}; -use 
crate::util::paths; -use crate::util::{Config, Sha256}; +use crate::util::errors::CargoResult; +use crate::util::Config; + +use anyhow::Context as _; +use cargo_util::{paths, Sha256}; +use serde::Deserialize; pub struct DirectorySource<'cfg> { source_id: SourceId, @@ -73,7 +74,7 @@ fn update(&mut self) -> CargoResult<()> { self.packages.clear(); - let entries = self.root.read_dir().chain_err(|| { + let entries = self.root.read_dir().with_context(|| { format!( "failed to read root of directory source: {}", self.root.display() @@ -117,7 +118,7 @@ let mut pkg = src.root_package()?; let cksum_file = path.join(".cargo-checksum.json"); - let cksum = paths::read(&path.join(cksum_file)).chain_err(|| { + let cksum = paths::read(&path.join(cksum_file)).with_context(|| { format!( "failed to load checksum `.cargo-checksum.json` \ of {} v{}", @@ -125,7 +126,7 @@ pkg.package_id().version() ) })?; - let cksum: Checksum = serde_json::from_str(&cksum).chain_err(|| { + let cksum: Checksum = serde_json::from_str(&cksum).with_context(|| { format!( "failed to decode `.cargo-checksum.json` of \ {} v{}", @@ -172,7 +173,7 @@ let file = pkg.root().join(file); let actual = Sha256::new() .update_path(&file) - .chain_err(|| format!("failed to calculate checksum of: {}", file.display()))? + .with_context(|| format!("failed to calculate checksum of: {}", file.display()))? .finish_hex(); if &*actual != cksum { anyhow::bail!( diff -Nru cargo-0.52.0/src/cargo/sources/git/source.rs cargo-0.54.0/src/cargo/sources/git/source.rs --- cargo-0.52.0/src/cargo/sources/git/source.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/git/source.rs 2021-04-27 14:35:53.000000000 +0000 @@ -126,12 +126,10 @@ // database, then try to resolve our reference with the preexisting // repository. 
(None, Some(db)) if self.config.offline() => { - let rev = db - .resolve(&self.manifest_reference, None) - .with_context(|| { - "failed to lookup reference in preexisting repository, and \ + let rev = db.resolve(&self.manifest_reference).with_context(|| { + "failed to lookup reference in preexisting repository, and \ can't check for updates in offline mode (--offline)" - })?; + })?; (db, rev) } diff -Nru cargo-0.52.0/src/cargo/sources/git/utils.rs cargo-0.54.0/src/cargo/sources/git/utils.rs --- cargo-0.52.0/src/cargo/sources/git/utils.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/git/utils.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,117 +1,11 @@ //! Utilities for handling git repositories, mainly around //! authentication/cloning. -//! -//! # `DefaultBranch` vs `Branch("master")` -//! -//! Long ago in a repository not so far away, an author (*cough* me *cough*) -//! didn't understand how branches work in Git. This led the author to -//! interpret these two dependency declarations the exact same way with the -//! former literally internally desugaring to the latter: -//! -//! ```toml -//! [dependencies] -//! foo = { git = "https://example.org/foo" } -//! foo = { git = "https://example.org/foo", branch = "master" } -//! ``` -//! -//! It turns out there's this things called `HEAD` in git remotes which points -//! to the "main branch" of a repository, and the main branch is not always -//! literally called master. What Cargo would like to do is to differentiate -//! these two dependency directives, with the first meaning "depend on `HEAD`". -//! -//! Unfortunately implementing this is a breaking change. This was first -//! attempted in #8364 but resulted in #8468 which has two independent bugs -//! listed on that issue. Despite this breakage we would still like to roll out -//! this change in Cargo, but we're now going to take it very slow and try to -//! break as few people as possible along the way. These comments are intended -//! 
to log the current progress and what wonkiness you might see within Cargo -//! when handling `DefaultBranch` vs `Branch("master")` -//! -//! ### Repositories with `master` and a default branch -//! -//! This is one of the most obvious sources of breakage. If our `foo` example -//! in above had two branches, one called `master` and another which was -//! actually the main branch, then Cargo's change will always be a breaking -//! change. This is because what's downloaded is an entirely different branch -//! if we change the meaning of the dependency directive. -//! -//! It's expected this is quite rare, but to handle this case nonetheless when -//! Cargo fetches from a git remote and the dependency specification is -//! `DefaultBranch` then it will issue a warning if the `HEAD` reference -//! doesn't match `master`. It's expected in this situation that authors will -//! fix builds locally by specifying `branch = 'master'`. -//! -//! ### Differences in `cargo vendor` configuration -//! -//! When executing `cargo vendor` it will print out configuration which can -//! then be used to configure Cargo to use the `vendor` directory. Historically -//! this configuration looked like: -//! -//! ```toml -//! [source."https://example.org/foo"] -//! git = "https://example.org/foo" -//! branch = "master" -//! replace-with = "vendored-sources" -//! ``` -//! -//! We would like to, however, transition this to not include the `branch = -//! "master"` unless the dependency directive actually mentions a branch. -//! Conveniently older Cargo implementations all interpret a missing `branch` -//! as `branch = "master"` so it's a backwards-compatible change to remove the -//! `branch = "master"` directive. As a result, `cargo vendor` will no longer -//! emit a `branch` if the git reference is `DefaultBranch` -//! -//! ### Differences in lock file formats -//! -//! Another issue pointed out in #8364 was that `Cargo.lock` files were no -//! 
longer compatible on stable and nightly with each other. The underlying -//! issue is that Cargo was serializing `branch = "master"` *differently* on -//! nightly than it was on stable. Historical implementations of Cargo would -//! encode `DefaultBranch` and `Branch("master")` the same way in `Cargo.lock`, -//! so when reading a lock file we have no way of differentiating between the -//! two. -//! -//! To handle this difference in encoding of `Cargo.lock` we'll be employing -//! the standard scheme to change `Cargo.lock`: -//! -//! * Add support in Cargo for a future format, don't turn it on. -//! * Wait a long time -//! * Turn on the future format -//! -//! Here the "future format" is `branch=master` shows up if you have a `branch` -//! in `Cargo.toml`, and otherwise nothing shows up in URLs. Due to the effect -//! on crate graph resolution, however, this flows into the next point.. -//! -//! ### Unification in the Cargo dependency graph -//! -//! Today dependencies with `branch = "master"` will unify with dependencies -//! that say nothing. (that's because the latter simply desugars). This means -//! the two `foo` directives above will resolve to the same dependency. -//! -//! The best idea I've got to fix this is to basically get everyone (if anyone) -//! to stop doing this today. The crate graph resolver will start to warn if it -//! detects that multiple `Cargo.toml` directives are detected and mixed. The -//! thinking is that when we turn on the new lock file format it'll also be -//! hard breaking change for any project which still has dependencies to -//! both the `master` branch and not. -//! -//! ### What we're doing today -//! -//! The general goal of Cargo today is to internally distinguish -//! `DefaultBranch` and `Branch("master")`, but for the time being they should -//! be functionally equivalent in terms of builds. The hope is that we'll let -//! all these warnings and such bake for a good long time, and eventually we'll -//! 
flip some switches if your build has no warnings it'll work before and -//! after. -//! -//! That's the dream at least, we'll see how this plays out. use crate::core::GitReference; -use crate::util::errors::{CargoResult, CargoResultExt}; -use crate::util::paths; -use crate::util::process_builder::process; +use crate::util::errors::CargoResult; use crate::util::{network, Config, IntoUrl, Progress}; -use anyhow::{anyhow, Context}; +use anyhow::{anyhow, Context as _}; +use cargo_util::{paths, ProcessBuilder}; use curl::easy::List; use git2::{self, ErrorClass, ObjectType}; use log::{debug, info}; @@ -182,7 +76,7 @@ } pub fn rev_for(&self, path: &Path, reference: &GitReference) -> CargoResult { - reference.resolve(&self.db_at(path)?.repo, None) + reference.resolve(&self.db_at(path)?.repo) } pub fn checkout( @@ -207,7 +101,7 @@ } } None => { - if let Ok(rev) = reference.resolve(&db.repo, Some((&self.url, cargo_config))) { + if let Ok(rev) = reference.resolve(&db.repo) { return Ok((db, rev)); } } @@ -226,7 +120,7 @@ .context(format!("failed to clone into: {}", into.display()))?; let rev = match locked_rev { Some(rev) => rev, - None => reference.resolve(&repo, Some((&self.url, cargo_config)))?, + None => reference.resolve(&repo)?, }; Ok(( @@ -295,21 +189,13 @@ self.repo.revparse_single(&oid.to_string()).is_ok() } - pub fn resolve( - &self, - r: &GitReference, - remote_and_config: Option<(&Url, &Config)>, - ) -> CargoResult { - r.resolve(&self.repo, remote_and_config) + pub fn resolve(&self, r: &GitReference) -> CargoResult { + r.resolve(&self.repo) } } impl GitReference { - pub fn resolve( - &self, - repo: &git2::Repository, - remote_and_config: Option<(&Url, &Config)>, - ) -> CargoResult { + pub fn resolve(&self, repo: &git2::Repository) -> CargoResult { let id = match self { // Note that we resolve the named tag here in sync with where it's // fetched into via `fetch` below. 
@@ -320,7 +206,7 @@ let obj = obj.peel(ObjectType::Commit)?; Ok(obj.id()) })() - .chain_err(|| format!("failed to find tag `{}`", s))?, + .with_context(|| format!("failed to find tag `{}`", s))?, // Resolve the remote name since that's all we're configuring in // `fetch` below. @@ -328,44 +214,17 @@ let name = format!("origin/{}", s); let b = repo .find_branch(&name, git2::BranchType::Remote) - .chain_err(|| format!("failed to find branch `{}`", s))?; + .with_context(|| format!("failed to find branch `{}`", s))?; b.get() .target() .ok_or_else(|| anyhow::format_err!("branch `{}` did not have a target", s))? } - // See the module docs for why we're using `master` here. + // We'll be using the HEAD commit GitReference::DefaultBranch => { - let master = repo - .find_branch("origin/master", git2::BranchType::Remote) - .chain_err(|| "failed to find branch `master`")?; - let master = master - .get() - .target() - .ok_or_else(|| anyhow::format_err!("branch `master` did not have a target"))?; - - if let Some((remote, config)) = remote_and_config { - let head_id = repo.refname_to_id("refs/remotes/origin/HEAD")?; - let head = repo.find_object(head_id, None)?; - let head = head.peel(ObjectType::Commit)?.id(); - - if head != master { - config.shell().warn(&format!( - "\ - fetching `master` branch from `{}` but the `HEAD` \ - reference for this repository is not the \ - `master` branch. This behavior will change \ - in Cargo in the future and your build may \ - break, so it's recommended to place \ - `branch = \"master\"` in Cargo.toml when \ - depending on this git repository to ensure \ - that your build will continue to work.\ - ", - remote, - ))?; - } - } - master + let head_id = repo.refname_to_id("refs/remotes/origin/HEAD")?; + let head = repo.find_object(head_id, None)?; + head.peel(ObjectType::Commit)?.id() } GitReference::Rev(s) => { @@ -490,7 +349,7 @@ info!("update submodules for: {:?}", repo.workdir().unwrap()); for mut child in repo.submodules()? 
{ - update_submodule(repo, &mut child, cargo_config).chain_err(|| { + update_submodule(repo, &mut child, cargo_config).with_context(|| { format!( "failed to update submodule `{}`", child.name().unwrap_or("") @@ -543,7 +402,7 @@ cargo_config .shell() .status("Updating", format!("git submodule `{}`", url))?; - fetch(&mut repo, url, &reference, cargo_config).chain_err(|| { + fetch(&mut repo, url, &reference, cargo_config).with_context(|| { format!( "failed to fetch submodule `{}` from {}", child.name().unwrap_or(""), @@ -899,8 +758,6 @@ } GitReference::DefaultBranch => { - // See the module docs for why we're fetching `master` here. - refspecs.push(String::from("refs/heads/master:refs/remotes/origin/master")); refspecs.push(String::from("HEAD:refs/remotes/origin/HEAD")); } @@ -977,7 +834,7 @@ tags: bool, config: &Config, ) -> CargoResult<()> { - let mut cmd = process("git"); + let mut cmd = ProcessBuilder::new("git"); cmd.arg("fetch"); if tags { cmd.arg("--tags"); @@ -1166,10 +1023,7 @@ handle.useragent("cargo")?; let mut headers = List::new(); headers.append("Accept: application/vnd.github.3.sha")?; - headers.append(&format!( - "If-None-Match: \"{}\"", - reference.resolve(repo, None)? - ))?; + headers.append(&format!("If-None-Match: \"{}\"", reference.resolve(repo)?))?; handle.http_headers(headers)?; handle.perform()?; Ok(handle.response_code()? 
== 304) diff -Nru cargo-0.52.0/src/cargo/sources/path.rs cargo-0.54.0/src/cargo/sources/path.rs --- cargo-0.52.0/src/cargo/sources/path.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/path.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2,16 +2,17 @@ use std::fs; use std::path::{Path, PathBuf}; +use crate::core::source::MaybePackage; +use crate::core::{Dependency, Package, PackageId, Source, SourceId, Summary}; +use crate::ops; +use crate::util::{internal, CargoResult, Config}; +use anyhow::Context as _; +use cargo_util::paths; use filetime::FileTime; use ignore::gitignore::GitignoreBuilder; use ignore::Match; use log::{trace, warn}; -use crate::core::source::MaybePackage; -use crate::core::{Dependency, Package, PackageId, Source, SourceId, Summary}; -use crate::ops; -use crate::util::{internal, paths, CargoResult, CargoResultExt, Config}; - pub struct PathSource<'cfg> { source_id: SourceId, path: PathBuf, @@ -96,7 +97,7 @@ /// are relevant for building this package, but it also contains logic to /// use other methods like .gitignore to filter the list of files. pub fn list_files(&self, pkg: &Package) -> CargoResult> { - self._list_files(pkg).chain_err(|| { + self._list_files(pkg).with_context(|| { format!( "failed to determine list of files in {}", pkg.root().display() @@ -190,7 +191,7 @@ }; let index = repo .index() - .chain_err(|| format!("failed to open git index at {}", repo.path().display()))?; + .with_context(|| format!("failed to open git index at {}", repo.path().display()))?; let repo_root = repo.workdir().ok_or_else(|| { anyhow::format_err!( "did not expect repo at {} to be bare", @@ -411,7 +412,7 @@ // TODO: drop `collect` and sort after transition period and dropping warning tests. // See rust-lang/cargo#4268 and rust-lang/cargo#4270. let mut entries: Vec = fs::read_dir(path) - .chain_err(|| format!("cannot read {:?}", path))? + .with_context(|| format!("cannot read {:?}", path))? 
.map(|e| e.unwrap().path()) .collect(); entries.sort_unstable_by(|a, b| a.as_os_str().cmp(b.as_os_str())); @@ -436,7 +437,7 @@ let mut max = FileTime::zero(); let mut max_path = PathBuf::new(); - for file in self.list_files(pkg).chain_err(|| { + for file in self.list_files(pkg).with_context(|| { format!( "failed to determine the most recently modified file in {}", pkg.root().display() diff -Nru cargo-0.52.0/src/cargo/sources/registry/index.rs cargo-0.54.0/src/cargo/sources/registry/index.rs --- cargo-0.52.0/src/cargo/sources/registry/index.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/registry/index.rs 2021-04-27 14:35:53.000000000 +0000 @@ -68,14 +68,15 @@ use crate::core::dependency::Dependency; use crate::core::{PackageId, SourceId, Summary}; -use crate::sources::registry::{RegistryData, RegistryPackage}; +use crate::sources::registry::{RegistryData, RegistryPackage, INDEX_V_MAX}; use crate::util::interning::InternedString; -use crate::util::paths; use crate::util::{internal, CargoResult, Config, Filesystem, ToSemver}; use anyhow::bail; +use cargo_util::paths; use log::{debug, info}; use semver::{Version, VersionReq}; use std::collections::{HashMap, HashSet}; +use std::convert::TryInto; use std::fs; use std::path::Path; use std::str; @@ -308,7 +309,11 @@ // minimize the amount of work being done here and parse as little as // necessary. let raw_data = &summaries.raw_data; - let max_version = 1; + let max_version = if namespaced_features || weak_dep_features { + INDEX_V_MAX + } else { + 1 + }; Ok(summaries .versions .iter_mut() @@ -567,6 +572,14 @@ let summary = match IndexSummary::parse(config, line, source_id) { Ok(summary) => summary, Err(e) => { + // This should only happen when there is an index + // entry from a future version of cargo that this + // version doesn't understand. 
Hopefully, those future + // versions of cargo correctly set INDEX_V_MAX and + // CURRENT_CACHE_VERSION, otherwise this will skip + // entries in the cache preventing those newer + // versions from reading them (that is, until the + // cache is rebuilt). log::info!("failed to parse {:?} registry package: {}", relative, e); continue; } @@ -654,9 +667,9 @@ // Implementation of serializing/deserializing the cache of summaries on disk. // Currently the format looks like: // -// +--------------+-------------+---+ -// | version byte | git sha rev | 0 | -// +--------------+-------------+---+ +// +--------------------+----------------------+-------------+---+ +// | cache version byte | index format version | git sha rev | 0 | +// +--------------------+----------------------+-------------+---+ // // followed by... // @@ -673,8 +686,14 @@ // versions of Cargo share the same cache they don't get too confused. The git // sha lets us know when the file needs to be regenerated (it needs regeneration // whenever the index itself updates). +// +// Cache versions: +// * `1`: The original version. +// * `2`: Added the "index format version" field so that if the index format +// changes, different versions of cargo won't get confused reading each +// other's caches. 
-const CURRENT_CACHE_VERSION: u8 = 1; +const CURRENT_CACHE_VERSION: u8 = 2; impl<'a> SummariesCache<'a> { fn parse(data: &'a [u8], last_index_update: &str) -> CargoResult> { @@ -685,6 +704,19 @@ if *first_byte != CURRENT_CACHE_VERSION { bail!("looks like a different Cargo's cache, bailing out"); } + let index_v_bytes = rest + .get(..4) + .ok_or_else(|| anyhow::anyhow!("cache expected 4 bytes for index version"))?; + let index_v = u32::from_le_bytes(index_v_bytes.try_into().unwrap()); + if index_v != INDEX_V_MAX { + bail!( + "index format version {} doesn't match the version I know ({})", + index_v, + INDEX_V_MAX + ); + } + let rest = &rest[4..]; + let mut iter = split(rest, 0); if let Some(update) = iter.next() { if update != last_index_update.as_bytes() { @@ -716,6 +748,7 @@ .sum(); let mut contents = Vec::with_capacity(size); contents.push(CURRENT_CACHE_VERSION); + contents.extend(&u32::to_le_bytes(INDEX_V_MAX)); contents.extend_from_slice(index_version.as_bytes()); contents.push(0); for (version, data) in self.versions.iter() { @@ -765,12 +798,19 @@ /// /// The `line` provided is expected to be valid JSON. fn parse(config: &Config, line: &[u8], source_id: SourceId) -> CargoResult { + // ****CAUTION**** Please be extremely careful with returning errors + // from this function. Entries that error are not included in the + // index cache, and can cause cargo to get confused when switching + // between different versions that understand the index differently. + // Make sure to consider the INDEX_V_MAX and CURRENT_CACHE_VERSION + // values carefully when making changes here. 
let RegistryPackage { name, vers, cksum, deps, - features, + mut features, + features2, yanked, links, v, @@ -782,6 +822,11 @@ .into_iter() .map(|dep| dep.into_dep(source_id)) .collect::>>()?; + if let Some(features2) = features2 { + for (name, values) in features2 { + features.entry(name).or_default().extend(values); + } + } let mut summary = Summary::new(config, pkgid, deps, &features, links)?; summary.set_checksum(cksum); Ok(IndexSummary { @@ -792,7 +837,7 @@ } } -fn split<'a>(haystack: &'a [u8], needle: u8) -> impl Iterator + 'a { +fn split(haystack: &[u8], needle: u8) -> impl Iterator { struct Split<'a> { haystack: &'a [u8], needle: u8, diff -Nru cargo-0.52.0/src/cargo/sources/registry/local.rs cargo-0.54.0/src/cargo/sources/registry/local.rs --- cargo-0.52.0/src/cargo/sources/registry/local.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/registry/local.rs 2021-04-27 14:35:53.000000000 +0000 @@ -2,8 +2,8 @@ use crate::sources::registry::{MaybeLock, RegistryConfig, RegistryData}; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; -use crate::util::paths; -use crate::util::{Config, Filesystem, Sha256}; +use crate::util::{Config, Filesystem}; +use cargo_util::{paths, Sha256}; use std::fs::File; use std::io::prelude::*; use std::io::SeekFrom; diff -Nru cargo-0.52.0/src/cargo/sources/registry/mod.rs cargo-0.54.0/src/cargo/sources/registry/mod.rs --- cargo-0.52.0/src/cargo/sources/registry/mod.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/registry/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -165,6 +165,7 @@ use std::io::Write; use std::path::{Path, PathBuf}; +use anyhow::Context as _; use flate2::read::GzDecoder; use log::debug; use semver::{Version, VersionReq}; @@ -175,7 +176,6 @@ use crate::core::source::MaybePackage; use crate::core::{Package, PackageId, Source, SourceId, Summary}; use crate::sources::PathSource; -use crate::util::errors::CargoResultExt; use crate::util::hex; 
use crate::util::interning::InternedString; use crate::util::into_url::IntoUrl; @@ -250,6 +250,10 @@ pub api: Option, } +/// The maximum version of the `v` field in the index this version of cargo +/// understands. +pub(crate) const INDEX_V_MAX: u32 = 2; + /// A single line in the index representing a single version of a package. #[derive(Deserialize)] pub struct RegistryPackage<'a> { @@ -258,6 +262,13 @@ #[serde(borrow)] deps: Vec>, features: BTreeMap>, + /// This field contains features with new, extended syntax. Specifically, + /// namespaced features (`dep:`) and weak dependencies (`pkg?/feat`). + /// + /// This is separated from `features` because versions older than 1.19 + /// will fail to load due to not being able to parse the new syntax, even + /// with a `Cargo.lock` file. + features2: Option>>, cksum: String, /// If `true`, Cargo will skip this version when resolving. /// @@ -274,10 +285,12 @@ /// If this is None, it defaults to version 1. Entries with unknown /// versions are ignored. /// + /// Version `2` format adds the `features2` field. + /// /// This provides a method to safely introduce changes to index entries /// and allow older versions of cargo to ignore newer entries it doesn't /// understand. This is honored as of 1.51, so unfortunately older - /// versions will ignore it, and potentially misinterpret version 1 and + /// versions will ignore it, and potentially misinterpret version 2 and /// newer entries. /// /// The intent is that versions older than 1.51 will work with a @@ -587,10 +600,10 @@ let prefix = unpack_dir.file_name().unwrap(); let parent = unpack_dir.parent().unwrap(); for entry in tar.entries()? { - let mut entry = entry.chain_err(|| "failed to iterate over archive")?; + let mut entry = entry.with_context(|| "failed to iterate over archive")?; let entry_path = entry .path() - .chain_err(|| "failed to read entry path")? + .with_context(|| "failed to read entry path")? 
.into_owned(); // We're going to unpack this tarball into the global source @@ -610,7 +623,7 @@ // Unpacking failed let mut result = entry.unpack_in(parent).map_err(anyhow::Error::from); if cfg!(windows) && restricted_names::is_windows_reserved_path(&entry_path) { - result = result.chain_err(|| { + result = result.with_context(|| { format!( "`{}` appears to contain a reserved Windows path, \ it cannot be extracted on Windows", @@ -618,7 +631,8 @@ ) }); } - result.chain_err(|| format!("failed to unpack entry at `{}`", entry_path.display()))?; + result + .with_context(|| format!("failed to unpack entry at `{}`", entry_path.display()))?; } // The lock file is created after unpacking so we overwrite a lock file @@ -628,7 +642,7 @@ .read(true) .write(true) .open(&path) - .chain_err(|| format!("failed to open `{}`", path.display()))?; + .with_context(|| format!("failed to open `{}`", path.display()))?; // Write to the lock file to indicate that unpacking was successful. write!(ok, "ok")?; @@ -647,7 +661,7 @@ fn get_pkg(&mut self, package: PackageId, path: &File) -> CargoResult { let path = self .unpack_package(package, path) - .chain_err(|| format!("failed to unpack package `{}`", package))?; + .with_context(|| format!("failed to unpack package `{}`", package))?; let mut src = PathSource::new(&path, self.source_id, self.config); src.update()?; let mut pkg = match src.download(package)? 
{ diff -Nru cargo-0.52.0/src/cargo/sources/registry/remote.rs cargo-0.54.0/src/cargo/sources/registry/remote.rs --- cargo-0.52.0/src/cargo/sources/registry/remote.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/registry/remote.rs 2021-04-27 14:35:53.000000000 +0000 @@ -5,10 +5,11 @@ RegistryConfig, RegistryData, CRATE_TEMPLATE, LOWER_PREFIX_TEMPLATE, PREFIX_TEMPLATE, VERSION_TEMPLATE, }; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::interning::InternedString; -use crate::util::paths; -use crate::util::{Config, Filesystem, Sha256}; +use crate::util::{Config, Filesystem}; +use anyhow::Context as _; +use cargo_util::{paths, Sha256}; use lazycell::LazyCell; use log::{debug, trace}; use std::cell::{Cell, Ref, RefCell}; @@ -97,7 +98,7 @@ let mut opts = git2::RepositoryInitOptions::new(); opts.external_template(false); Ok(git2::Repository::init_opts(&path, &opts) - .chain_err(|| "failed to initialize index git repository")?) + .with_context(|| "failed to initialize index git repository")?) 
} } }) @@ -106,7 +107,7 @@ fn head(&self) -> CargoResult { if self.head.get().is_none() { let repo = self.repo()?; - let oid = self.index_git_ref.resolve(repo, None)?; + let oid = self.index_git_ref.resolve(repo)?; self.head.set(Some(oid)); } Ok(self.head.get().unwrap()) @@ -241,7 +242,7 @@ let url = self.source_id.url(); let repo = self.repo.borrow_mut().unwrap(); git::fetch(repo, url.as_str(), &self.index_git_ref, self.config) - .chain_err(|| format!("failed to fetch `{}`", url))?; + .with_context(|| format!("failed to fetch `{}`", url))?; self.config.updated_sources().insert(self.source_id); // Create a dummy file to record the mtime for when we updated the @@ -312,7 +313,7 @@ .read(true) .write(true) .open(&path) - .chain_err(|| format!("failed to open `{}`", path.display()))?; + .with_context(|| format!("failed to open `{}`", path.display()))?; let meta = dst.metadata()?; if meta.len() > 0 { return Ok(dst); diff -Nru cargo-0.52.0/src/cargo/sources/replaced.rs cargo-0.54.0/src/cargo/sources/replaced.rs --- cargo-0.52.0/src/cargo/sources/replaced.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/sources/replaced.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,6 +1,8 @@ use crate::core::source::MaybePackage; use crate::core::{Dependency, Package, PackageId, Source, SourceId, Summary}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; + +use anyhow::Context as _; pub struct ReplacedSource<'cfg> { to_replace: SourceId, @@ -47,7 +49,7 @@ .query(&dep, &mut |summary| { f(summary.map_source(replace_with, to_replace)) }) - .chain_err(|| format!("failed to query replaced source {}", self.to_replace))?; + .with_context(|| format!("failed to query replaced source {}", self.to_replace))?; Ok(()) } @@ -59,14 +61,14 @@ .fuzzy_query(&dep, &mut |summary| { f(summary.map_source(replace_with, to_replace)) }) - .chain_err(|| format!("failed to query replaced source {}", self.to_replace))?; + .with_context(|| 
format!("failed to query replaced source {}", self.to_replace))?; Ok(()) } fn update(&mut self) -> CargoResult<()> { self.inner .update() - .chain_err(|| format!("failed to update replaced source {}", self.to_replace))?; + .with_context(|| format!("failed to update replaced source {}", self.to_replace))?; Ok(()) } @@ -75,7 +77,7 @@ let pkg = self .inner .download(id) - .chain_err(|| format!("failed to download replaced source {}", self.to_replace))?; + .with_context(|| format!("failed to download replaced source {}", self.to_replace))?; Ok(match pkg { MaybePackage::Ready(pkg) => { MaybePackage::Ready(pkg.map_source(self.replace_with, self.to_replace)) @@ -89,7 +91,7 @@ let pkg = self .inner .finish_download(id, data) - .chain_err(|| format!("failed to download replaced source {}", self.to_replace))?; + .with_context(|| format!("failed to download replaced source {}", self.to_replace))?; Ok(pkg.map_source(self.replace_with, self.to_replace)) } diff -Nru cargo-0.52.0/src/cargo/util/command_prelude.rs cargo-0.54.0/src/cargo/util/command_prelude.rs --- cargo-0.52.0/src/cargo/util/command_prelude.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/command_prelude.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,17 +1,19 @@ use crate::core::compiler::{BuildConfig, MessageFormat}; -use crate::core::Workspace; +use crate::core::resolver::CliFeatures; +use crate::core::{Edition, Workspace}; use crate::ops::{CompileFilter, CompileOptions, NewOptions, Packages, VersionControl}; use crate::sources::CRATES_IO_REGISTRY; use crate::util::important_paths::find_root_manifest_for_wd; use crate::util::interning::InternedString; use crate::util::restricted_names::is_glob_pattern; -use crate::util::{paths, toml::TomlProfile, validate_package_name}; use crate::util::{ print_available_benches, print_available_binaries, print_available_examples, print_available_packages, print_available_tests, }; +use crate::util::{toml::TomlProfile, validate_package_name}; use 
crate::CargoResult; use anyhow::bail; +use cargo_util::paths; use clap::{self, SubCommand}; use std::ffi::{OsStr, OsString}; use std::path::PathBuf; @@ -188,7 +190,7 @@ ._arg(opt("lib", "Use a library template")) ._arg( opt("edition", "Edition to set for the crate generated") - .possible_values(&["2015", "2018", "2021"]) + .possible_values(Edition::CLI_VALUES) .value_name("YEAR"), ) ._arg( @@ -219,6 +221,13 @@ "Ignore `rust-version` specification in packages (unstable)", )) } + + fn arg_future_incompat_report(self) -> Self { + self._arg(opt( + "future-incompat-report", + "Ouputs a future incompatibility report at the end of the build (unstable)", + )) + } } impl AppExt for App { @@ -462,6 +471,7 @@ build_config.requested_profile = self.get_profile_name(config, "dev", profile_checking)?; build_config.build_plan = self._is_present("build-plan"); build_config.unit_graph = self._is_present("unit-graph"); + build_config.future_incompat_report = self._is_present("future-incompat-report"); if build_config.build_plan { config .cli_unstable() @@ -472,12 +482,22 @@ .cli_unstable() .fail_if_stable_opt("--unit-graph", 8002)?; } + if build_config.future_incompat_report { + config + .cli_unstable() + // TODO: Tracking issue + .fail_if_stable_opt("--future-incompat-report", 9241)?; + + if !config.cli_unstable().future_incompat_report { + anyhow::bail!( + "Usage of `--future-incompat-report` requires `-Z future-incompat-report`" + ) + } + } let opts = CompileOptions { build_config, - features: self._values_of("features"), - all_features: self._is_present("all-features"), - no_default_features: self._is_present("no-default-features"), + cli_features: self.cli_features()?, spec, filter: CompileFilter::from_raw_arguments( self._is_present("lib"), @@ -519,6 +539,14 @@ Ok(opts) } + fn cli_features(&self) -> CargoResult { + CliFeatures::from_command_line( + &self._values_of("features"), + self._is_present("all-features"), + !self._is_present("no-default-features"), + ) + } + fn 
compile_options_for_single_package( &self, config: &Config, diff -Nru cargo-0.52.0/src/cargo/util/config/de.rs cargo-0.54.0/src/cargo/util/config/de.rs --- cargo-0.52.0/src/cargo/util/config/de.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/config/de.rs 2021-04-27 14:35:53.000000000 +0000 @@ -40,62 +40,6 @@ }; } -impl<'config> Deserializer<'config> { - /// This is a helper for getting a CV from a file or env var. - /// - /// If this returns CV::List, then don't look at the value. Handling lists - /// is deferred to ConfigSeqAccess. - fn get_cv_with_env(&self) -> Result, ConfigError> { - // Determine if value comes from env, cli, or file, and merge env if - // possible. - let cv = self.config.get_cv(&self.key)?; - let env = self.config.env.get(self.key.as_env_key()); - let env_def = Definition::Environment(self.key.as_env_key().to_string()); - let use_env = match (&cv, env) { - (Some(cv), Some(_)) => env_def.is_higher_priority(cv.definition()), - (None, Some(_)) => true, - _ => false, - }; - - if !use_env { - return Ok(cv); - } - - // Future note: If you ever need to deserialize a non-self describing - // map type, this should implement a starts_with check (similar to how - // ConfigMapAccess does). - let env = env.unwrap(); - if env == "true" { - Ok(Some(CV::Boolean(true, env_def))) - } else if env == "false" { - Ok(Some(CV::Boolean(false, env_def))) - } else if let Ok(i) = env.parse::() { - Ok(Some(CV::Integer(i, env_def))) - } else if self.config.cli_unstable().advanced_env - && env.starts_with('[') - && env.ends_with(']') - { - // Parsing is deferred to ConfigSeqAccess. - Ok(Some(CV::List(Vec::new(), env_def))) - } else { - // Try to merge if possible. - match cv { - Some(CV::List(cv_list, _cv_def)) => { - // Merging is deferred to ConfigSeqAccess. - Ok(Some(CV::List(cv_list, env_def))) - } - _ => { - // Note: CV::Table merging is not implemented, as env - // vars do not support table values. 
In the future, we - // could check for `{}`, and interpret it as TOML if - // that seems useful. - Ok(Some(CV::String(env.to_string(), env_def))) - } - } - } - } -} - impl<'de, 'config> de::Deserializer<'de> for Deserializer<'config> { type Error = ConfigError; @@ -103,7 +47,7 @@ where V: de::Visitor<'de>, { - let cv = self.get_cv_with_env()?; + let cv = self.config.get_cv_with_env(&self.key)?; if let Some(cv) = cv { let res: (Result, Definition) = match cv { CV::Integer(i, def) => (visitor.visit_i64(i), def), diff -Nru cargo-0.52.0/src/cargo/util/config/key.rs cargo-0.54.0/src/cargo/util/config/key.rs --- cargo-0.52.0/src/cargo/util/config/key.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/config/key.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,3 +1,4 @@ +use std::borrow::Cow; use std::fmt; /// Key for a configuration variable. @@ -84,16 +85,32 @@ } /// Returns an iterator of the key parts as strings. - pub(super) fn parts(&self) -> impl Iterator { + pub(crate) fn parts(&self) -> impl Iterator { self.parts.iter().map(|p| p.0.as_ref()) } + + /// Returns whether or not this is a key for the root table. + pub fn is_root(&self) -> bool { + self.parts.is_empty() + } } impl fmt::Display for ConfigKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - // Note: This is not a perfect TOML representation. This really should - // check if the parts should be quoted. - let parts: Vec<&str> = self.parts().collect(); + let parts: Vec<_> = self.parts().map(|part| escape_key_part(part)).collect(); parts.join(".").fmt(f) } } + +fn escape_key_part<'a>(part: &'a str) -> Cow<'a, str> { + let ok = part.chars().all(|c| { + matches!(c, + 'a'..='z' | 'A'..='Z' | '0'..='9' | '-' | '_') + }); + if ok { + Cow::Borrowed(part) + } else { + // This is a bit messy, but toml doesn't expose a function to do this. 
+ Cow::Owned(toml::to_string(&toml::Value::String(part.to_string())).unwrap()) + } +} diff -Nru cargo-0.52.0/src/cargo/util/config/mod.rs cargo-0.54.0/src/cargo/util/config/mod.rs --- cargo-0.52.0/src/cargo/util/config/mod.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/config/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -49,10 +49,12 @@ //! translate from `ConfigValue` and environment variables to the caller's //! desired type. +use std::borrow::Cow; use std::cell::{RefCell, RefMut}; use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::collections::{HashMap, HashSet}; use std::env; +use std::ffi::OsStr; use std::fmt; use std::fs::{self, File}; use std::io::prelude::*; @@ -63,21 +65,21 @@ use std::sync::Once; use std::time::Instant; -use anyhow::{anyhow, bail, format_err}; -use curl::easy::Easy; -use lazycell::LazyCell; -use serde::Deserialize; -use url::Url; - use self::ConfigValue as CV; use crate::core::compiler::rustdoc::RustdocExternMap; use crate::core::shell::Verbosity; -use crate::core::{nightly_features_allowed, CliUnstable, Shell, SourceId, Workspace}; +use crate::core::{features, CliUnstable, Shell, SourceId, Workspace}; use crate::ops; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::toml as cargo_toml; -use crate::util::{paths, validate_package_name}; +use crate::util::validate_package_name; use crate::util::{FileLock, Filesystem, IntoUrl, IntoUrlWithBase, Rustc}; +use anyhow::{anyhow, bail, format_err, Context as _}; +use cargo_util::paths; +use curl::easy::Easy; +use lazycell::LazyCell; +use serde::Deserialize; +use url::Url; mod de; use de::Deserializer; @@ -86,7 +88,7 @@ pub use value::{Definition, OptValue, Value}; mod key; -use key::ConfigKey; +pub use key::ConfigKey; mod path; pub use path::{ConfigRelativePath, PathAndArgs}; @@ -132,6 +134,8 @@ cli_config: Option>, /// The current working directory of cargo cwd: PathBuf, + /// Directory where 
config file searching should stop (inclusive). + search_stop_path: Option, /// The location of the cargo executable (path to current process) cargo_exe: LazyCell, /// The location of the rustdoc executable @@ -165,6 +169,8 @@ target_dir: Option, /// Environment variables, separated to assist testing. env: HashMap, + /// Environment variables, converted to uppercase to check for case mismatch + upper_case_env: HashMap, /// Tracks which sources have been updated to avoid multiple updates. updated_sources: LazyCell>>, /// Lock, if held, of the global package cache along with the number of @@ -177,6 +183,23 @@ target_cfgs: LazyCell>, doc_extern_map: LazyCell, progress_config: ProgressConfig, + env_config: LazyCell, + /// This should be false if: + /// - this is an artifact of the rustc distribution process for "stable" or for "beta" + /// - this is an `#[test]` that does not opt in with `enable_nightly_features` + /// - this is a integration test that uses `ProcessBuilder` + /// that does not opt in with `masquerade_as_nightly_cargo` + /// This should be true if: + /// - this is an artifact of the rustc distribution process for "nightly" + /// - this is being used in the rustc distribution process internally + /// - this is a cargo executable that was built from source + /// - this is an `#[test]` that called `enable_nightly_features` + /// - this is a integration test that uses `ProcessBuilder` + /// that called `masquerade_as_nightly_cargo` + /// It's public to allow tests use nightly features. + /// NOTE: this should be set before `configure()`. If calling this from an integration test, + /// consider using `ConfigBuilder::enable_nightly_features` instead. 
+ pub nightly_features_allowed: bool, } impl Config { @@ -209,6 +232,15 @@ }) .collect(); + let upper_case_env = if cfg!(windows) { + HashMap::new() + } else { + env.clone() + .into_iter() + .map(|(k, _)| (k.to_uppercase().replace("-", "_"), k)) + .collect() + }; + let cache_rustc_info = match env.get("CARGO_CACHE_RUSTC_INFO") { Some(cache) => cache != "0", _ => true, @@ -218,6 +250,7 @@ home_path: Filesystem::new(homedir), shell: RefCell::new(shell), cwd, + search_stop_path: None, values: LazyCell::new(), cli_config: None, cargo_exe: LazyCell::new(), @@ -241,6 +274,7 @@ creation_time: Instant::now(), target_dir: None, env, + upper_case_env, updated_sources: LazyCell::new(), package_cache_lock: RefCell::new(None), http_config: LazyCell::new(), @@ -249,6 +283,8 @@ target_cfgs: LazyCell::new(), doc_extern_map: LazyCell::new(), progress_config: ProgressConfig::default(), + env_config: LazyCell::new(), + nightly_features_allowed: matches!(&*features::channel(), "nightly" | "dev"), } } @@ -258,8 +294,8 @@ /// any config files from disk. Those will be loaded lazily as-needed. pub fn default() -> CargoResult { let shell = Shell::new(); - let cwd = - env::current_dir().chain_err(|| "couldn't get the current directory of the process")?; + let cwd = env::current_dir() + .with_context(|| "couldn't get the current directory of the process")?; let homedir = homedir(&cwd).ok_or_else(|| { anyhow!( "Cargo couldn't find your home directory. \ @@ -296,10 +332,9 @@ /// Gets the default Cargo registry. pub fn default_registry(&self) -> CargoResult> { - Ok(match self.get_string("registry.default")? { - Some(registry) => Some(registry.val), - None => None, - }) + Ok(self + .get_string("registry.default")? + .map(|registry| registry.val)) } /// Gets a reference to the shell, e.g., for writing error messages. 
@@ -376,7 +411,7 @@ let exe = from_current_exe() .or_else(|_| from_argv()) - .chain_err(|| "couldn't get the path to cargo executable")?; + .with_context(|| "couldn't get the path to cargo executable")?; Ok(exe) }) .map(AsRef::as_ref) @@ -422,6 +457,14 @@ } } + /// Sets the path where ancestor config file searching will stop. The + /// given path is included, but its ancestors are not. + pub fn set_search_stop_path>(&mut self, path: P) { + let path = path.into(); + debug_assert!(self.cwd.starts_with(&path)); + self.search_stop_path = Some(path); + } + /// Reloads on-disk configuration values, starting at the given path and /// walking up its ancestors. pub fn reload_rooted_at>(&mut self, path: P) -> CargoResult<()> { @@ -445,11 +488,28 @@ pub fn target_dir(&self) -> CargoResult> { if let Some(dir) = &self.target_dir { Ok(Some(dir.clone())) - } else if let Some(dir) = env::var_os("CARGO_TARGET_DIR") { + } else if let Some(dir) = self.env.get("CARGO_TARGET_DIR") { + // Check if the CARGO_TARGET_DIR environment variable is set to an empty string. + if dir.is_empty() { + bail!( + "the target directory is set to an empty string in the \ + `CARGO_TARGET_DIR` environment variable" + ) + } + Ok(Some(Filesystem::new(self.cwd.join(dir)))) } else if let Some(val) = &self.build_config()?.target_dir { - let val = val.resolve_path(self); - Ok(Some(Filesystem::new(val))) + let path = val.resolve_path(self); + + // Check if the target directory is set to an empty string in the config.toml file. + if val.raw_value().is_empty() { + bail!( + "the target directory is set to an empty string in {}", + val.value().definition + ) + } + + Ok(Some(Filesystem::new(path))) } else { Ok(None) } @@ -462,6 +522,14 @@ fn get_cv(&self, key: &ConfigKey) -> CargoResult> { log::trace!("get cv {:?}", key); let vals = self.values()?; + if key.is_root() { + // Returning the entire root table (for example `cargo config get` + // with no key). The definition here shouldn't matter. 
+ return Ok(Some(CV::Table( + vals.clone(), + Definition::Path(PathBuf::new()), + ))); + } let mut parts = key.parts().enumerate(); let mut val = match vals.get(parts.next().unwrap().1) { Some(val) => val, @@ -479,12 +547,14 @@ | CV::String(_, def) | CV::List(_, def) | CV::Boolean(_, def) => { - let key_so_far: Vec<&str> = key.parts().take(i).collect(); + let mut key_so_far = ConfigKey::new(); + for part in key.parts().take(i) { + key_so_far.push(part); + } bail!( "expected table for configuration key `{}`, \ but found {} in {}", - // This join doesn't handle quoting properly. - key_so_far.join("."), + key_so_far, val.desc(), def ) @@ -494,11 +564,94 @@ Ok(Some(val.clone())) } + /// This is a helper for getting a CV from a file or env var. + pub(crate) fn get_cv_with_env(&self, key: &ConfigKey) -> CargoResult> { + // Determine if value comes from env, cli, or file, and merge env if + // possible. + let cv = self.get_cv(key)?; + if key.is_root() { + // Root table can't have env value. + return Ok(cv); + } + let env = self.env.get(key.as_env_key()); + let env_def = Definition::Environment(key.as_env_key().to_string()); + let use_env = match (&cv, env) { + // Lists are always merged. + (Some(CV::List(..)), Some(_)) => true, + (Some(cv), Some(_)) => env_def.is_higher_priority(cv.definition()), + (None, Some(_)) => true, + _ => false, + }; + + if !use_env { + return Ok(cv); + } + + // Future note: If you ever need to deserialize a non-self describing + // map type, this should implement a starts_with check (similar to how + // ConfigMapAccess does). + let env = env.unwrap(); + if env == "true" { + Ok(Some(CV::Boolean(true, env_def))) + } else if env == "false" { + Ok(Some(CV::Boolean(false, env_def))) + } else if let Ok(i) = env.parse::() { + Ok(Some(CV::Integer(i, env_def))) + } else if self.cli_unstable().advanced_env && env.starts_with('[') && env.ends_with(']') { + match cv { + Some(CV::List(mut cv_list, cv_def)) => { + // Merge with config file. 
+ self.get_env_list(key, &mut cv_list)?; + Ok(Some(CV::List(cv_list, cv_def))) + } + Some(cv) => { + // This can't assume StringList or UnmergedStringList. + // Return an error, which is the behavior of merging + // multiple config.toml files with the same scenario. + bail!( + "unable to merge array env for config `{}`\n\ + file: {:?}\n\ + env: {}", + key, + cv, + env + ); + } + None => { + let mut cv_list = Vec::new(); + self.get_env_list(key, &mut cv_list)?; + Ok(Some(CV::List(cv_list, env_def))) + } + } + } else { + // Try to merge if possible. + match cv { + Some(CV::List(mut cv_list, cv_def)) => { + // Merge with config file. + self.get_env_list(key, &mut cv_list)?; + Ok(Some(CV::List(cv_list, cv_def))) + } + _ => { + // Note: CV::Table merging is not implemented, as env + // vars do not support table values. In the future, we + // could check for `{}`, and interpret it as TOML if + // that seems useful. + Ok(Some(CV::String(env.to_string(), env_def))) + } + } + } + } + /// Helper primarily for testing. pub fn set_env(&mut self, env: HashMap) { self.env = env; } + /// Returns all environment variables. + pub(crate) fn env(&self) -> &HashMap { + &self.env + } + fn get_env(&self, key: &ConfigKey) -> Result, ConfigError> where T: FromStr, @@ -514,7 +667,10 @@ definition, })) } - None => Ok(None), + None => { + self.check_environment_key_case_mismatch(key); + Ok(None) + } } } @@ -534,9 +690,27 @@ return true; } } + self.check_environment_key_case_mismatch(key); + false } + fn check_environment_key_case_mismatch(&self, key: &ConfigKey) { + if cfg!(windows) { + // In the case of windows the check for case mismatch in keys can be skipped + // as windows already converts its environment keys into the desired format. 
+ return; + } + + if let Some(env_key) = self.upper_case_env.get(key.as_env_key()) { + let _ = self.shell().warn(format!( + "Environment variables are expected to use uppercase letters and underscores, \ + the variable `{}` will be ignored and have no effect", + env_key + )); + } + } + /// Get a string config value. /// /// See `get` for more details. @@ -629,7 +803,10 @@ ) -> CargoResult<()> { let env_val = match self.env.get(key.as_env_key()) { Some(v) => v, - None => return Ok(()), + None => { + self.check_environment_key_case_mismatch(key); + return Ok(()); + } }; let def = Definition::Environment(key.as_env_key().to_string()); @@ -704,7 +881,10 @@ unstable_flags: &[String], cli_config: &[String], ) -> CargoResult<()> { - for warning in self.unstable_flags.parse(unstable_flags)? { + for warning in self + .unstable_flags + .parse(unstable_flags, self.nightly_features_allowed)? + { self.shell().warn(warning)?; } if !unstable_flags.is_empty() { @@ -717,6 +897,15 @@ self.cli_config = Some(cli_config.iter().map(|s| s.to_string()).collect()); self.merge_cli_args()?; } + if self.unstable_flags.config_include { + // If the config was already loaded (like when fetching the + // `[alias]` table), it was loaded with includes disabled because + // the `unstable_flags` hadn't been set up, yet. Any values + // fetched before this step will not process includes, but that + // should be fine (`[alias]` is one of the only things loaded + // before configure). This can be removed when stabilized. 
+ self.reload_rooted_at(self.cwd.clone())?; + } let extra_verbose = verbose >= 2; let verbose = verbose != 0; @@ -743,10 +932,7 @@ (false, _, false) => Verbosity::Normal, }; - let cli_target_dir = match target_dir.as_ref() { - Some(dir) => Some(Filesystem::new(dir.clone())), - None => None, - }; + let cli_target_dir = target_dir.as_ref().map(|dir| Filesystem::new(dir.clone())); self.shell().set_verbosity(verbosity); self.shell().set_color_choice(color)?; @@ -770,7 +956,7 @@ fn load_unstable_flags_from_config(&mut self) -> CargoResult<()> { // If nightly features are enabled, allow setting Z-flags from config // using the `unstable` table. Ignore that block otherwise. - if nightly_features_allowed() { + if self.nightly_features_allowed { self.unstable_flags = self .get::>("unstable")? .unwrap_or_default(); @@ -779,7 +965,7 @@ // allows the CLI to override config files for both enabling // and disabling, and doing it up top allows CLI Zflags to // control config parsing behavior. - self.unstable_flags.parse(unstable_flags_cli)?; + self.unstable_flags.parse(unstable_flags_cli, true)?; } } @@ -819,6 +1005,39 @@ self.load_values_from(&self.cwd) } + pub(crate) fn load_values_unmerged(&self) -> CargoResult> { + let mut result = Vec::new(); + let mut seen = HashSet::new(); + let home = self.home_path.clone().into_path_unlocked(); + self.walk_tree(&self.cwd, &home, |path| { + let mut cv = self._load_file(path, &mut seen, false)?; + if self.cli_unstable().config_include { + self.load_unmerged_include(&mut cv, &mut seen, &mut result)?; + } + result.push(cv); + Ok(()) + }) + .with_context(|| "could not load Cargo configuration")?; + Ok(result) + } + + fn load_unmerged_include( + &self, + cv: &mut CV, + seen: &mut HashSet, + output: &mut Vec, + ) -> CargoResult<()> { + let includes = self.include_paths(cv, false)?; + for (path, abs_path, def) in includes { + let mut cv = self._load_file(&abs_path, seen, false).with_context(|| { + format!("failed to load config include `{}` from 
`{}`", path, def) + })?; + self.load_unmerged_include(&mut cv, seen, output)?; + output.push(cv); + } + Ok(()) + } + fn load_values_from(&self, path: &Path) -> CargoResult> { // This definition path is ignored, this is just a temporary container // representing the entire file. @@ -826,12 +1045,13 @@ let home = self.home_path.clone().into_path_unlocked(); self.walk_tree(path, &home, |path| { - let value = self.load_file(path)?; - cfg.merge(value, false) - .chain_err(|| format!("failed to merge configuration at `{}`", path.display()))?; + let value = self.load_file(path, true)?; + cfg.merge(value, false).with_context(|| { + format!("failed to merge configuration at `{}`", path.display()) + })?; Ok(()) }) - .chain_err(|| "could not load Cargo configuration")?; + .with_context(|| "could not load Cargo configuration")?; match cfg { CV::Table(map, _) => Ok(map), @@ -839,12 +1059,17 @@ } } - fn load_file(&self, path: &Path) -> CargoResult { + fn load_file(&self, path: &Path, includes: bool) -> CargoResult { let mut seen = HashSet::new(); - self._load_file(path, &mut seen) + self._load_file(path, &mut seen, includes) } - fn _load_file(&self, path: &Path, seen: &mut HashSet) -> CargoResult { + fn _load_file( + &self, + path: &Path, + seen: &mut HashSet, + includes: bool, + ) -> CargoResult { if !seen.insert(path.to_path_buf()) { bail!( "config `include` cycle detected with path `{}`", @@ -852,17 +1077,22 @@ ); } let contents = fs::read_to_string(path) - .chain_err(|| format!("failed to read configuration file `{}`", path.display()))?; - let toml = cargo_toml::parse(&contents, path, self) - .chain_err(|| format!("could not parse TOML configuration in `{}`", path.display()))?; - let value = CV::from_toml(Definition::Path(path.to_path_buf()), toml).chain_err(|| { - format!( - "failed to load TOML configuration from `{}`", - path.display() - ) + .with_context(|| format!("failed to read configuration file `{}`", path.display()))?; + let toml = cargo_toml::parse(&contents, path, 
self).with_context(|| { + format!("could not parse TOML configuration in `{}`", path.display()) })?; - let value = self.load_includes(value, seen)?; - Ok(value) + let value = + CV::from_toml(Definition::Path(path.to_path_buf()), toml).with_context(|| { + format!( + "failed to load TOML configuration from `{}`", + path.display() + ) + })?; + if includes { + self.load_includes(value, seen) + } else { + Ok(value) + } } /// Load any `include` files listed in the given `value`. @@ -872,49 +1102,72 @@ /// `seen` is used to check for cyclic includes. fn load_includes(&self, mut value: CV, seen: &mut HashSet) -> CargoResult { // Get the list of files to load. - let (includes, def) = match &mut value { - CV::Table(table, _def) => match table.remove("include") { - Some(CV::String(s, def)) => (vec![(s, def.clone())], def), - Some(CV::List(list, def)) => (list, def), - Some(other) => bail!( - "`include` expected a string or list, but found {} in `{}`", - other.desc(), - other.definition() - ), - None => { - return Ok(value); - } - }, - _ => unreachable!(), - }; + let includes = self.include_paths(&mut value, true)?; // Check unstable. if !self.cli_unstable().config_include { - self.shell().warn(format!("config `include` in `{}` ignored, the -Zconfig-include command-line flag is required", - def))?; return Ok(value); } // Accumulate all values here. 
let mut root = CV::Table(HashMap::new(), value.definition().clone()); - for (path, def) in includes { - let abs_path = match &def { - Definition::Path(p) => p.parent().unwrap().join(&path), - Definition::Environment(_) | Definition::Cli => self.cwd().join(&path), - }; - self._load_file(&abs_path, seen) + for (path, abs_path, def) in includes { + self._load_file(&abs_path, seen, true) .and_then(|include| root.merge(include, true)) - .chain_err(|| format!("failed to load config include `{}` from `{}`", path, def))?; + .with_context(|| { + format!("failed to load config include `{}` from `{}`", path, def) + })?; } root.merge(value, true)?; Ok(root) } - /// Add config arguments passed on the command line. - fn merge_cli_args(&mut self) -> CargoResult<()> { + /// Converts the `include` config value to a list of absolute paths. + fn include_paths( + &self, + cv: &mut CV, + remove: bool, + ) -> CargoResult> { + let abs = |path: &String, def: &Definition| -> (String, PathBuf, Definition) { + let abs_path = match def { + Definition::Path(p) => p.parent().unwrap().join(&path), + Definition::Environment(_) | Definition::Cli => self.cwd().join(&path), + }; + (path.to_string(), abs_path, def.clone()) + }; + let table = match cv { + CV::Table(table, _def) => table, + _ => unreachable!(), + }; + let owned; + let include = if remove { + owned = table.remove("include"); + owned.as_ref() + } else { + table.get("include") + }; + let includes = match include { + Some(CV::String(s, def)) => { + vec![abs(s, def)] + } + Some(CV::List(list, _def)) => list.iter().map(|(s, def)| abs(s, def)).collect(), + Some(other) => bail!( + "`include` expected a string or list, but found {} in `{}`", + other.desc(), + other.definition() + ), + None => { + return Ok(Vec::new()); + } + }; + Ok(includes) + } + + /// Parses the CLI config args and returns them as a table. 
+ pub(crate) fn cli_args_as_table(&self) -> CargoResult { + let mut loaded_args = CV::Table(HashMap::new(), Definition::Cli); let cli_args = match &self.cli_config { Some(cli_args) => cli_args, - None => return Ok(()), + None => return Ok(loaded_args), }; - let mut loaded_args = CV::Table(HashMap::new(), Definition::Cli); for arg in cli_args { let arg_as_path = self.cwd.join(arg); let tmp_table = if !arg.is_empty() && arg_as_path.exists() { @@ -933,7 +1186,7 @@ // TODO: This should probably use a more narrow parser, reject // comments, blank lines, [headers], etc. let toml_v: toml::Value = toml::de::from_str(arg) - .chain_err(|| format!("failed to parse --config argument `{}`", arg))?; + .with_context(|| format!("failed to parse --config argument `{}`", arg))?; let toml_table = toml_v.as_table().unwrap(); if toml_table.len() != 1 { bail!( @@ -943,29 +1196,34 @@ ); } CV::from_toml(Definition::Cli, toml_v) - .chain_err(|| format!("failed to convert --config argument `{}`", arg))? + .with_context(|| format!("failed to convert --config argument `{}`", arg))? }; let mut seen = HashSet::new(); let tmp_table = self .load_includes(tmp_table, &mut seen) - .chain_err(|| "failed to load --config include".to_string())?; + .with_context(|| "failed to load --config include".to_string())?; loaded_args .merge(tmp_table, true) - .chain_err(|| format!("failed to merge --config argument `{}`", arg))?; + .with_context(|| format!("failed to merge --config argument `{}`", arg))?; } - // Force values to be loaded. - let _ = self.values()?; - let values = self.values_mut()?; - let loaded_map = match loaded_args { + Ok(loaded_args) + } + + /// Add config arguments passed on the command line. + fn merge_cli_args(&mut self) -> CargoResult<()> { + let loaded_map = match self.cli_args_as_table()? { CV::Table(table, _def) => table, _ => unreachable!(), }; + // Force values to be loaded. 
+ let _ = self.values()?; + let values = self.values_mut()?; for (key, value) in loaded_map.into_iter() { match values.entry(key) { Vacant(entry) => { entry.insert(value); } - Occupied(mut entry) => entry.get_mut().merge(value, true).chain_err(|| { + Occupied(mut entry) => entry.get_mut().merge(value, true).with_context(|| { format!( "failed to merge --config key `{}` into `{}`", entry.key(), @@ -1028,7 +1286,7 @@ { let mut stash: HashSet = HashSet::new(); - for current in paths::ancestors(pwd) { + for current in paths::ancestors(pwd, self.search_stop_path.as_deref()) { if let Some(path) = self.get_file_path(¤t.join(".cargo"), "config", true)? { walk(&path)?; stash.insert(path); @@ -1051,7 +1309,7 @@ pub fn get_registry_index(&self, registry: &str) -> CargoResult { validate_package_name(registry, "registry name", "")?; if let Some(index) = self.get_string(&format!("registries.{}.index", registry))? { - self.resolve_registry_index(&index).chain_err(|| { + self.resolve_registry_index(&index).with_context(|| { format!( "invalid index URL for registry `{}` defined in {}", registry, index.definition @@ -1096,7 +1354,7 @@ None => return Ok(()), }; - let mut value = self.load_file(&credentials)?; + let mut value = self.load_file(&credentials, true)?; // Backwards compatibility for old `.cargo/credentials` layout. { let (value_map, def) = match value { @@ -1197,6 +1455,11 @@ &self.progress_config } + pub fn env_config(&self) -> CargoResult<&EnvConfig> { + self.env_config + .try_borrow_with(|| self.get::("env")) + } + /// This is used to validate the `term` table has valid syntax. 
/// /// This is necessary because loading the term settings happens very @@ -1315,7 +1578,7 @@ return Ok(PackageCacheLock(self)); } - Err(e).chain_err(|| "failed to acquire package cache lock")?; + Err(e).with_context(|| "failed to acquire package cache lock")?; } } } @@ -1467,7 +1730,7 @@ val.into_iter() .map(|(key, value)| { let value = CV::from_toml(def.clone(), value) - .chain_err(|| format!("failed to parse key `{}`", key))?; + .with_context(|| format!("failed to parse key `{}`", key))?; Ok((key, value)) }) .collect::>()?, @@ -1513,7 +1776,7 @@ Occupied(mut entry) => { let new_def = value.definition().clone(); let entry = entry.get_mut(); - entry.merge(value, force).chain_err(|| { + entry.merge(value, force).with_context(|| { format!( "failed to merge key `{}` between \ {} and {}", @@ -1646,7 +1909,7 @@ }; let mut contents = String::new(); - file.read_to_string(&mut contents).chain_err(|| { + file.read_to_string(&mut contents).with_context(|| { format!( "failed to read configuration file `{}`", file.path().display() @@ -1715,10 +1978,10 @@ let contents = toml.to_string(); file.seek(SeekFrom::Start(0))?; file.write_all(contents.as_bytes()) - .chain_err(|| format!("failed to write to `{}`", file.path().display()))?; + .with_context(|| format!("failed to write to `{}`", file.path().display()))?; file.file().set_len(contents.len() as u64)?; set_permissions(file.file(), 0o600) - .chain_err(|| format!("failed to set permissions of `{}`", file.path().display()))?; + .with_context(|| format!("failed to set permissions of `{}`", file.path().display()))?; return Ok(()); @@ -1906,6 +2169,54 @@ deserializer.deserialize_option(ProgressVisitor) } +#[derive(Debug, Deserialize)] +#[serde(untagged)] +enum EnvConfigValueInner { + Simple(String), + WithOptions { + value: String, + #[serde(default)] + force: bool, + #[serde(default)] + relative: bool, + }, +} + +#[derive(Debug, Deserialize)] +#[serde(transparent)] +pub struct EnvConfigValue { + inner: Value, +} + +impl 
EnvConfigValue { + pub fn is_force(&self) -> bool { + match self.inner.val { + EnvConfigValueInner::Simple(_) => false, + EnvConfigValueInner::WithOptions { force, .. } => force, + } + } + + pub fn resolve<'a>(&'a self, config: &Config) -> Cow<'a, OsStr> { + match self.inner.val { + EnvConfigValueInner::Simple(ref s) => Cow::Borrowed(OsStr::new(s.as_str())), + EnvConfigValueInner::WithOptions { + ref value, + relative, + .. + } => { + if relative { + let p = self.inner.definition.root(config).join(&value); + Cow::Owned(p.into_os_string()) + } else { + Cow::Borrowed(OsStr::new(value.as_str())) + } + } + } + } +} + +pub type EnvConfig = HashMap; + /// A type to deserialize a list of strings from a toml file. /// /// Supports deserializing either a whitespace-separated list of arguments in a diff -Nru cargo-0.52.0/src/cargo/util/config/path.rs cargo-0.54.0/src/cargo/util/config/path.rs --- cargo-0.52.0/src/cargo/util/config/path.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/config/path.rs 2021-04-27 14:35:53.000000000 +0000 @@ -10,6 +10,11 @@ pub struct ConfigRelativePath(Value); impl ConfigRelativePath { + /// Returns the underlying value. + pub fn value(&self) -> &Value { + &self.0 + } + /// Returns the raw underlying configuration value for this key. 
pub fn raw_value(&self) -> &str { &self.0.val diff -Nru cargo-0.52.0/src/cargo/util/diagnostic_server.rs cargo-0.54.0/src/cargo/util/diagnostic_server.rs --- cargo-0.52.0/src/cargo/util/diagnostic_server.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/diagnostic_server.rs 2021-04-27 14:35:53.000000000 +0000 @@ -10,11 +10,13 @@ use std::thread::{self, JoinHandle}; use anyhow::{Context, Error}; +use cargo_util::ProcessBuilder; use log::warn; use serde::{Deserialize, Serialize}; +use crate::core::Edition; use crate::util::errors::CargoResult; -use crate::util::{Config, ProcessBuilder}; +use crate::util::Config; const DIAGNOSICS_SERVER_VAR: &str = "__CARGO_FIX_DIAGNOSTICS_SERVER"; const PLEASE_REPORT_THIS_BUG: &str = @@ -28,10 +30,18 @@ fixing code with the `--broken-code` flag\n\n\ "; -#[derive(Deserialize, Serialize)] +#[derive(Deserialize, Serialize, Hash, Eq, PartialEq, Clone)] pub enum Message { + Migrating { + file: String, + from_edition: Edition, + to_edition: Edition, + }, Fixing { file: String, + }, + Fixed { + file: String, fixes: u32, }, FixFailed { @@ -45,12 +55,7 @@ }, EditionAlreadyEnabled { file: String, - edition: String, - }, - IdiomEditionMismatch { - file: String, - idioms: String, - edition: Option, + edition: Edition, }, } @@ -80,25 +85,40 @@ pub struct DiagnosticPrinter<'a> { config: &'a Config, - edition_already_enabled: HashSet, - idiom_mismatch: HashSet, + dedupe: HashSet, } impl<'a> DiagnosticPrinter<'a> { pub fn new(config: &'a Config) -> DiagnosticPrinter<'a> { DiagnosticPrinter { config, - edition_already_enabled: HashSet::new(), - idiom_mismatch: HashSet::new(), + dedupe: HashSet::new(), } } pub fn print(&mut self, msg: &Message) -> CargoResult<()> { match msg { - Message::Fixing { file, fixes } => { + Message::Migrating { + file, + from_edition, + to_edition, + } => { + if !self.dedupe.insert(msg.clone()) { + return Ok(()); + } + self.config.shell().status( + "Migrating", + &format!("{} from {} edition to {}", 
file, from_edition, to_edition), + ) + } + Message::Fixing { file } => self + .config + .shell() + .verbose(|shell| shell.status("Fixing", file)), + Message::Fixed { file, fixes } => { let msg = if *fixes == 1 { "fix" } else { "fixes" }; let msg = format!("{} ({} {})", file, fixes, msg); - self.config.shell().status("Fixing", msg) + self.config.shell().status("Fixed", msg) } Message::ReplaceFailed { file, message } => { let msg = format!("error applying suggestions to `{}`\n", file); @@ -158,57 +178,13 @@ Ok(()) } Message::EditionAlreadyEnabled { file, edition } => { - // Like above, only warn once per file - if !self.edition_already_enabled.insert(file.clone()) { + if !self.dedupe.insert(msg.clone()) { return Ok(()); } - - let msg = format!( - "\ -cannot prepare for the {} edition when it is enabled, so cargo cannot -automatically fix errors in `{}` - -To prepare for the {0} edition you should first remove `edition = '{0}'` from -your `Cargo.toml` and then rerun this command. Once all warnings have been fixed -then you can re-enable the `edition` key in `Cargo.toml`. 
For some more -information about transitioning to the {0} edition see: - - https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html -", - edition, - file, - ); - self.config.shell().error(&msg)?; - Ok(()) - } - Message::IdiomEditionMismatch { - file, - idioms, - edition, - } => { - // Same as above - if !self.idiom_mismatch.insert(file.clone()) { - return Ok(()); - } - self.config.shell().error(&format!( - "\ -cannot migrate to the idioms of the {} edition for `{}` -because it is compiled {}, which doesn't match {0} - -consider migrating to the {0} edition by adding `edition = '{0}'` to -`Cargo.toml` and then rerunning this command; a more detailed transition -guide can be found at - - https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html -", - idioms, - file, - match edition { - Some(s) => format!("with the {} edition", s), - None => "without an edition".to_string(), - }, - ))?; - Ok(()) + self.config.shell().warn(&format!( + "`{}` is already on the latest edition ({}), unable to migrate further", + file, edition + )) } } } diff -Nru cargo-0.52.0/src/cargo/util/errors.rs cargo-0.54.0/src/cargo/util/errors.rs --- cargo-0.52.0/src/cargo/util/errors.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/errors.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,34 +3,12 @@ use crate::core::{TargetKind, Workspace}; use crate::ops::CompileOptions; use anyhow::Error; +use cargo_util::ProcessError; use std::fmt; use std::path::PathBuf; -use std::process::{ExitStatus, Output}; -use std::str; pub type CargoResult = anyhow::Result; -// TODO: should delete this trait and just use `with_context` instead -pub trait CargoResultExt { - fn chain_err(self, f: F) -> CargoResult - where - F: FnOnce() -> D, - D: fmt::Display + Send + Sync + 'static; -} - -impl CargoResultExt for Result -where - E: Into, -{ - fn chain_err(self, f: F) -> CargoResult - where - F: FnOnce() -> D, - 
D: fmt::Display + Send + Sync + 'static, - { - self.map_err(|e| e.into().context(f())) - } -} - #[derive(Debug)] pub struct HttpNot200 { pub code: u32, @@ -187,41 +165,6 @@ impl<'a> ::std::iter::FusedIterator for ManifestCauses<'a> {} // ============================================================================= -// Process errors -#[derive(Debug)] -pub struct ProcessError { - /// A detailed description to show to the user why the process failed. - pub desc: String, - - /// The exit status of the process. - /// - /// This can be `None` if the process failed to launch (like process not - /// found) or if the exit status wasn't a code but was instead something - /// like termination via a signal. - pub code: Option, - - /// The stdout from the process. - /// - /// This can be `None` if the process failed to launch, or the output was - /// not captured. - pub stdout: Option>, - - /// The stderr from the process. - /// - /// This can be `None` if the process failed to launch, or the output was - /// not captured. - pub stderr: Option>, -} - -impl fmt::Display for ProcessError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.desc.fmt(f) - } -} - -impl std::error::Error for ProcessError {} - -// ============================================================================= // Cargo test errors. /// Error when testcases fail @@ -360,154 +303,6 @@ // ============================================================================= // Construction helpers -/// Creates a new process error. -/// -/// `status` can be `None` if the process did not launch. -/// `output` can be `None` if the process did not launch, or output was not captured. 
-pub fn process_error( - msg: &str, - status: Option, - output: Option<&Output>, -) -> ProcessError { - let exit = match status { - Some(s) => exit_status_to_string(s), - None => "never executed".to_string(), - }; - - process_error_raw( - msg, - status.and_then(|s| s.code()), - &exit, - output.map(|s| s.stdout.as_slice()), - output.map(|s| s.stderr.as_slice()), - ) -} - -pub fn process_error_raw( - msg: &str, - code: Option, - status: &str, - stdout: Option<&[u8]>, - stderr: Option<&[u8]>, -) -> ProcessError { - let mut desc = format!("{} ({})", msg, status); - - if let Some(out) = stdout { - match str::from_utf8(out) { - Ok(s) if !s.trim().is_empty() => { - desc.push_str("\n--- stdout\n"); - desc.push_str(s); - } - Ok(..) | Err(..) => {} - } - } - if let Some(out) = stderr { - match str::from_utf8(out) { - Ok(s) if !s.trim().is_empty() => { - desc.push_str("\n--- stderr\n"); - desc.push_str(s); - } - Ok(..) | Err(..) => {} - } - } - - ProcessError { - desc, - code, - stdout: stdout.map(|s| s.to_vec()), - stderr: stderr.map(|s| s.to_vec()), - } -} - -pub fn exit_status_to_string(status: ExitStatus) -> String { - return status_to_string(status); - - #[cfg(unix)] - fn status_to_string(status: ExitStatus) -> String { - use std::os::unix::process::*; - - if let Some(signal) = status.signal() { - let name = match signal as libc::c_int { - libc::SIGABRT => ", SIGABRT: process abort signal", - libc::SIGALRM => ", SIGALRM: alarm clock", - libc::SIGFPE => ", SIGFPE: erroneous arithmetic operation", - libc::SIGHUP => ", SIGHUP: hangup", - libc::SIGILL => ", SIGILL: illegal instruction", - libc::SIGINT => ", SIGINT: terminal interrupt signal", - libc::SIGKILL => ", SIGKILL: kill", - libc::SIGPIPE => ", SIGPIPE: write on a pipe with no one to read", - libc::SIGQUIT => ", SIGQUIT: terminal quit signal", - libc::SIGSEGV => ", SIGSEGV: invalid memory reference", - libc::SIGTERM => ", SIGTERM: termination signal", - libc::SIGBUS => ", SIGBUS: access to undefined memory", - 
#[cfg(not(target_os = "haiku"))] - libc::SIGSYS => ", SIGSYS: bad system call", - libc::SIGTRAP => ", SIGTRAP: trace/breakpoint trap", - _ => "", - }; - format!("signal: {}{}", signal, name) - } else { - status.to_string() - } - } - - #[cfg(windows)] - fn status_to_string(status: ExitStatus) -> String { - use winapi::shared::minwindef::DWORD; - use winapi::um::winnt::*; - - let mut base = status.to_string(); - let extra = match status.code().unwrap() as DWORD { - STATUS_ACCESS_VIOLATION => "STATUS_ACCESS_VIOLATION", - STATUS_IN_PAGE_ERROR => "STATUS_IN_PAGE_ERROR", - STATUS_INVALID_HANDLE => "STATUS_INVALID_HANDLE", - STATUS_INVALID_PARAMETER => "STATUS_INVALID_PARAMETER", - STATUS_NO_MEMORY => "STATUS_NO_MEMORY", - STATUS_ILLEGAL_INSTRUCTION => "STATUS_ILLEGAL_INSTRUCTION", - STATUS_NONCONTINUABLE_EXCEPTION => "STATUS_NONCONTINUABLE_EXCEPTION", - STATUS_INVALID_DISPOSITION => "STATUS_INVALID_DISPOSITION", - STATUS_ARRAY_BOUNDS_EXCEEDED => "STATUS_ARRAY_BOUNDS_EXCEEDED", - STATUS_FLOAT_DENORMAL_OPERAND => "STATUS_FLOAT_DENORMAL_OPERAND", - STATUS_FLOAT_DIVIDE_BY_ZERO => "STATUS_FLOAT_DIVIDE_BY_ZERO", - STATUS_FLOAT_INEXACT_RESULT => "STATUS_FLOAT_INEXACT_RESULT", - STATUS_FLOAT_INVALID_OPERATION => "STATUS_FLOAT_INVALID_OPERATION", - STATUS_FLOAT_OVERFLOW => "STATUS_FLOAT_OVERFLOW", - STATUS_FLOAT_STACK_CHECK => "STATUS_FLOAT_STACK_CHECK", - STATUS_FLOAT_UNDERFLOW => "STATUS_FLOAT_UNDERFLOW", - STATUS_INTEGER_DIVIDE_BY_ZERO => "STATUS_INTEGER_DIVIDE_BY_ZERO", - STATUS_INTEGER_OVERFLOW => "STATUS_INTEGER_OVERFLOW", - STATUS_PRIVILEGED_INSTRUCTION => "STATUS_PRIVILEGED_INSTRUCTION", - STATUS_STACK_OVERFLOW => "STATUS_STACK_OVERFLOW", - STATUS_DLL_NOT_FOUND => "STATUS_DLL_NOT_FOUND", - STATUS_ORDINAL_NOT_FOUND => "STATUS_ORDINAL_NOT_FOUND", - STATUS_ENTRYPOINT_NOT_FOUND => "STATUS_ENTRYPOINT_NOT_FOUND", - STATUS_CONTROL_C_EXIT => "STATUS_CONTROL_C_EXIT", - STATUS_DLL_INIT_FAILED => "STATUS_DLL_INIT_FAILED", - STATUS_FLOAT_MULTIPLE_FAULTS => 
"STATUS_FLOAT_MULTIPLE_FAULTS", - STATUS_FLOAT_MULTIPLE_TRAPS => "STATUS_FLOAT_MULTIPLE_TRAPS", - STATUS_REG_NAT_CONSUMPTION => "STATUS_REG_NAT_CONSUMPTION", - STATUS_HEAP_CORRUPTION => "STATUS_HEAP_CORRUPTION", - STATUS_STACK_BUFFER_OVERRUN => "STATUS_STACK_BUFFER_OVERRUN", - STATUS_ASSERTION_FAILURE => "STATUS_ASSERTION_FAILURE", - _ => return base, - }; - base.push_str(", "); - base.push_str(extra); - base - } -} - -pub fn is_simple_exit_code(code: i32) -> bool { - // Typical unix exit codes are 0 to 127. - // Windows doesn't have anything "typical", and is a - // 32-bit number (which appears signed here, but is really - // unsigned). However, most of the interesting NTSTATUS - // codes are very large. This is just a rough - // approximation of which codes are "normal" and which - // ones are abnormal termination. - code >= 0 && code <= 127 -} - pub fn internal(error: S) -> anyhow::Error { InternalError::new(anyhow::format_err!("{}", error)).into() } diff -Nru cargo-0.52.0/src/cargo/util/flock.rs cargo-0.54.0/src/cargo/util/flock.rs --- cargo-0.52.0/src/cargo/util/flock.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/flock.rs 2021-04-27 14:35:53.000000000 +0000 @@ -3,12 +3,12 @@ use std::io::{Read, Seek, SeekFrom, Write}; use std::path::{Display, Path, PathBuf}; -use termcolor::Color::Cyan; - -use crate::util::errors::{CargoResult, CargoResultExt}; -use crate::util::paths; +use crate::util::errors::CargoResult; use crate::util::Config; +use anyhow::Context as _; +use cargo_util::paths; use sys::*; +use termcolor::Color::Cyan; #[derive(Debug)] pub struct FileLock { @@ -225,7 +225,7 @@ Err(anyhow::Error::from(e)) } }) - .chain_err(|| format!("failed to open: {}", path.display()))?; + .with_context(|| format!("failed to open: {}", path.display()))?; match state { State::Exclusive => { acquire(config, msg, &path, &|| try_lock_exclusive(&f), &|| { @@ -314,7 +314,7 @@ let msg = format!("waiting for file lock on {}", msg); 
config.shell().status_with_color("Blocking", &msg, Cyan)?; - lock_block().chain_err(|| format!("failed to lock file: {}", path.display()))?; + lock_block().with_context(|| format!("failed to lock file: {}", path.display()))?; return Ok(()); #[cfg(all(target_os = "linux", not(target_env = "musl")))] diff -Nru cargo-0.52.0/src/cargo/util/important_paths.rs cargo-0.54.0/src/cargo/util/important_paths.rs --- cargo-0.52.0/src/cargo/util/important_paths.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/important_paths.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,11 +1,11 @@ use crate::util::errors::CargoResult; -use crate::util::paths; +use cargo_util::paths; use std::path::{Path, PathBuf}; /// Finds the root `Cargo.toml`. pub fn find_root_manifest_for_wd(cwd: &Path) -> CargoResult { let file = "Cargo.toml"; - for current in paths::ancestors(cwd) { + for current in paths::ancestors(cwd, None) { let manifest = current.join(file); if manifest.exists() { return Ok(manifest); diff -Nru cargo-0.52.0/src/cargo/util/machine_message.rs cargo-0.54.0/src/cargo/util/machine_message.rs --- cargo-0.52.0/src/cargo/util/machine_message.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/machine_message.rs 2021-04-27 14:35:53.000000000 +0000 @@ -20,6 +20,7 @@ #[derive(Serialize)] pub struct FromCompiler<'a> { pub package_id: PackageId, + pub manifest_path: &'a Path, pub target: &'a Target, pub message: Box, } @@ -33,6 +34,7 @@ #[derive(Serialize)] pub struct Artifact<'a> { pub package_id: PackageId, + pub manifest_path: PathBuf, pub target: &'a Target, pub profile: ArtifactProfile, pub features: Vec, diff -Nru cargo-0.52.0/src/cargo/util/mod.rs cargo-0.54.0/src/cargo/util/mod.rs --- cargo-0.52.0/src/cargo/util/mod.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,12 +1,12 @@ +use std::fmt; use std::time::Duration; pub use self::canonical_url::CanonicalUrl; pub use 
self::config::{homedir, Config, ConfigValue}; pub use self::dependency_queue::DependencyQueue; pub use self::diagnostic_server::RustfixDiagnosticServer; -pub use self::errors::{exit_status_to_string, internal, process_error, process_error_raw}; -pub use self::errors::{CargoResult, CargoResultExt, CliResult, Test}; -pub use self::errors::{CargoTestError, CliError, ProcessError}; +pub use self::errors::{internal, CargoResult, CliResult, Test}; +pub use self::errors::{CargoTestError, CliError}; pub use self::flock::{FileLock, Filesystem}; pub use self::graph::Graph; pub use self::hasher::StableHasher; @@ -15,20 +15,15 @@ pub use self::into_url_with_base::IntoUrlWithBase; pub use self::lev_distance::{closest, closest_msg, lev_distance}; pub use self::lockserver::{LockServer, LockServerClient, LockServerStarted}; -pub use self::paths::{bytes2path, dylib_path, join_paths, path2bytes}; -pub use self::paths::{dylib_path_envvar, normalize_path}; -pub use self::process_builder::{process, ProcessBuilder}; pub use self::progress::{Progress, ProgressStyle}; pub use self::queue::Queue; -pub use self::read2::read2; pub use self::restricted_names::validate_package_name; pub use self::rustc::Rustc; -pub use self::sha256::Sha256; pub use self::to_semver::ToSemver; pub use self::vcs::{existing_vcs_repo, FossilRepo, GitRepo, HgRepo, PijulRepo}; pub use self::workspace::{ - print_available_benches, print_available_binaries, print_available_examples, - print_available_packages, print_available_tests, + add_path_args, path_args, print_available_benches, print_available_binaries, + print_available_examples, print_available_packages, print_available_tests, }; mod canonical_url; @@ -51,15 +46,11 @@ mod lockserver; pub mod machine_message; pub mod network; -pub mod paths; -pub mod process_builder; pub mod profile; mod progress; mod queue; -mod read2; pub mod restricted_names; pub mod rustc; -mod sha256; pub mod to_semver; pub mod toml; mod vcs; @@ -75,9 +66,30 @@ } } -/// Whether or not this 
running in a Continuous Integration environment. -pub fn is_ci() -> bool { - std::env::var("CI").is_ok() || std::env::var("TF_BUILD").is_ok() +pub fn iter_join_onto(mut w: W, iter: I, delim: &str) -> fmt::Result +where + W: fmt::Write, + I: IntoIterator, + T: std::fmt::Display, +{ + let mut it = iter.into_iter().peekable(); + while let Some(n) = it.next() { + write!(w, "{}", n)?; + if it.peek().is_some() { + write!(w, "{}", delim)?; + } + } + Ok(()) +} + +pub fn iter_join(iter: I, delim: &str) -> String +where + I: IntoIterator, + T: std::fmt::Display, +{ + let mut s = String::new(); + let _ = iter_join_onto(&mut s, iter, delim); + s } pub fn indented_lines(text: &str) -> String { diff -Nru cargo-0.52.0/src/cargo/util/paths.rs cargo-0.54.0/src/cargo/util/paths.rs --- cargo-0.52.0/src/cargo/util/paths.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/paths.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,630 +0,0 @@ -use std::env; -use std::ffi::{OsStr, OsString}; -use std::fs::{self, File, OpenOptions}; -use std::io; -use std::io::prelude::*; -use std::iter; -use std::path::{Component, Path, PathBuf}; - -use filetime::FileTime; -use tempfile::Builder as TempFileBuilder; - -use crate::util::errors::{CargoResult, CargoResultExt}; - -pub fn join_paths>(paths: &[T], env: &str) -> CargoResult { - env::join_paths(paths.iter()) - .chain_err(|| { - let paths = paths.iter().map(Path::new).collect::>(); - format!("failed to join path array: {:?}", paths) - }) - .chain_err(|| { - format!( - "failed to join search paths together\n\ - Does ${} have an unterminated quote character?", - env - ) - }) -} - -pub fn dylib_path_envvar() -> &'static str { - if cfg!(windows) { - "PATH" - } else if cfg!(target_os = "macos") { - // When loading and linking a dynamic library or bundle, dlopen - // searches in LD_LIBRARY_PATH, DYLD_LIBRARY_PATH, PWD, and - // DYLD_FALLBACK_LIBRARY_PATH. - // In the Mach-O format, a dynamic library has an "install path." 
- // Clients linking against the library record this path, and the - // dynamic linker, dyld, uses it to locate the library. - // dyld searches DYLD_LIBRARY_PATH *before* the install path. - // dyld searches DYLD_FALLBACK_LIBRARY_PATH only if it cannot - // find the library in the install path. - // Setting DYLD_LIBRARY_PATH can easily have unintended - // consequences. - // - // Also, DYLD_LIBRARY_PATH appears to have significant performance - // penalty starting in 10.13. Cargo's testsuite ran more than twice as - // slow with it on CI. - "DYLD_FALLBACK_LIBRARY_PATH" - } else { - "LD_LIBRARY_PATH" - } -} - -pub fn dylib_path() -> Vec { - match env::var_os(dylib_path_envvar()) { - Some(var) => env::split_paths(&var).collect(), - None => Vec::new(), - } -} - -pub fn normalize_path(path: &Path) -> PathBuf { - let mut components = path.components().peekable(); - let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() { - components.next(); - PathBuf::from(c.as_os_str()) - } else { - PathBuf::new() - }; - - for component in components { - match component { - Component::Prefix(..) => unreachable!(), - Component::RootDir => { - ret.push(component.as_os_str()); - } - Component::CurDir => {} - Component::ParentDir => { - ret.pop(); - } - Component::Normal(c) => { - ret.push(c); - } - } - } - ret -} - -pub fn resolve_executable(exec: &Path) -> CargoResult { - if exec.components().count() == 1 { - let paths = env::var_os("PATH").ok_or_else(|| anyhow::format_err!("no PATH"))?; - let candidates = env::split_paths(&paths).flat_map(|path| { - let candidate = path.join(&exec); - let with_exe = if env::consts::EXE_EXTENSION.is_empty() { - None - } else { - Some(candidate.with_extension(env::consts::EXE_EXTENSION)) - }; - iter::once(candidate).chain(with_exe) - }); - for candidate in candidates { - if candidate.is_file() { - // PATH may have a component like "." in it, so we still need to - // canonicalize. 
- return Ok(candidate.canonicalize()?); - } - } - - anyhow::bail!("no executable for `{}` found in PATH", exec.display()) - } else { - Ok(exec.canonicalize()?) - } -} - -pub fn read(path: &Path) -> CargoResult { - match String::from_utf8(read_bytes(path)?) { - Ok(s) => Ok(s), - Err(_) => anyhow::bail!("path at `{}` was not valid utf-8", path.display()), - } -} - -pub fn read_bytes(path: &Path) -> CargoResult> { - fs::read(path).chain_err(|| format!("failed to read `{}`", path.display())) -} - -pub fn write, C: AsRef<[u8]>>(path: P, contents: C) -> CargoResult<()> { - let path = path.as_ref(); - fs::write(path, contents.as_ref()).chain_err(|| format!("failed to write `{}`", path.display())) -} - -pub fn write_if_changed, C: AsRef<[u8]>>(path: P, contents: C) -> CargoResult<()> { - (|| -> CargoResult<()> { - let contents = contents.as_ref(); - let mut f = OpenOptions::new() - .read(true) - .write(true) - .create(true) - .open(&path)?; - let mut orig = Vec::new(); - f.read_to_end(&mut orig)?; - if orig != contents { - f.set_len(0)?; - f.seek(io::SeekFrom::Start(0))?; - f.write_all(contents)?; - } - Ok(()) - })() - .chain_err(|| format!("failed to write `{}`", path.as_ref().display()))?; - Ok(()) -} - -pub fn append(path: &Path, contents: &[u8]) -> CargoResult<()> { - (|| -> CargoResult<()> { - let mut f = OpenOptions::new() - .write(true) - .append(true) - .create(true) - .open(path)?; - - f.write_all(contents)?; - Ok(()) - })() - .chain_err(|| format!("failed to write `{}`", path.display()))?; - Ok(()) -} - -/// Creates a new file. -pub fn create>(path: P) -> CargoResult { - let path = path.as_ref(); - File::create(path).chain_err(|| format!("failed to create file `{}`", path.display())) -} - -/// Opens an existing file. 
-pub fn open>(path: P) -> CargoResult { - let path = path.as_ref(); - File::open(path).chain_err(|| format!("failed to open file `{}`", path.display())) -} - -pub fn mtime(path: &Path) -> CargoResult { - let meta = fs::metadata(path).chain_err(|| format!("failed to stat `{}`", path.display()))?; - Ok(FileTime::from_last_modification_time(&meta)) -} - -/// Returns the maximum mtime of the given path, recursing into -/// subdirectories, and following symlinks. -pub fn mtime_recursive(path: &Path) -> CargoResult { - let meta = fs::metadata(path).chain_err(|| format!("failed to stat `{}`", path.display()))?; - if !meta.is_dir() { - return Ok(FileTime::from_last_modification_time(&meta)); - } - let max_meta = walkdir::WalkDir::new(path) - .follow_links(true) - .into_iter() - .filter_map(|e| match e { - Ok(e) => Some(e), - Err(e) => { - // Ignore errors while walking. If Cargo can't access it, the - // build script probably can't access it, either. - log::debug!("failed to determine mtime while walking directory: {}", e); - None - } - }) - .filter_map(|e| { - if e.path_is_symlink() { - // Use the mtime of both the symlink and its target, to - // handle the case where the symlink is modified to a - // different target. - let sym_meta = match std::fs::symlink_metadata(e.path()) { - Ok(m) => m, - Err(err) => { - // I'm not sure when this is really possible (maybe a - // race with unlinking?). Regardless, if Cargo can't - // read it, the build script probably can't either. - log::debug!( - "failed to determine mtime while fetching symlink metdata of {}: {}", - e.path().display(), - err - ); - return None; - } - }; - let sym_mtime = FileTime::from_last_modification_time(&sym_meta); - // Walkdir follows symlinks. - match e.metadata() { - Ok(target_meta) => { - let target_mtime = FileTime::from_last_modification_time(&target_meta); - Some(sym_mtime.max(target_mtime)) - } - Err(err) => { - // Can't access the symlink target. 
If Cargo can't - // access it, the build script probably can't access - // it either. - log::debug!( - "failed to determine mtime of symlink target for {}: {}", - e.path().display(), - err - ); - Some(sym_mtime) - } - } - } else { - let meta = match e.metadata() { - Ok(m) => m, - Err(err) => { - // I'm not sure when this is really possible (maybe a - // race with unlinking?). Regardless, if Cargo can't - // read it, the build script probably can't either. - log::debug!( - "failed to determine mtime while fetching metadata of {}: {}", - e.path().display(), - err - ); - return None; - } - }; - Some(FileTime::from_last_modification_time(&meta)) - } - }) - .max() - // or_else handles the case where there are no files in the directory. - .unwrap_or_else(|| FileTime::from_last_modification_time(&meta)); - Ok(max_meta) -} - -/// Record the current time on the filesystem (using the filesystem's clock) -/// using a file at the given directory. Returns the current time. -pub fn set_invocation_time(path: &Path) -> CargoResult { - // note that if `FileTime::from_system_time(SystemTime::now());` is determined to be sufficient, - // then this can be removed. 
- let timestamp = path.join("invoked.timestamp"); - write( - ×tamp, - "This file has an mtime of when this was started.", - )?; - let ft = mtime(×tamp)?; - log::debug!("invocation time for {:?} is {}", path, ft); - Ok(ft) -} - -#[cfg(unix)] -pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> { - use std::os::unix::prelude::*; - Ok(path.as_os_str().as_bytes()) -} -#[cfg(windows)] -pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> { - match path.as_os_str().to_str() { - Some(s) => Ok(s.as_bytes()), - None => Err(anyhow::format_err!( - "invalid non-unicode path: {}", - path.display() - )), - } -} - -#[cfg(unix)] -pub fn bytes2path(bytes: &[u8]) -> CargoResult { - use std::os::unix::prelude::*; - Ok(PathBuf::from(OsStr::from_bytes(bytes))) -} -#[cfg(windows)] -pub fn bytes2path(bytes: &[u8]) -> CargoResult { - use std::str; - match str::from_utf8(bytes) { - Ok(s) => Ok(PathBuf::from(s)), - Err(..) => Err(anyhow::format_err!("invalid non-unicode path")), - } -} - -pub fn ancestors(path: &Path) -> PathAncestors<'_> { - PathAncestors::new(path) -} - -pub struct PathAncestors<'a> { - current: Option<&'a Path>, - stop_at: Option, -} - -impl<'a> PathAncestors<'a> { - fn new(path: &Path) -> PathAncestors<'_> { - PathAncestors { - current: Some(path), - //HACK: avoid reading `~/.cargo/config` when testing Cargo itself. 
- stop_at: env::var("__CARGO_TEST_ROOT").ok().map(PathBuf::from), - } - } -} - -impl<'a> Iterator for PathAncestors<'a> { - type Item = &'a Path; - - fn next(&mut self) -> Option<&'a Path> { - if let Some(path) = self.current { - self.current = path.parent(); - - if let Some(ref stop_at) = self.stop_at { - if path == stop_at { - self.current = None; - } - } - - Some(path) - } else { - None - } - } -} - -pub fn create_dir_all(p: impl AsRef) -> CargoResult<()> { - _create_dir_all(p.as_ref()) -} - -fn _create_dir_all(p: &Path) -> CargoResult<()> { - fs::create_dir_all(p).chain_err(|| format!("failed to create directory `{}`", p.display()))?; - Ok(()) -} - -pub fn remove_dir_all>(p: P) -> CargoResult<()> { - _remove_dir_all(p.as_ref()) -} - -fn _remove_dir_all(p: &Path) -> CargoResult<()> { - if p.symlink_metadata() - .chain_err(|| format!("could not get metadata for `{}` to remove", p.display()))? - .file_type() - .is_symlink() - { - return remove_file(p); - } - let entries = p - .read_dir() - .chain_err(|| format!("failed to read directory `{}`", p.display()))?; - for entry in entries { - let entry = entry?; - let path = entry.path(); - if entry.file_type()?.is_dir() { - remove_dir_all(&path)?; - } else { - remove_file(&path)?; - } - } - remove_dir(&p) -} - -pub fn remove_dir>(p: P) -> CargoResult<()> { - _remove_dir(p.as_ref()) -} - -fn _remove_dir(p: &Path) -> CargoResult<()> { - fs::remove_dir(p).chain_err(|| format!("failed to remove directory `{}`", p.display()))?; - Ok(()) -} - -pub fn remove_file>(p: P) -> CargoResult<()> { - _remove_file(p.as_ref()) -} - -fn _remove_file(p: &Path) -> CargoResult<()> { - let mut err = match fs::remove_file(p) { - Ok(()) => return Ok(()), - Err(e) => e, - }; - - if err.kind() == io::ErrorKind::PermissionDenied && set_not_readonly(p).unwrap_or(false) { - match fs::remove_file(p) { - Ok(()) => return Ok(()), - Err(e) => err = e, - } - } - - Err(err).chain_err(|| format!("failed to remove file `{}`", p.display()))?; - Ok(()) -} - 
-fn set_not_readonly(p: &Path) -> io::Result { - let mut perms = p.metadata()?.permissions(); - if !perms.readonly() { - return Ok(false); - } - perms.set_readonly(false); - fs::set_permissions(p, perms)?; - Ok(true) -} - -/// Hardlink (file) or symlink (dir) src to dst if possible, otherwise copy it. -/// -/// If the destination already exists, it is removed before linking. -pub fn link_or_copy(src: impl AsRef, dst: impl AsRef) -> CargoResult<()> { - let src = src.as_ref(); - let dst = dst.as_ref(); - _link_or_copy(src, dst) -} - -fn _link_or_copy(src: &Path, dst: &Path) -> CargoResult<()> { - log::debug!("linking {} to {}", src.display(), dst.display()); - if same_file::is_same_file(src, dst).unwrap_or(false) { - return Ok(()); - } - - // NB: we can't use dst.exists(), as if dst is a broken symlink, - // dst.exists() will return false. This is problematic, as we still need to - // unlink dst in this case. symlink_metadata(dst).is_ok() will tell us - // whether dst exists *without* following symlinks, which is what we want. - if fs::symlink_metadata(dst).is_ok() { - remove_file(&dst)?; - } - - let link_result = if src.is_dir() { - #[cfg(target_os = "redox")] - use std::os::redox::fs::symlink; - #[cfg(unix)] - use std::os::unix::fs::symlink; - #[cfg(windows)] - // FIXME: This should probably panic or have a copy fallback. Symlinks - // are not supported in all windows environments. Currently symlinking - // is only used for .dSYM directories on macos, but this shouldn't be - // accidentally relied upon. - use std::os::windows::fs::symlink_dir as symlink; - - let dst_dir = dst.parent().unwrap(); - let src = if src.starts_with(dst_dir) { - src.strip_prefix(dst_dir).unwrap() - } else { - src - }; - symlink(src, dst) - } else if env::var_os("__CARGO_COPY_DONT_LINK_DO_NOT_USE_THIS").is_some() { - // This is a work-around for a bug in macOS 10.15. 
When running on - // APFS, there seems to be a strange race condition with - // Gatekeeper where it will forcefully kill a process launched via - // `cargo run` with SIGKILL. Copying seems to avoid the problem. - // This shouldn't affect anyone except Cargo's test suite because - // it is very rare, and only seems to happen under heavy load and - // rapidly creating lots of executables and running them. - // See https://github.com/rust-lang/cargo/issues/7821 for the - // gory details. - fs::copy(src, dst).map(|_| ()) - } else { - fs::hard_link(src, dst) - }; - link_result - .or_else(|err| { - log::debug!("link failed {}. falling back to fs::copy", err); - fs::copy(src, dst).map(|_| ()) - }) - .chain_err(|| { - format!( - "failed to link or copy `{}` to `{}`", - src.display(), - dst.display() - ) - })?; - Ok(()) -} - -/// Copies a file from one location to another. -pub fn copy, Q: AsRef>(from: P, to: Q) -> CargoResult { - let from = from.as_ref(); - let to = to.as_ref(); - fs::copy(from, to) - .chain_err(|| format!("failed to copy `{}` to `{}`", from.display(), to.display())) -} - -/// Changes the filesystem mtime (and atime if possible) for the given file. -/// -/// This intentionally does not return an error, as this is sometimes not -/// supported on network filesystems. For the current uses in Cargo, this is a -/// "best effort" approach, and errors shouldn't be propagated. -pub fn set_file_time_no_err>(path: P, time: FileTime) { - let path = path.as_ref(); - match filetime::set_file_times(path, time, time) { - Ok(()) => log::debug!("set file mtime {} to {}", path.display(), time), - Err(e) => log::warn!( - "could not set mtime of {} to {}: {:?}", - path.display(), - time, - e - ), - } -} - -/// Strips `base` from `path`. -/// -/// This canonicalizes both paths before stripping. This is useful if the -/// paths are obtained in different ways, and one or the other may or may not -/// have been normalized in some way. 
-pub fn strip_prefix_canonical>( - path: P, - base: P, -) -> Result { - // Not all filesystems support canonicalize. Just ignore if it doesn't work. - let safe_canonicalize = |path: &Path| match path.canonicalize() { - Ok(p) => p, - Err(e) => { - log::warn!("cannot canonicalize {:?}: {:?}", path, e); - path.to_path_buf() - } - }; - let canon_path = safe_canonicalize(path.as_ref()); - let canon_base = safe_canonicalize(base.as_ref()); - canon_path.strip_prefix(canon_base).map(|p| p.to_path_buf()) -} - -/// Creates an excluded from cache directory atomically with its parents as needed. -/// -/// The atomicity only covers creating the leaf directory and exclusion from cache. Any missing -/// parent directories will not be created in an atomic manner. -/// -/// This function is idempotent and in addition to that it won't exclude ``p`` from cache if it -/// already exists. -pub fn create_dir_all_excluded_from_backups_atomic(p: impl AsRef) -> CargoResult<()> { - let path = p.as_ref(); - if path.is_dir() { - return Ok(()); - } - - let parent = path.parent().unwrap(); - let base = path.file_name().unwrap(); - create_dir_all(parent)?; - // We do this in two steps (first create a temporary directory and exlucde - // it from backups, then rename it to the desired name. If we created the - // directory directly where it should be and then excluded it from backups - // we would risk a situation where cargo is interrupted right after the directory - // creation but before the exclusion the the directory would remain non-excluded from - // backups because we only perform exclusion right after we created the directory - // ourselves. - // - // We need the tempdir created in parent instead of $TMP, because only then we can be - // easily sure that rename() will succeed (the new name needs to be on the same mount - // point as the old one). 
- let tempdir = TempFileBuilder::new().prefix(base).tempdir_in(parent)?; - exclude_from_backups(tempdir.path()); - // Previously std::fs::create_dir_all() (through paths::create_dir_all()) was used - // here to create the directory directly and fs::create_dir_all() explicitly treats - // the directory being created concurrently by another thread or process as success, - // hence the check below to follow the existing behavior. If we get an error at - // rename() and suddently the directory (which didn't exist a moment earlier) exists - // we can infer from it it's another cargo process doing work. - if let Err(e) = fs::rename(tempdir.path(), path) { - if !path.exists() { - return Err(anyhow::Error::from(e)); - } - } - Ok(()) -} - -/// Marks the directory as excluded from archives/backups. -/// -/// This is recommended to prevent derived/temporary files from bloating backups. There are two -/// mechanisms used to achieve this right now: -/// -/// * A dedicated resource property excluding from Time Machine backups on macOS -/// * CACHEDIR.TAG files supported by various tools in a platform-independent way -fn exclude_from_backups(path: &Path) { - exclude_from_time_machine(path); - let _ = std::fs::write( - path.join("CACHEDIR.TAG"), - "Signature: 8a477f597d28d172789f06886806bc55 -# This file is a cache directory tag created by cargo. -# For information about cache directory tags see https://bford.info/cachedir/ -", - ); - // Similarly to exclude_from_time_machine() we ignore errors here as it's an optional feature. 
-} - -#[cfg(not(target_os = "macos"))] -fn exclude_from_time_machine(_: &Path) {} - -#[cfg(target_os = "macos")] -/// Marks files or directories as excluded from Time Machine on macOS -fn exclude_from_time_machine(path: &Path) { - use core_foundation::base::TCFType; - use core_foundation::{number, string, url}; - use std::ptr; - - // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey - let is_excluded_key: Result = "NSURLIsExcludedFromBackupKey".parse(); - let path = url::CFURL::from_path(path, false); - if let (Some(path), Ok(is_excluded_key)) = (path, is_excluded_key) { - unsafe { - url::CFURLSetResourcePropertyForKey( - path.as_concrete_TypeRef(), - is_excluded_key.as_concrete_TypeRef(), - number::kCFBooleanTrue as *const _, - ptr::null_mut(), - ); - } - } - // Errors are ignored, since it's an optional feature and failure - // doesn't prevent Cargo from working -} diff -Nru cargo-0.52.0/src/cargo/util/process_builder.rs cargo-0.54.0/src/cargo/util/process_builder.rs --- cargo-0.52.0/src/cargo/util/process_builder.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/process_builder.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,414 +0,0 @@ -use crate::util::{process_error, read2, CargoResult, CargoResultExt}; -use anyhow::bail; -use jobserver::Client; -use shell_escape::escape; -use std::collections::BTreeMap; -use std::env; -use std::ffi::{OsStr, OsString}; -use std::fmt; -use std::iter::once; -use std::path::Path; -use std::process::{Command, Output, Stdio}; - -/// A builder object for an external process, similar to `std::process::Command`. -#[derive(Clone, Debug)] -pub struct ProcessBuilder { - /// The program to execute. - program: OsString, - /// A list of arguments to pass to the program. - args: Vec, - /// Any environment variables that should be set for the program. - env: BTreeMap>, - /// The directory to run the program from. - cwd: Option, - /// The `make` jobserver. 
See the [jobserver crate][jobserver_docs] for - /// more information. - /// - /// [jobserver_docs]: https://docs.rs/jobserver/0.1.6/jobserver/ - jobserver: Option, - /// `true` to include environment variable in display. - display_env_vars: bool, -} - -impl fmt::Display for ProcessBuilder { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "`")?; - - if self.display_env_vars { - for (key, val) in self.env.iter() { - if let Some(val) = val { - let val = escape(val.to_string_lossy()); - if cfg!(windows) { - write!(f, "set {}={}&& ", key, val)?; - } else { - write!(f, "{}={} ", key, val)?; - } - } - } - } - - write!(f, "{}", self.program.to_string_lossy())?; - - for arg in &self.args { - write!(f, " {}", escape(arg.to_string_lossy()))?; - } - - write!(f, "`") - } -} - -impl ProcessBuilder { - /// (chainable) Sets the executable for the process. - pub fn program>(&mut self, program: T) -> &mut ProcessBuilder { - self.program = program.as_ref().to_os_string(); - self - } - - /// (chainable) Adds `arg` to the args list. - pub fn arg>(&mut self, arg: T) -> &mut ProcessBuilder { - self.args.push(arg.as_ref().to_os_string()); - self - } - - /// (chainable) Adds multiple `args` to the args list. - pub fn args>(&mut self, args: &[T]) -> &mut ProcessBuilder { - self.args - .extend(args.iter().map(|t| t.as_ref().to_os_string())); - self - } - - /// (chainable) Replaces the args list with the given `args`. - pub fn args_replace>(&mut self, args: &[T]) -> &mut ProcessBuilder { - self.args = args.iter().map(|t| t.as_ref().to_os_string()).collect(); - self - } - - /// (chainable) Sets the current working directory of the process. - pub fn cwd>(&mut self, path: T) -> &mut ProcessBuilder { - self.cwd = Some(path.as_ref().to_os_string()); - self - } - - /// (chainable) Sets an environment variable for the process. 
- pub fn env>(&mut self, key: &str, val: T) -> &mut ProcessBuilder { - self.env - .insert(key.to_string(), Some(val.as_ref().to_os_string())); - self - } - - /// (chainable) Unsets an environment variable for the process. - pub fn env_remove(&mut self, key: &str) -> &mut ProcessBuilder { - self.env.insert(key.to_string(), None); - self - } - - /// Gets the executable name. - pub fn get_program(&self) -> &OsString { - &self.program - } - - /// Gets the program arguments. - pub fn get_args(&self) -> &[OsString] { - &self.args - } - - /// Gets the current working directory for the process. - pub fn get_cwd(&self) -> Option<&Path> { - self.cwd.as_ref().map(Path::new) - } - - /// Gets an environment variable as the process will see it (will inherit from environment - /// unless explicitally unset). - pub fn get_env(&self, var: &str) -> Option { - self.env - .get(var) - .cloned() - .or_else(|| Some(env::var_os(var))) - .and_then(|s| s) - } - - /// Gets all environment variables explicitly set or unset for the process (not inherited - /// vars). - pub fn get_envs(&self) -> &BTreeMap> { - &self.env - } - - /// Sets the `make` jobserver. See the [jobserver crate][jobserver_docs] for - /// more information. - /// - /// [jobserver_docs]: https://docs.rs/jobserver/0.1.6/jobserver/ - pub fn inherit_jobserver(&mut self, jobserver: &Client) -> &mut Self { - self.jobserver = Some(jobserver.clone()); - self - } - - /// Enables environment variable display. - pub fn display_env_vars(&mut self) -> &mut Self { - self.display_env_vars = true; - self - } - - /// Runs the process, waiting for completion, and mapping non-success exit codes to an error. 
- pub fn exec(&self) -> CargoResult<()> { - let mut command = self.build_command(); - let exit = command.status().chain_err(|| { - process_error(&format!("could not execute process {}", self), None, None) - })?; - - if exit.success() { - Ok(()) - } else { - Err(process_error( - &format!("process didn't exit successfully: {}", self), - Some(exit), - None, - ) - .into()) - } - } - - /// Replaces the current process with the target process. - /// - /// On Unix, this executes the process using the Unix syscall `execvp`, which will block - /// this process, and will only return if there is an error. - /// - /// On Windows this isn't technically possible. Instead we emulate it to the best of our - /// ability. One aspect we fix here is that we specify a handler for the Ctrl-C handler. - /// In doing so (and by effectively ignoring it) we should emulate proxying Ctrl-C - /// handling to the application at hand, which will either terminate or handle it itself. - /// According to Microsoft's documentation at - /// . - /// the Ctrl-C signal is sent to all processes attached to a terminal, which should - /// include our child process. If the child terminates then we'll reap them in Cargo - /// pretty quickly, and if the child handles the signal then we won't terminate - /// (and we shouldn't!) until the process itself later exits. - pub fn exec_replace(&self) -> CargoResult<()> { - imp::exec_replace(self) - } - - /// Executes the process, returning the stdio output, or an error if non-zero exit status. 
- pub fn exec_with_output(&self) -> CargoResult { - let mut command = self.build_command(); - - let output = command.output().chain_err(|| { - process_error(&format!("could not execute process {}", self), None, None) - })?; - - if output.status.success() { - Ok(output) - } else { - Err(process_error( - &format!("process didn't exit successfully: {}", self), - Some(output.status), - Some(&output), - ) - .into()) - } - } - - /// Executes a command, passing each line of stdout and stderr to the supplied callbacks, which - /// can mutate the string data. - /// - /// If any invocations of these function return an error, it will be propagated. - /// - /// If `capture_output` is true, then all the output will also be buffered - /// and stored in the returned `Output` object. If it is false, no caching - /// is done, and the callbacks are solely responsible for handling the - /// output. - pub fn exec_with_streaming( - &self, - on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>, - on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>, - capture_output: bool, - ) -> CargoResult { - let mut stdout = Vec::new(); - let mut stderr = Vec::new(); - - let mut cmd = self.build_command(); - cmd.stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .stdin(Stdio::null()); - - let mut callback_error = None; - let status = (|| { - let mut child = cmd.spawn()?; - let out = child.stdout.take().unwrap(); - let err = child.stderr.take().unwrap(); - read2(out, err, &mut |is_out, data, eof| { - let idx = if eof { - data.len() - } else { - match data.iter().rposition(|b| *b == b'\n') { - Some(i) => i + 1, - None => return, - } - }; - { - // scope for new_lines - let new_lines = if capture_output { - let dst = if is_out { &mut stdout } else { &mut stderr }; - let start = dst.len(); - let data = data.drain(..idx); - dst.extend(data); - &dst[start..] 
- } else { - &data[..idx] - }; - for line in String::from_utf8_lossy(new_lines).lines() { - if callback_error.is_some() { - break; - } - let callback_result = if is_out { - on_stdout_line(line) - } else { - on_stderr_line(line) - }; - if let Err(e) = callback_result { - callback_error = Some(e); - } - } - } - if !capture_output { - data.drain(..idx); - } - })?; - child.wait() - })() - .chain_err(|| process_error(&format!("could not execute process {}", self), None, None))?; - let output = Output { - stdout, - stderr, - status, - }; - - { - let to_print = if capture_output { Some(&output) } else { None }; - if let Some(e) = callback_error { - let cx = process_error( - &format!("failed to parse process output: {}", self), - Some(output.status), - to_print, - ); - bail!(anyhow::Error::new(cx).context(e)); - } else if !output.status.success() { - bail!(process_error( - &format!("process didn't exit successfully: {}", self), - Some(output.status), - to_print, - )); - } - } - - Ok(output) - } - - /// Converts `ProcessBuilder` into a `std::process::Command`, and handles the jobserver, if - /// present. - pub fn build_command(&self) -> Command { - let mut command = Command::new(&self.program); - if let Some(cwd) = self.get_cwd() { - command.current_dir(cwd); - } - for arg in &self.args { - command.arg(arg); - } - for (k, v) in &self.env { - match *v { - Some(ref v) => { - command.env(k, v); - } - None => { - command.env_remove(k); - } - } - } - if let Some(ref c) = self.jobserver { - c.configure(&mut command); - } - command - } - - /// Wraps an existing command with the provided wrapper, if it is present and valid. 
- /// - /// # Examples - /// - /// ```rust - /// use cargo::util::{ProcessBuilder, process}; - /// // Running this would execute `rustc` - /// let cmd: ProcessBuilder = process("rustc"); - /// - /// // Running this will execute `sccache rustc` - /// let cmd = cmd.wrapped(Some("sccache")); - /// ``` - pub fn wrapped(mut self, wrapper: Option>) -> Self { - let wrapper = if let Some(wrapper) = wrapper.as_ref() { - wrapper.as_ref() - } else { - return self; - }; - - if wrapper.is_empty() { - return self; - } - - let args = once(self.program).chain(self.args.into_iter()).collect(); - - self.program = wrapper.to_os_string(); - self.args = args; - - self - } -} - -/// A helper function to create a `ProcessBuilder`. -pub fn process>(cmd: T) -> ProcessBuilder { - ProcessBuilder { - program: cmd.as_ref().to_os_string(), - args: Vec::new(), - cwd: None, - env: BTreeMap::new(), - jobserver: None, - display_env_vars: false, - } -} - -#[cfg(unix)] -mod imp { - use crate::util::{process_error, ProcessBuilder}; - use crate::CargoResult; - use std::os::unix::process::CommandExt; - - pub fn exec_replace(process_builder: &ProcessBuilder) -> CargoResult<()> { - let mut command = process_builder.build_command(); - let error = command.exec(); - Err(anyhow::Error::from(error).context(process_error( - &format!("could not execute process {}", process_builder), - None, - None, - ))) - } -} - -#[cfg(windows)] -mod imp { - use crate::util::{process_error, ProcessBuilder}; - use crate::CargoResult; - use winapi::shared::minwindef::{BOOL, DWORD, FALSE, TRUE}; - use winapi::um::consoleapi::SetConsoleCtrlHandler; - - unsafe extern "system" fn ctrlc_handler(_: DWORD) -> BOOL { - // Do nothing; let the child process handle it. 
- TRUE - } - - pub fn exec_replace(process_builder: &ProcessBuilder) -> CargoResult<()> { - unsafe { - if SetConsoleCtrlHandler(Some(ctrlc_handler), TRUE) == FALSE { - return Err(process_error("Could not set Ctrl-C handler.", None, None).into()); - } - } - - // Just execute the process as normal. - process_builder.exec() - } -} diff -Nru cargo-0.52.0/src/cargo/util/progress.rs cargo-0.54.0/src/cargo/util/progress.rs --- cargo-0.52.0/src/cargo/util/progress.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/progress.rs 2021-04-27 14:35:53.000000000 +0000 @@ -4,8 +4,8 @@ use crate::core::shell::Verbosity; use crate::util::config::ProgressWhen; -use crate::util::{is_ci, CargoResult, Config}; - +use crate::util::{CargoResult, Config}; +use cargo_util::is_ci; use unicode_width::UnicodeWidthChar; pub struct Progress<'cfg> { diff -Nru cargo-0.52.0/src/cargo/util/read2.rs cargo-0.54.0/src/cargo/util/read2.rs --- cargo-0.52.0/src/cargo/util/read2.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/read2.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,178 +0,0 @@ -pub use self::imp::read2; - -#[cfg(unix)] -mod imp { - use std::io; - use std::io::prelude::*; - use std::mem; - use std::os::unix::prelude::*; - use std::process::{ChildStderr, ChildStdout}; - - pub fn read2( - mut out_pipe: ChildStdout, - mut err_pipe: ChildStderr, - data: &mut dyn FnMut(bool, &mut Vec, bool), - ) -> io::Result<()> { - unsafe { - libc::fcntl(out_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); - libc::fcntl(err_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); - } - - let mut out_done = false; - let mut err_done = false; - let mut out = Vec::new(); - let mut err = Vec::new(); - - let mut fds: [libc::pollfd; 2] = unsafe { mem::zeroed() }; - fds[0].fd = out_pipe.as_raw_fd(); - fds[0].events = libc::POLLIN; - fds[1].fd = err_pipe.as_raw_fd(); - fds[1].events = libc::POLLIN; - let mut nfds = 2; - let mut errfd = 1; - - while nfds > 0 { - // wait for either 
pipe to become readable using `select` - let r = unsafe { libc::poll(fds.as_mut_ptr(), nfds, -1) }; - if r == -1 { - let err = io::Error::last_os_error(); - if err.kind() == io::ErrorKind::Interrupted { - continue; - } - return Err(err); - } - - // Read as much as we can from each pipe, ignoring EWOULDBLOCK or - // EAGAIN. If we hit EOF, then this will happen because the underlying - // reader will return Ok(0), in which case we'll see `Ok` ourselves. In - // this case we flip the other fd back into blocking mode and read - // whatever's leftover on that file descriptor. - let handle = |res: io::Result<_>| match res { - Ok(_) => Ok(true), - Err(e) => { - if e.kind() == io::ErrorKind::WouldBlock { - Ok(false) - } else { - Err(e) - } - } - }; - if !err_done && fds[errfd].revents != 0 && handle(err_pipe.read_to_end(&mut err))? { - err_done = true; - nfds -= 1; - } - data(false, &mut err, err_done); - if !out_done && fds[0].revents != 0 && handle(out_pipe.read_to_end(&mut out))? { - out_done = true; - fds[0].fd = err_pipe.as_raw_fd(); - errfd = 0; - nfds -= 1; - } - data(true, &mut out, out_done); - } - Ok(()) - } -} - -#[cfg(windows)] -mod imp { - use std::io; - use std::os::windows::prelude::*; - use std::process::{ChildStderr, ChildStdout}; - use std::slice; - - use miow::iocp::{CompletionPort, CompletionStatus}; - use miow::pipe::NamedPipe; - use miow::Overlapped; - use winapi::shared::winerror::ERROR_BROKEN_PIPE; - - struct Pipe<'a> { - dst: &'a mut Vec, - overlapped: Overlapped, - pipe: NamedPipe, - done: bool, - } - - pub fn read2( - out_pipe: ChildStdout, - err_pipe: ChildStderr, - data: &mut dyn FnMut(bool, &mut Vec, bool), - ) -> io::Result<()> { - let mut out = Vec::new(); - let mut err = Vec::new(); - - let port = CompletionPort::new(1)?; - port.add_handle(0, &out_pipe)?; - port.add_handle(1, &err_pipe)?; - - unsafe { - let mut out_pipe = Pipe::new(out_pipe, &mut out); - let mut err_pipe = Pipe::new(err_pipe, &mut err); - - out_pipe.read()?; - 
err_pipe.read()?; - - let mut status = [CompletionStatus::zero(), CompletionStatus::zero()]; - - while !out_pipe.done || !err_pipe.done { - for status in port.get_many(&mut status, None)? { - if status.token() == 0 { - out_pipe.complete(status); - data(true, out_pipe.dst, out_pipe.done); - out_pipe.read()?; - } else { - err_pipe.complete(status); - data(false, err_pipe.dst, err_pipe.done); - err_pipe.read()?; - } - } - } - - Ok(()) - } - } - - impl<'a> Pipe<'a> { - unsafe fn new(p: P, dst: &'a mut Vec) -> Pipe<'a> { - Pipe { - dst, - pipe: NamedPipe::from_raw_handle(p.into_raw_handle()), - overlapped: Overlapped::zero(), - done: false, - } - } - - unsafe fn read(&mut self) -> io::Result<()> { - let dst = slice_to_end(self.dst); - match self.pipe.read_overlapped(dst, self.overlapped.raw()) { - Ok(_) => Ok(()), - Err(e) => { - if e.raw_os_error() == Some(ERROR_BROKEN_PIPE as i32) { - self.done = true; - Ok(()) - } else { - Err(e) - } - } - } - } - - unsafe fn complete(&mut self, status: &CompletionStatus) { - let prev = self.dst.len(); - self.dst.set_len(prev + status.bytes_transferred() as usize); - if status.bytes_transferred() == 0 { - self.done = true; - } - } - } - - unsafe fn slice_to_end(v: &mut Vec) -> &mut [u8] { - if v.capacity() == 0 { - v.reserve(16); - } - if v.capacity() == v.len() { - v.reserve(1); - } - slice::from_raw_parts_mut(v.as_mut_ptr().add(v.len()), v.capacity() - v.len()) - } -} diff -Nru cargo-0.52.0/src/cargo/util/rustc.rs cargo-0.54.0/src/cargo/util/rustc.rs --- cargo-0.52.0/src/cargo/util/rustc.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/rustc.rs 2021-04-27 14:35:53.000000000 +0000 @@ -4,12 +4,13 @@ use std::path::{Path, PathBuf}; use std::sync::Mutex; +use anyhow::Context as _; +use cargo_util::{paths, ProcessBuilder, ProcessError}; use log::{debug, info, warn}; use serde::{Deserialize, Serialize}; use crate::util::interning::InternedString; -use crate::util::paths; -use crate::util::{self, profile, 
CargoResult, CargoResultExt, ProcessBuilder, StableHasher}; +use crate::util::{profile, CargoResult, StableHasher}; /// Information on the `rustc` executable #[derive(Debug)] @@ -45,11 +46,17 @@ ) -> CargoResult { let _p = profile::start("Rustc::new"); - let mut cache = Cache::load(&path, rustup_rustc, cache_location); + let mut cache = Cache::load( + wrapper.as_deref(), + workspace_wrapper.as_deref(), + &path, + rustup_rustc, + cache_location, + ); - let mut cmd = util::process(&path); + let mut cmd = ProcessBuilder::new(&path); cmd.arg("-vV"); - let verbose_version = cache.cached_output(&cmd)?.0; + let verbose_version = cache.cached_output(&cmd, 0)?.0; let extract = |field: &str| -> CargoResult<&str> { verbose_version @@ -66,7 +73,7 @@ }; let host = InternedString::new(extract("host: ")?); - let version = semver::Version::parse(extract("release: ")?).chain_err(|| { + let version = semver::Version::parse(extract("release: ")?).with_context(|| { format!( "rustc version does not appear to be a valid semver version, from:\n{}", verbose_version @@ -86,22 +93,39 @@ /// Gets a process builder set up to use the found rustc version, with a wrapper if `Some`. pub fn process(&self) -> ProcessBuilder { - util::process(self.path.as_path()).wrapped(self.wrapper.as_ref()) + ProcessBuilder::new(self.path.as_path()).wrapped(self.wrapper.as_ref()) } /// Gets a process builder set up to use the found rustc version, with a wrapper if `Some`. pub fn workspace_process(&self) -> ProcessBuilder { - util::process(self.path.as_path()) + ProcessBuilder::new(self.path.as_path()) .wrapped(self.workspace_wrapper.as_ref()) .wrapped(self.wrapper.as_ref()) } pub fn process_no_wrapper(&self) -> ProcessBuilder { - util::process(&self.path) + ProcessBuilder::new(&self.path) } - pub fn cached_output(&self, cmd: &ProcessBuilder) -> CargoResult<(String, String)> { - self.cache.lock().unwrap().cached_output(cmd) + /// Gets the output for the given command. 
+ /// + /// This will return the cached value if available, otherwise it will run + /// the command and cache the output. + /// + /// `extra_fingerprint` is extra data to include in the cache fingerprint. + /// Use this if there is other information about the environment that may + /// affect the output that is not part of `cmd`. + /// + /// Returns a tuple of strings `(stdout, stderr)`. + pub fn cached_output( + &self, + cmd: &ProcessBuilder, + extra_fingerprint: u64, + ) -> CargoResult<(String, String)> { + self.cache + .lock() + .unwrap() + .cached_output(cmd, extra_fingerprint) } } @@ -137,8 +161,17 @@ } impl Cache { - fn load(rustc: &Path, rustup_rustc: &Path, cache_location: Option) -> Cache { - match (cache_location, rustc_fingerprint(rustc, rustup_rustc)) { + fn load( + wrapper: Option<&Path>, + workspace_wrapper: Option<&Path>, + rustc: &Path, + rustup_rustc: &Path, + cache_location: Option, + ) -> Cache { + match ( + cache_location, + rustc_fingerprint(wrapper, workspace_wrapper, rustc, rustup_rustc), + ) { (Some(cache_location), Ok(rustc_fingerprint)) => { let empty = CacheData { rustc_fingerprint, @@ -187,8 +220,12 @@ } } - fn cached_output(&mut self, cmd: &ProcessBuilder) -> CargoResult<(String, String)> { - let key = process_fingerprint(cmd); + fn cached_output( + &mut self, + cmd: &ProcessBuilder, + extra_fingerprint: u64, + ) -> CargoResult<(String, String)> { + let key = process_fingerprint(cmd, extra_fingerprint); if self.data.outputs.contains_key(&key) { debug!("rustc info cache hit"); } else { @@ -197,13 +234,13 @@ let output = cmd .build_command() .output() - .chain_err(|| format!("could not execute process {} (never executed)", cmd))?; + .with_context(|| format!("could not execute process {} (never executed)", cmd))?; let stdout = String::from_utf8(output.stdout) .map_err(|e| anyhow::anyhow!("{}: {:?}", e, e.as_bytes())) - .chain_err(|| anyhow::anyhow!("`{}` didn't return utf8 output", cmd))?; + .with_context(|| format!("`{}` didn't return 
utf8 output", cmd))?; let stderr = String::from_utf8(output.stderr) .map_err(|e| anyhow::anyhow!("{}: {:?}", e, e.as_bytes())) - .chain_err(|| anyhow::anyhow!("`{}` didn't return utf8 output", cmd))?; + .with_context(|| format!("`{}` didn't return utf8 output", cmd))?; self.data.outputs.insert( key, Output { @@ -211,7 +248,7 @@ status: if output.status.success() { String::new() } else { - util::exit_status_to_string(output.status) + cargo_util::exit_status_to_string(output.status) }, code: output.status.code(), stdout, @@ -224,7 +261,7 @@ if output.success { Ok((output.stdout.clone(), output.stderr.clone())) } else { - Err(util::process_error_raw( + Err(ProcessError::new_raw( &format!("process didn't exit successfully: {}", cmd), output.code, &output.status, @@ -251,13 +288,29 @@ } } -fn rustc_fingerprint(path: &Path, rustup_rustc: &Path) -> CargoResult { +fn rustc_fingerprint( + wrapper: Option<&Path>, + workspace_wrapper: Option<&Path>, + rustc: &Path, + rustup_rustc: &Path, +) -> CargoResult { let mut hasher = StableHasher::new(); - let path = paths::resolve_executable(path)?; - path.hash(&mut hasher); - - paths::mtime(&path)?.hash(&mut hasher); + let hash_exe = |hasher: &mut _, path| -> CargoResult<()> { + let path = paths::resolve_executable(path)?; + path.hash(hasher); + + paths::mtime(&path)?.hash(hasher); + Ok(()) + }; + + hash_exe(&mut hasher, rustc)?; + if let Some(wrapper) = wrapper { + hash_exe(&mut hasher, wrapper)?; + } + if let Some(workspace_wrapper) = workspace_wrapper { + hash_exe(&mut hasher, workspace_wrapper)?; + } // Rustup can change the effective compiler without touching // the `rustc` binary, so we try to account for this here. @@ -270,7 +323,7 @@ // // If we don't see rustup env vars, but it looks like the compiler // is managed by rustup, we conservatively bail out. 
- let maybe_rustup = rustup_rustc == path; + let maybe_rustup = rustup_rustc == rustc; match ( maybe_rustup, env::var("RUSTUP_HOME"), @@ -295,8 +348,9 @@ Ok(hasher.finish()) } -fn process_fingerprint(cmd: &ProcessBuilder) -> u64 { +fn process_fingerprint(cmd: &ProcessBuilder, extra_fingerprint: u64) -> u64 { let mut hasher = StableHasher::new(); + extra_fingerprint.hash(&mut hasher); cmd.get_args().hash(&mut hasher); let mut env = cmd.get_envs().iter().collect::>(); env.sort_unstable(); diff -Nru cargo-0.52.0/src/cargo/util/sha256.rs cargo-0.54.0/src/cargo/util/sha256.rs --- cargo-0.52.0/src/cargo/util/sha256.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/sha256.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,55 +0,0 @@ -use crate::util::{paths, CargoResult, CargoResultExt}; -use crypto_hash::{Algorithm, Hasher}; -use std::fs::File; -use std::io::{self, Read, Write}; -use std::path::Path; - -pub struct Sha256(Hasher); - -impl Sha256 { - pub fn new() -> Sha256 { - let hasher = Hasher::new(Algorithm::SHA256); - Sha256(hasher) - } - - pub fn update(&mut self, bytes: &[u8]) -> &mut Sha256 { - let _ = self.0.write_all(bytes); - self - } - - pub fn update_file(&mut self, mut file: &File) -> io::Result<&mut Sha256> { - let mut buf = [0; 64 * 1024]; - loop { - let n = file.read(&mut buf)?; - if n == 0 { - break Ok(self); - } - self.update(&buf[..n]); - } - } - - pub fn update_path>(&mut self, path: P) -> CargoResult<&mut Sha256> { - let path = path.as_ref(); - let file = paths::open(path)?; - self.update_file(&file) - .chain_err(|| format!("failed to read `{}`", path.display()))?; - Ok(self) - } - - pub fn finish(&mut self) -> [u8; 32] { - let mut ret = [0u8; 32]; - let data = self.0.finish(); - ret.copy_from_slice(&data[..]); - ret - } - - pub fn finish_hex(&mut self) -> String { - hex::encode(self.finish()) - } -} - -impl Default for Sha256 { - fn default() -> Self { - Self::new() - } -} diff -Nru cargo-0.52.0/src/cargo/util/toml/mod.rs 
cargo-0.54.0/src/cargo/util/toml/mod.rs --- cargo-0.52.0/src/cargo/util/toml/mod.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/toml/mod.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,11 +1,13 @@ use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::fmt; +use std::marker::PhantomData; use std::path::{Path, PathBuf}; use std::rc::Rc; use std::str; -use anyhow::{anyhow, bail}; +use anyhow::{anyhow, bail, Context as _}; use cargo_platform::Platform; +use cargo_util::paths; use log::{debug, trace}; use semver::{self, VersionReq}; use serde::de; @@ -13,18 +15,17 @@ use serde::{Deserialize, Serialize}; use url::Url; +use crate::core::compiler::{CompileKind, CompileTarget}; use crate::core::dependency::DepKind; use crate::core::manifest::{ManifestMetadata, TargetSourcePath, Warnings}; -use crate::core::nightly_features_allowed; -use crate::core::profiles::Strip; use crate::core::resolver::ResolveBehavior; use crate::core::{Dependency, Manifest, PackageId, Summary, Target}; use crate::core::{Edition, EitherManifest, Feature, Features, VirtualManifest, Workspace}; use crate::core::{GitReference, PackageIdSpec, SourceId, WorkspaceConfig, WorkspaceRootConfig}; use crate::sources::{CRATES_IO_INDEX, CRATES_IO_REGISTRY}; -use crate::util::errors::{CargoResult, CargoResultExt, ManifestError}; +use crate::util::errors::{CargoResult, ManifestError}; use crate::util::interning::InternedString; -use crate::util::{self, paths, validate_package_name, Config, IntoUrl}; +use crate::util::{self, config::ConfigRelativePath, validate_package_name, Config, IntoUrl}; mod targets; use self::targets::targets; @@ -50,7 +51,7 @@ let contents = paths::read(path).map_err(|err| ManifestError::new(err, path.into()))?; do_read_manifest(&contents, path, source_id, config) - .chain_err(|| format!("failed to parse manifest at `{}`", path.display())) + .with_context(|| format!("failed to parse manifest at `{}`", path.display())) .map_err(|err| ManifestError::new(err, 
path.into())) } @@ -201,25 +202,25 @@ #[derive(Clone, Debug, Serialize)] #[serde(untagged)] -pub enum TomlDependency { +pub enum TomlDependency

{ /// In the simple format, only a version is specified, eg. /// `package = ""` Simple(String), /// The simple format is equivalent to a detailed dependency /// specifying only a version, eg. /// `package = { version = "" }` - Detailed(DetailedTomlDependency), + Detailed(DetailedTomlDependency

), } -impl<'de> de::Deserialize<'de> for TomlDependency { +impl<'de, P: Deserialize<'de>> de::Deserialize<'de> for TomlDependency

{ fn deserialize(deserializer: D) -> Result where D: de::Deserializer<'de>, { - struct TomlDependencyVisitor; + struct TomlDependencyVisitor

(PhantomData

); - impl<'de> de::Visitor<'de> for TomlDependencyVisitor { - type Value = TomlDependency; + impl<'de, P: Deserialize<'de>> de::Visitor<'de> for TomlDependencyVisitor

{ + type Value = TomlDependency

; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str( @@ -244,13 +245,29 @@ } } - deserializer.deserialize_any(TomlDependencyVisitor) + deserializer.deserialize_any(TomlDependencyVisitor(PhantomData)) } } -#[derive(Deserialize, Serialize, Clone, Debug, Default)] +pub trait ResolveToPath { + fn resolve(&self, config: &Config) -> PathBuf; +} + +impl ResolveToPath for String { + fn resolve(&self, _: &Config) -> PathBuf { + self.into() + } +} + +impl ResolveToPath for ConfigRelativePath { + fn resolve(&self, c: &Config) -> PathBuf { + self.resolve_path(c) + } +} + +#[derive(Deserialize, Serialize, Clone, Debug)] #[serde(rename_all = "kebab-case")] -pub struct DetailedTomlDependency { +pub struct DetailedTomlDependency

{ version: Option, registry: Option, /// The URL of the `registry` field. @@ -260,7 +277,9 @@ /// registry names configured, so Cargo can't rely on just the name for /// crates published by other users. registry_index: Option, - path: Option, + // `path` is relative to the file it appears in. If that's a `Cargo.toml`, it'll be relative to + // that TOML file, and if it's a `.cargo/config` file, it'll be relative to that file. + path: Option

, git: Option, branch: Option, tag: Option, @@ -274,6 +293,28 @@ public: Option, } +// Explicit implementation so we avoid pulling in P: Default +impl

Default for DetailedTomlDependency

{ + fn default() -> Self { + Self { + version: Default::default(), + registry: Default::default(), + registry_index: Default::default(), + path: Default::default(), + git: Default::default(), + branch: Default::default(), + tag: Default::default(), + rev: Default::default(), + features: Default::default(), + optional: Default::default(), + default_features: Default::default(), + default_features2: Default::default(), + package: Default::default(), + public: Default::default(), + } + } +} + /// This type is used to deserialize `Cargo.toml` files. #[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "kebab-case")] @@ -378,53 +419,13 @@ } } -#[derive(Clone, Debug, Serialize, Eq, PartialEq)] -#[serde(untagged)] +#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] +#[serde(untagged, expecting = "expected a boolean or an integer")] pub enum U32OrBool { U32(u32), Bool(bool), } -impl<'de> de::Deserialize<'de> for U32OrBool { - fn deserialize(deserializer: D) -> Result - where - D: de::Deserializer<'de>, - { - struct Visitor; - - impl<'de> de::Visitor<'de> for Visitor { - type Value = U32OrBool; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("a boolean or an integer") - } - - fn visit_bool(self, b: bool) -> Result - where - E: de::Error, - { - Ok(U32OrBool::Bool(b)) - } - - fn visit_i64(self, u: i64) -> Result - where - E: de::Error, - { - Ok(U32OrBool::U32(u as u32)) - } - - fn visit_u64(self, u: u64) -> Result - where - E: de::Error, - { - Ok(U32OrBool::U32(u as u32)) - } - } - - deserializer.deserialize_any(Visitor) - } -} - #[derive(Deserialize, Serialize, Clone, Debug, Default, Eq, PartialEq)] #[serde(default, rename_all = "kebab-case")] pub struct TomlProfile { @@ -442,7 +443,7 @@ pub build_override: Option>, pub dir_name: Option, pub inherits: Option, - pub strip: Option, + pub strip: Option, } #[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash)] @@ -686,8 +687,8 @@ self.dir_name = 
Some(*v); } - if let Some(v) = profile.strip { - self.strip = Some(v); + if let Some(v) = &profile.strip { + self.strip = Some(v.clone()); } } } @@ -729,46 +730,13 @@ } } -#[derive(Clone, Debug, Serialize, Eq, PartialEq)] -#[serde(untagged)] +#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] +#[serde(untagged, expecting = "expected a boolean or a string")] pub enum StringOrBool { String(String), Bool(bool), } -impl<'de> de::Deserialize<'de> for StringOrBool { - fn deserialize(deserializer: D) -> Result - where - D: de::Deserializer<'de>, - { - struct Visitor; - - impl<'de> de::Visitor<'de> for Visitor { - type Value = StringOrBool; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("a boolean or a string") - } - - fn visit_bool(self, b: bool) -> Result - where - E: de::Error, - { - Ok(StringOrBool::Bool(b)) - } - - fn visit_str(self, s: &str) -> Result - where - E: de::Error, - { - Ok(StringOrBool::String(s.to_string())) - } - } - - deserializer.deserialize_any(Visitor) - } -} - #[derive(PartialEq, Clone, Debug, Serialize)] #[serde(untagged)] pub enum VecStringOrBool { @@ -826,6 +794,10 @@ authors: Option>, build: Option, metabuild: Option, + #[serde(rename = "default-target")] + default_target: Option, + #[serde(rename = "forced-target")] + forced_target: Option, links: Option, exclude: Option>, include: Option>, @@ -848,8 +820,11 @@ license: Option, license_file: Option, repository: Option, - metadata: Option, resolver: Option, + + // Note that this field must come last due to the way toml serialization + // works which requires tables to be emitted after all values. 
+ metadata: Option, } #[derive(Debug, Deserialize, Serialize)] @@ -858,8 +833,11 @@ #[serde(rename = "default-members")] default_members: Option>, exclude: Option>, - metadata: Option, resolver: Option, + + // Note that this field must come last due to the way toml serialization + // works which requires tables to be emitted after all values. + metadata: Option, } impl TomlProject { @@ -1038,7 +1016,7 @@ // Parse features first so they will be available when parsing other parts of the TOML. let empty = Vec::new(); let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty); - let features = Features::new(cargo_features, &mut warnings)?; + let features = Features::new(cargo_features, config, &mut warnings)?; let project = me.project.as_ref().or_else(|| me.package.as_ref()); let project = project.ok_or_else(|| anyhow!("no `package` section found"))?; @@ -1055,20 +1033,29 @@ let edition = if let Some(ref edition) = project.edition { features .require(Feature::edition()) - .chain_err(|| "editions are unstable")?; + .with_context(|| "editions are unstable")?; edition .parse() - .chain_err(|| "failed to parse the `edition` key")? + .with_context(|| "failed to parse the `edition` key")? 
} else { Edition::Edition2015 }; + if edition == Edition::Edition2021 { + features.require(Feature::edition2021())?; + } else if !edition.is_stable() { + // Guard in case someone forgets to add .require() + return Err(util::errors::internal(format!( + "edition {} should be gated", + edition + ))); + } let rust_version = if let Some(rust_version) = &project.rust_version { if features.require(Feature::rust_version()).is_err() { let mut msg = "`rust-version` is not supported on this version of Cargo and will be ignored" .to_string(); - if nightly_features_allowed() { + if config.nightly_features_allowed { msg.push_str( "\n\n\ consider adding `cargo-features = [\"rust-version\"]` to the manifest", @@ -1331,9 +1318,24 @@ } } + let default_kind = project + .default_target + .as_ref() + .map(|t| CompileTarget::new(&*t)) + .transpose()? + .map(CompileKind::Target); + let forced_kind = project + .forced_target + .as_ref() + .map(|t| CompileTarget::new(&*t)) + .transpose()? + .map(CompileKind::Target); + let custom_metadata = project.metadata.clone(); let mut manifest = Manifest::new( summary, + default_kind, + forced_kind, targets, exclude, include, @@ -1424,7 +1426,7 @@ let mut deps = Vec::new(); let empty = Vec::new(); let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty); - let features = Features::new(cargo_features, &mut warnings)?; + let features = Features::new(cargo_features, config, &mut warnings)?; let (replace, patch) = { let mut cx = Context { @@ -1488,7 +1490,7 @@ } let mut replace = Vec::new(); for (spec, replacement) in self.replace.iter().flatten() { - let mut spec = PackageIdSpec::parse(spec).chain_err(|| { + let mut spec = PackageIdSpec::parse(spec).with_context(|| { format!( "replacements must specify a valid semver \ version to replace, but `{}` does not", @@ -1532,7 +1534,7 @@ .config .get_registry_index(url) .or_else(|_| url.into_url()) - .chain_err(|| { + .with_context(|| { format!("[patch] entry `{}` should be a URL or registry name", 
url) })?, }; @@ -1620,7 +1622,37 @@ Ok(()) } -impl TomlDependency { +impl TomlDependency

{ + pub(crate) fn to_dependency_split( + &self, + name: &str, + pkgid: Option, + source_id: SourceId, + nested_paths: &mut Vec, + config: &Config, + warnings: &mut Vec, + platform: Option, + root: &Path, + features: &Features, + kind: Option, + ) -> CargoResult { + self.to_dependency( + name, + &mut Context { + pkgid, + deps: &mut Vec::new(), + source_id, + nested_paths, + config, + warnings, + platform, + root, + features, + }, + kind, + ) + } + fn to_dependency( &self, name: &str, @@ -1628,7 +1660,7 @@ kind: Option, ) -> CargoResult { match *self { - TomlDependency::Simple(ref version) => DetailedTomlDependency { + TomlDependency::Simple(ref version) => DetailedTomlDependency::

{ version: Some(version.clone()), ..Default::default() } @@ -1645,7 +1677,7 @@ } } -impl DetailedTomlDependency { +impl DetailedTomlDependency

{ fn to_dependency( &self, name_in_toml: &str, @@ -1693,6 +1725,35 @@ } } + // Early detection of potentially misused feature syntax + // instead of generating a "feature not found" error. + if let Some(features) = &self.features { + for feature in features { + if feature.contains('/') { + bail!( + "feature `{}` in dependency `{}` is not allowed to contain slashes\n\ + If you want to enable features of a transitive dependency, \ + the direct dependency needs to re-export those features from \ + the `[features]` table.", + feature, + name_in_toml + ); + } + if feature.starts_with("dep:") { + bail!( + "feature `{}` in dependency `{}` is not allowed to use explicit \ + `dep:` syntax\n\ + If you want to enable an optional dependency, specify the name \ + of the optional dependency without the `dep:` prefix, or specify \ + a feature from the dependency's `[features]` table that enables \ + the optional dependency.", + feature, + name_in_toml + ); + } + } + } + let new_source_id = match ( self.git.as_ref(), self.path.as_ref(), @@ -1755,7 +1816,8 @@ SourceId::for_git(&loc, reference)? } (None, Some(path), _, _) => { - cx.nested_paths.push(PathBuf::from(path)); + let path = path.resolve(cx.config); + cx.nested_paths.push(path.clone()); // If the source ID for the package we're parsing is a path // source, then we normalize the path here to get rid of // components like `..`. @@ -1766,7 +1828,7 @@ // built from. if cx.source_id.is_path() { let path = cx.root.join(path); - let path = util::normalize_path(&path); + let path = paths::normalize_path(&path); SourceId::for_path(&path)? 
} else { cx.source_id diff -Nru cargo-0.52.0/src/cargo/util/toml/targets.rs cargo-0.54.0/src/cargo/util/toml/targets.rs --- cargo-0.52.0/src/cargo/util/toml/targets.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/toml/targets.rs 2021-04-27 14:35:53.000000000 +0000 @@ -20,9 +20,11 @@ }; use crate::core::compiler::CrateType; use crate::core::{Edition, Feature, Features, Target}; -use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::errors::CargoResult; use crate::util::restricted_names; +use anyhow::Context as _; + pub fn targets( features: &Features, manifest: &TomlManifest, @@ -787,11 +789,11 @@ if let Some(edition) = toml.edition.clone() { features .require(Feature::edition()) - .chain_err(|| "editions are unstable")?; + .with_context(|| "editions are unstable")?; target.set_edition( edition .parse() - .chain_err(|| "failed to parse the `edition` key")?, + .with_context(|| "failed to parse the `edition` key")?, ); } Ok(()) diff -Nru cargo-0.52.0/src/cargo/util/vcs.rs cargo-0.54.0/src/cargo/util/vcs.rs --- cargo-0.52.0/src/cargo/util/vcs.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/vcs.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,5 +1,6 @@ -use crate::util::paths; -use crate::util::{process, CargoResult}; +use crate::util::CargoResult; +use cargo_util::paths; +use cargo_util::ProcessBuilder; use std::path::Path; // Check if we are in an existing repo. 
We define that to be true if either: @@ -41,11 +42,15 @@ impl HgRepo { pub fn init(path: &Path, cwd: &Path) -> CargoResult { - process("hg").cwd(cwd).arg("init").arg(path).exec()?; + ProcessBuilder::new("hg") + .cwd(cwd) + .arg("init") + .arg(path) + .exec()?; Ok(HgRepo) } pub fn discover(path: &Path, cwd: &Path) -> CargoResult { - process("hg") + ProcessBuilder::new("hg") .cwd(cwd) .arg("--cwd") .arg(path) @@ -57,7 +62,11 @@ impl PijulRepo { pub fn init(path: &Path, cwd: &Path) -> CargoResult { - process("pijul").cwd(cwd).arg("init").arg(path).exec()?; + ProcessBuilder::new("pijul") + .cwd(cwd) + .arg("init") + .arg(path) + .exec()?; Ok(PijulRepo) } } @@ -73,28 +82,28 @@ db_path.push(db_fname); // then create the fossil DB in that location - process("fossil") + ProcessBuilder::new("fossil") .cwd(cwd) .arg("init") .arg(&db_path) .exec()?; // open it in that new directory - process("fossil") + ProcessBuilder::new("fossil") .cwd(&path) .arg("open") .arg(db_fname) .exec()?; // set `target` as ignoreable and cleanable - process("fossil") + ProcessBuilder::new("fossil") .cwd(cwd) .arg("settings") .arg("ignore-glob") .arg("target") .exec()?; - process("fossil") + ProcessBuilder::new("fossil") .cwd(cwd) .arg("settings") .arg("clean-glob") diff -Nru cargo-0.52.0/src/cargo/util/workspace.rs cargo-0.54.0/src/cargo/util/workspace.rs --- cargo-0.52.0/src/cargo/util/workspace.rs 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/cargo/util/workspace.rs 2021-04-27 14:35:53.000000000 +0000 @@ -1,8 +1,12 @@ +use crate::core::compiler::Unit; +use crate::core::manifest::TargetSourcePath; use crate::core::{Target, Workspace}; use crate::ops::CompileOptions; use crate::util::CargoResult; use anyhow::bail; +use cargo_util::ProcessBuilder; use std::fmt::Write; +use std::path::PathBuf; fn get_available_targets<'a>( filter_fn: fn(&Target) -> bool, @@ -89,3 +93,38 @@ pub fn print_available_tests(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> { 
print_available_targets(Target::is_test, ws, options, "--test", "tests") } + +/// The path that we pass to rustc is actually fairly important because it will +/// show up in error messages (important for readability), debug information +/// (important for caching), etc. As a result we need to be pretty careful how we +/// actually invoke rustc. +/// +/// In general users don't expect `cargo build` to cause rebuilds if you change +/// directories. That could be if you just change directories in the package or +/// if you literally move the whole package wholesale to a new directory. As a +/// result we mostly don't factor in `cwd` to this calculation. Instead we try to +/// track the workspace as much as possible and we update the current directory +/// of rustc/rustdoc where appropriate. +/// +/// The first returned value here is the argument to pass to rustc, and the +/// second is the cwd that rustc should operate in. +pub fn path_args(ws: &Workspace<'_>, unit: &Unit) -> (PathBuf, PathBuf) { + let ws_root = ws.root(); + let src = match unit.target.src_path() { + TargetSourcePath::Path(path) => path.to_path_buf(), + TargetSourcePath::Metabuild => unit.pkg.manifest().metabuild_path(ws.target_dir()), + }; + assert!(src.is_absolute()); + if unit.pkg.package_id().source_id().is_path() { + if let Ok(path) = src.strip_prefix(ws_root) { + return (path.to_path_buf(), ws_root.to_path_buf()); + } + } + (src, unit.pkg.root().to_path_buf()) +} + +pub fn add_path_args(ws: &Workspace<'_>, unit: &Unit, cmd: &mut ProcessBuilder) { + let (arg, cwd) = path_args(ws, unit); + cmd.arg(arg); + cmd.cwd(cwd); +} diff -Nru cargo-0.52.0/src/doc/contrib/src/index.md cargo-0.54.0/src/doc/contrib/src/index.md --- cargo-0.52.0/src/doc/contrib/src/index.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/contrib/src/index.md 2021-04-27 14:35:53.000000000 +0000 @@ -8,7 +8,7 @@ issues], [improving the documentation], [fixing bugs], and working on [small] and [large features]. 
-If you have a general question about Cargo or it's internals, feel free to ask +If you have a general question about Cargo or its internals, feel free to ask on [Zulip]. This guide assumes you have some familiarity with Rust, and how to use Cargo, diff -Nru cargo-0.52.0/src/doc/man/cargo-build.md cargo-0.54.0/src/doc/man/cargo-build.md --- cargo-0.52.0/src/doc/man/cargo-build.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/cargo-build.md 2021-04-27 14:35:53.000000000 +0000 @@ -48,7 +48,7 @@ This option is unstable and available only on the [nightly channel](https://doc.rust-lang.org/book/appendix-07-nightly-rust.html) and requires the `-Z unstable-options` flag to enable. -See https://github.com/rust-lang/cargo/issues/6790 for more information. +See for more information. {{/option}} {{/options}} diff -Nru cargo-0.52.0/src/doc/man/cargo-fix.md cargo-0.54.0/src/doc/man/cargo-fix.md --- cargo-0.52.0/src/doc/man/cargo-fix.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/cargo-fix.md 2021-04-27 14:35:53.000000000 +0000 @@ -14,17 +14,13 @@ This Cargo subcommand will automatically take rustc's suggestions from diagnostics like warnings and apply them to your source code. This is intended to help automate tasks that rustc itself already knows how to tell you to fix! -The `cargo fix` subcommand is also being developed for the Rust 2018 edition -to provide code the ability to easily opt-in to the new edition without having -to worry about any breakage. Executing `cargo fix` will under the hood execute {{man "cargo-check" 1}}. Any warnings applicable to your crate will be automatically fixed (if possible) and all remaining warnings will be displayed when the check process is finished. For -example if you'd like to prepare for the 2018 edition, you can do so by -executing: +example if you'd like to apply all fixes to the current package, you can run: - cargo fix --edition + cargo fix which behaves the same as `cargo check --all-targets`. 
@@ -32,16 +28,40 @@ `cargo check`. If code is conditionally enabled with optional features, you will need to enable those features for that code to be analyzed: - cargo fix --edition --features foo + cargo fix --features foo Similarly, other `cfg` expressions like platform-specific code will need to pass `--target` to fix code for the given target. - cargo fix --edition --target x86_64-pc-windows-gnu + cargo fix --target x86_64-pc-windows-gnu If you encounter any problems with `cargo fix` or otherwise have any questions or feature requests please don't hesitate to file an issue at - +. + +### Edition migration + +The `cargo fix` subcommand can also be used to migrate a package from one +[edition] to the next. The general procedure is: + +1. Run `cargo fix --edition`. Consider also using the `--all-features` flag if + your project has multiple features. You may also want to run `cargo fix + --edition` multiple times with different `--target` flags if your project + has platform-specific code gated by `cfg` attributes. +2. Modify `Cargo.toml` to set the [edition field] to the new edition. +3. Run your project tests to verify that everything still works. If new + warnings are issued, you may want to consider running `cargo fix` again + (without the `--edition` flag) to apply any suggestions given by the + compiler. + +And hopefully that's it! Just keep in mind of the caveats mentioned above that +`cargo fix` cannot update code for inactive features or `cfg` expressions. +Also, in some rare cases the compiler is unable to automatically migrate all +code to the new edition, and this may require manual changes after building +with the new edition. + +[edition]: https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html +[edition field]: ../reference/manifest.html#the-edition-field ## OPTIONS @@ -56,9 +76,9 @@ {{/option}} {{#option "`--edition`" }} -Apply changes that will update the code to the latest edition. 
This will not +Apply changes that will update the code to the next edition. This will not update the edition in the `Cargo.toml` manifest, which must be updated -manually. +manually after `cargo fix --edition` has finished. {{/option}} {{#option "`--edition-idioms`" }} @@ -146,7 +166,7 @@ cargo fix -2. Convert a 2015 edition to 2018: +2. Update a package to prepare it for the next edition: cargo fix --edition diff -Nru cargo-0.52.0/src/doc/man/cargo-init.md cargo-0.54.0/src/doc/man/cargo-init.md --- cargo-0.52.0/src/doc/man/cargo-init.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/cargo-init.md 2021-04-27 14:35:53.000000000 +0000 @@ -20,8 +20,6 @@ If the directory is not already in a VCS repository, then a new repository is created (see `--vcs` below). -{{> description-new-authors }} - See {{man "cargo-new" 1}} for a similar command which will create a new package in a new directory. diff -Nru cargo-0.52.0/src/doc/man/cargo-install.md cargo-0.54.0/src/doc/man/cargo-install.md --- cargo-0.52.0/src/doc/man/cargo-install.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/cargo-install.md 2021-04-27 14:35:53.000000000 +0000 @@ -1,5 +1,6 @@ # cargo-install(1) {{*set actionverb="Install"}} +{{*set temp-target-dir=true}} ## NAME diff -Nru cargo-0.52.0/src/doc/man/cargo-new.md cargo-0.54.0/src/doc/man/cargo-new.md --- cargo-0.52.0/src/doc/man/cargo-new.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/cargo-new.md 2021-04-27 14:35:53.000000000 +0000 @@ -15,8 +15,6 @@ and a VCS ignore file. If the directory is not already in a VCS repository, then a new repository is created (see `--vcs` below). -{{> description-new-authors }} - See {{man "cargo-init" 1}} for a similar command which will create a new manifest in an existing directory. 
diff -Nru cargo-0.52.0/src/doc/man/cargo-pkgid.md cargo-0.54.0/src/doc/man/cargo-pkgid.md --- cargo-0.52.0/src/doc/man/cargo-pkgid.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/cargo-pkgid.md 2021-04-27 14:35:53.000000000 +0000 @@ -81,5 +81,9 @@ cargo pkgid https://github.com/rust-lang/crates.io-index#foo +4. Retrieve package specification for `foo` from a local package: + + cargo pkgid file:///path/to/local/package#foo + ## SEE ALSO {{man "cargo" 1}}, {{man "cargo-generate-lockfile" 1}}, {{man "cargo-metadata" 1}} diff -Nru cargo-0.52.0/src/doc/man/generated_txt/cargo-bench.txt cargo-0.54.0/src/doc/man/generated_txt/cargo-bench.txt --- cargo-0.52.0/src/doc/man/generated_txt/cargo-bench.txt 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/generated_txt/cargo-bench.txt 2021-04-27 14:35:53.000000000 +0000 @@ -241,24 +241,28 @@ times and consists of comma-separated values. Valid values: o human (default): Display in a human-readable text format. + Conflicts with short and json. - o short: Emit shorter, human-readable text messages. + o short: Emit shorter, human-readable text messages. Conflicts with + human and json. o json: Emit JSON messages to stdout. See the reference - for more details. + for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. + contains the "short" rendering from rustc. Cannot be used with + human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting - rustc's default color scheme. + rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. 
+ Cannot be used with human or short. Manifest Options --manifest-path path diff -Nru cargo-0.52.0/src/doc/man/generated_txt/cargo-build.txt cargo-0.54.0/src/doc/man/generated_txt/cargo-build.txt --- cargo-0.52.0/src/doc/man/generated_txt/cargo-build.txt 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/generated_txt/cargo-build.txt 2021-04-27 14:35:53.000000000 +0000 @@ -160,7 +160,8 @@ This option is unstable and available only on the nightly channel and requires the -Z unstable-options flag to enable. See - https://github.com/rust-lang/cargo/issues/6790 for more information. + for more + information. Display Options -v, --verbose @@ -190,24 +191,28 @@ times and consists of comma-separated values. Valid values: o human (default): Display in a human-readable text format. + Conflicts with short and json. - o short: Emit shorter, human-readable text messages. + o short: Emit shorter, human-readable text messages. Conflicts with + human and json. o json: Emit JSON messages to stdout. See the reference - for more details. + for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. + contains the "short" rendering from rustc. Cannot be used with + human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting - rustc's default color scheme. + rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. + Cannot be used with human or short. 
--build-plan Outputs a series of JSON messages to stdout that indicate the diff -Nru cargo-0.52.0/src/doc/man/generated_txt/cargo-check.txt cargo-0.54.0/src/doc/man/generated_txt/cargo-check.txt --- cargo-0.52.0/src/doc/man/generated_txt/cargo-check.txt 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/generated_txt/cargo-check.txt 2021-04-27 14:35:53.000000000 +0000 @@ -194,24 +194,28 @@ times and consists of comma-separated values. Valid values: o human (default): Display in a human-readable text format. + Conflicts with short and json. - o short: Emit shorter, human-readable text messages. + o short: Emit shorter, human-readable text messages. Conflicts with + human and json. o json: Emit JSON messages to stdout. See the reference - for more details. + for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. + contains the "short" rendering from rustc. Cannot be used with + human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting - rustc's default color scheme. + rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. + Cannot be used with human or short. Manifest Options --manifest-path path diff -Nru cargo-0.52.0/src/doc/man/generated_txt/cargo-doc.txt cargo-0.54.0/src/doc/man/generated_txt/cargo-doc.txt --- cargo-0.52.0/src/doc/man/generated_txt/cargo-doc.txt 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/generated_txt/cargo-doc.txt 2021-04-27 14:35:53.000000000 +0000 @@ -156,24 +156,28 @@ times and consists of comma-separated values. 
Valid values: o human (default): Display in a human-readable text format. + Conflicts with short and json. - o short: Emit shorter, human-readable text messages. + o short: Emit shorter, human-readable text messages. Conflicts with + human and json. o json: Emit JSON messages to stdout. See the reference - for more details. + for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. + contains the "short" rendering from rustc. Cannot be used with + human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting - rustc's default color scheme. + rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. + Cannot be used with human or short. Manifest Options --manifest-path path diff -Nru cargo-0.52.0/src/doc/man/generated_txt/cargo-fix.txt cargo-0.54.0/src/doc/man/generated_txt/cargo-fix.txt --- cargo-0.52.0/src/doc/man/generated_txt/cargo-fix.txt 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/generated_txt/cargo-fix.txt 2021-04-27 14:35:53.000000000 +0000 @@ -10,17 +10,15 @@ This Cargo subcommand will automatically take rustc's suggestions from diagnostics like warnings and apply them to your source code. This is intended to help automate tasks that rustc itself already knows how to - tell you to fix! The cargo fix subcommand is also being developed for - the Rust 2018 edition to provide code the ability to easily opt-in to - the new edition without having to worry about any breakage. + tell you to fix! Executing cargo fix will under the hood execute cargo-check(1). 
Any warnings applicable to your crate will be automatically fixed (if possible) and all remaining warnings will be displayed when the check - process is finished. For example if you'd like to prepare for the 2018 - edition, you can do so by executing: + process is finished. For example if you'd like to apply all fixes to the + current package, you can run: - cargo fix --edition + cargo fix which behaves the same as cargo check --all-targets. @@ -28,16 +26,42 @@ cargo check. If code is conditionally enabled with optional features, you will need to enable those features for that code to be analyzed: - cargo fix --edition --features foo + cargo fix --features foo Similarly, other cfg expressions like platform-specific code will need to pass --target to fix code for the given target. - cargo fix --edition --target x86_64-pc-windows-gnu + cargo fix --target x86_64-pc-windows-gnu If you encounter any problems with cargo fix or otherwise have any questions or feature requests please don't hesitate to file an issue at - + . + + Edition migration + The cargo fix subcommand can also be used to migrate a package from one + edition + + to the next. The general procedure is: + + 1. Run cargo fix --edition. Consider also using the --all-features flag + if your project has multiple features. You may also want to run cargo + fix --edition multiple times with different --target flags if your + project has platform-specific code gated by cfg attributes. + + 2. Modify Cargo.toml to set the edition field + + to the new edition. + + 3. Run your project tests to verify that everything still works. If new + warnings are issued, you may want to consider running cargo fix again + (without the --edition flag) to apply any suggestions given by the + compiler. + + And hopefully that's it! Just keep in mind of the caveats mentioned + above that cargo fix cannot update code for inactive features or cfg + expressions. 
Also, in some rare cases the compiler is unable to + automatically migrate all code to the new edition, and this may require + manual changes after building with the new edition. OPTIONS Fix options @@ -48,9 +72,9 @@ and manually fix. --edition - Apply changes that will update the code to the latest edition. This + Apply changes that will update the code to the next edition. This will not update the edition in the Cargo.toml manifest, which must - be updated manually. + be updated manually after cargo fix --edition has finished. --edition-idioms Apply suggestions that will update code to the preferred style for @@ -243,24 +267,28 @@ times and consists of comma-separated values. Valid values: o human (default): Display in a human-readable text format. + Conflicts with short and json. - o short: Emit shorter, human-readable text messages. + o short: Emit shorter, human-readable text messages. Conflicts with + human and json. o json: Emit JSON messages to stdout. See the reference - for more details. + for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. + contains the "short" rendering from rustc. Cannot be used with + human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting - rustc's default color scheme. + rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. + Cannot be used with human or short. Manifest Options --manifest-path path @@ -352,7 +380,7 @@ cargo fix - 2. Convert a 2015 edition to 2018: + 2. 
Update a package to prepare it for the next edition: cargo fix --edition diff -Nru cargo-0.52.0/src/doc/man/generated_txt/cargo-init.txt cargo-0.54.0/src/doc/man/generated_txt/cargo-init.txt --- cargo-0.52.0/src/doc/man/generated_txt/cargo-init.txt 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/generated_txt/cargo-init.txt 2021-04-27 14:35:53.000000000 +0000 @@ -17,44 +17,6 @@ If the directory is not already in a VCS repository, then a new repository is created (see --vcs below). - The "authors" field in the manifest is determined from the environment - or configuration settings. A name is required and is determined from - (first match wins): - - o cargo-new.name Cargo config value - - o CARGO_NAME environment variable - - o GIT_AUTHOR_NAME environment variable - - o GIT_COMMITTER_NAME environment variable - - o user.name git configuration value - - o USER environment variable - - o USERNAME environment variable - - o NAME environment variable - - The email address is optional and is determined from: - - o cargo-new.email Cargo config value - - o CARGO_EMAIL environment variable - - o GIT_AUTHOR_EMAIL environment variable - - o GIT_COMMITTER_EMAIL environment variable - - o user.email git configuration value - - o EMAIL environment variable - - See the reference - for more - information about configuration files. - See cargo-new(1) for a similar command which will create a new package in a new directory. diff -Nru cargo-0.52.0/src/doc/man/generated_txt/cargo-install.txt cargo-0.54.0/src/doc/man/generated_txt/cargo-install.txt --- cargo-0.52.0/src/doc/man/generated_txt/cargo-install.txt 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/generated_txt/cargo-install.txt 2021-04-27 14:35:53.000000000 +0000 @@ -190,7 +190,11 @@ also be specified with the CARGO_TARGET_DIR environment variable, or the build.target-dir config value . Defaults to - target in the root of the workspace. 
+ a new temporary folder located in the temporary directory of the + platform. + + When using --path, by default it will use target directory in the + workspace of the local crate unless --target-dir is specified. --debug Build with the dev profile instead the release profile. diff -Nru cargo-0.52.0/src/doc/man/generated_txt/cargo-new.txt cargo-0.54.0/src/doc/man/generated_txt/cargo-new.txt --- cargo-0.52.0/src/doc/man/generated_txt/cargo-new.txt 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/generated_txt/cargo-new.txt 2021-04-27 14:35:53.000000000 +0000 @@ -12,44 +12,6 @@ source file, and a VCS ignore file. If the directory is not already in a VCS repository, then a new repository is created (see --vcs below). - The "authors" field in the manifest is determined from the environment - or configuration settings. A name is required and is determined from - (first match wins): - - o cargo-new.name Cargo config value - - o CARGO_NAME environment variable - - o GIT_AUTHOR_NAME environment variable - - o GIT_COMMITTER_NAME environment variable - - o user.name git configuration value - - o USER environment variable - - o USERNAME environment variable - - o NAME environment variable - - The email address is optional and is determined from: - - o cargo-new.email Cargo config value - - o CARGO_EMAIL environment variable - - o GIT_AUTHOR_EMAIL environment variable - - o GIT_COMMITTER_EMAIL environment variable - - o user.email git configuration value - - o EMAIL environment variable - - See the reference - for more - information about configuration files. - See cargo-init(1) for a similar command which will create a new manifest in an existing directory. 
diff -Nru cargo-0.52.0/src/doc/man/generated_txt/cargo-pkgid.txt cargo-0.54.0/src/doc/man/generated_txt/cargo-pkgid.txt --- cargo-0.52.0/src/doc/man/generated_txt/cargo-pkgid.txt 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/generated_txt/cargo-pkgid.txt 2021-04-27 14:35:53.000000000 +0000 @@ -137,6 +137,10 @@ cargo pkgid https://github.com/rust-lang/crates.io-index#foo + 4. Retrieve package specification for foo from a local package: + + cargo pkgid file:///path/to/local/package#foo + SEE ALSO cargo(1), cargo-generate-lockfile(1), cargo-metadata(1) diff -Nru cargo-0.52.0/src/doc/man/generated_txt/cargo-run.txt cargo-0.54.0/src/doc/man/generated_txt/cargo-run.txt --- cargo-0.52.0/src/doc/man/generated_txt/cargo-run.txt 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/generated_txt/cargo-run.txt 2021-04-27 14:35:53.000000000 +0000 @@ -111,24 +111,28 @@ times and consists of comma-separated values. Valid values: o human (default): Display in a human-readable text format. + Conflicts with short and json. - o short: Emit shorter, human-readable text messages. + o short: Emit shorter, human-readable text messages. Conflicts with + human and json. o json: Emit JSON messages to stdout. See the reference - for more details. + for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. + contains the "short" rendering from rustc. Cannot be used with + human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting - rustc's default color scheme. + rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. 
Cargo's own JSON diagnostics and others coming from rustc are still emitted. + Cannot be used with human or short. Manifest Options --manifest-path path diff -Nru cargo-0.52.0/src/doc/man/generated_txt/cargo-rustc.txt cargo-0.54.0/src/doc/man/generated_txt/cargo-rustc.txt --- cargo-0.52.0/src/doc/man/generated_txt/cargo-rustc.txt 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/generated_txt/cargo-rustc.txt 2021-04-27 14:35:53.000000000 +0000 @@ -173,24 +173,28 @@ times and consists of comma-separated values. Valid values: o human (default): Display in a human-readable text format. + Conflicts with short and json. - o short: Emit shorter, human-readable text messages. + o short: Emit shorter, human-readable text messages. Conflicts with + human and json. o json: Emit JSON messages to stdout. See the reference - for more details. + for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. + contains the "short" rendering from rustc. Cannot be used with + human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting - rustc's default color scheme. + rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. + Cannot be used with human or short. 
Manifest Options --manifest-path path diff -Nru cargo-0.52.0/src/doc/man/generated_txt/cargo-rustdoc.txt cargo-0.54.0/src/doc/man/generated_txt/cargo-rustdoc.txt --- cargo-0.52.0/src/doc/man/generated_txt/cargo-rustdoc.txt 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/generated_txt/cargo-rustdoc.txt 2021-04-27 14:35:53.000000000 +0000 @@ -180,24 +180,28 @@ times and consists of comma-separated values. Valid values: o human (default): Display in a human-readable text format. + Conflicts with short and json. - o short: Emit shorter, human-readable text messages. + o short: Emit shorter, human-readable text messages. Conflicts with + human and json. o json: Emit JSON messages to stdout. See the reference - for more details. + for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. + contains the "short" rendering from rustc. Cannot be used with + human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting - rustc's default color scheme. + rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. + Cannot be used with human or short. Manifest Options --manifest-path path diff -Nru cargo-0.52.0/src/doc/man/generated_txt/cargo-test.txt cargo-0.54.0/src/doc/man/generated_txt/cargo-test.txt --- cargo-0.52.0/src/doc/man/generated_txt/cargo-test.txt 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/generated_txt/cargo-test.txt 2021-04-27 14:35:53.000000000 +0000 @@ -263,24 +263,28 @@ times and consists of comma-separated values. 
Valid values: o human (default): Display in a human-readable text format. + Conflicts with short and json. - o short: Emit shorter, human-readable text messages. + o short: Emit shorter, human-readable text messages. Conflicts with + human and json. o json: Emit JSON messages to stdout. See the reference - for more details. + for more details. Conflicts with human and short. o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. + contains the "short" rendering from rustc. Cannot be used with + human or short. o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting - rustc's default color scheme. + rustc's default color scheme. Cannot be used with human or short. o json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. + Cannot be used with human or short. Manifest Options --manifest-path path diff -Nru cargo-0.52.0/src/doc/man/includes/description-new-authors.md cargo-0.54.0/src/doc/man/includes/description-new-authors.md --- cargo-0.52.0/src/doc/man/includes/description-new-authors.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/includes/description-new-authors.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -The "authors" field in the manifest is determined from the environment or -configuration settings. 
A name is required and is determined from (first match -wins): - -- `cargo-new.name` Cargo config value -- `CARGO_NAME` environment variable -- `GIT_AUTHOR_NAME` environment variable -- `GIT_COMMITTER_NAME` environment variable -- `user.name` git configuration value -- `USER` environment variable -- `USERNAME` environment variable -- `NAME` environment variable - -The email address is optional and is determined from: - -- `cargo-new.email` Cargo config value -- `CARGO_EMAIL` environment variable -- `GIT_AUTHOR_EMAIL` environment variable -- `GIT_COMMITTER_EMAIL` environment variable -- `user.email` git configuration value -- `EMAIL` environment variable - -See [the reference](../reference/config.html) for more information about -configuration files. diff -Nru cargo-0.52.0/src/doc/man/includes/options-message-format.md cargo-0.54.0/src/doc/man/includes/options-message-format.md --- cargo-0.52.0/src/doc/man/includes/options-message-format.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/includes/options-message-format.md 2021-04-27 14:35:53.000000000 +0000 @@ -2,18 +2,20 @@ The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values: -- `human` (default): Display in a human-readable text format. -- `short`: Emit shorter, human-readable text messages. +- `human` (default): Display in a human-readable text format. Conflicts with + `short` and `json`. +- `short`: Emit shorter, human-readable text messages. Conflicts with `human` + and `json`. - `json`: Emit JSON messages to stdout. See [the reference](../reference/external-tools.html#json-messages) - for more details. + for more details. Conflicts with `human` and `short`. - `json-diagnostic-short`: Ensure the `rendered` field of JSON messages contains - the "short" rendering from rustc. + the "short" rendering from rustc. Cannot be used with `human` or `short`. 
- `json-diagnostic-rendered-ansi`: Ensure the `rendered` field of JSON messages contains embedded ANSI color codes for respecting rustc's default color - scheme. + scheme. Cannot be used with `human` or `short`. - `json-render-diagnostics`: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others - coming from rustc are still emitted. + coming from rustc are still emitted. Cannot be used with `human` or `short`. {{/option}} diff -Nru cargo-0.52.0/src/doc/man/includes/options-target-dir.md cargo-0.54.0/src/doc/man/includes/options-target-dir.md --- cargo-0.52.0/src/doc/man/includes/options-target-dir.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/man/includes/options-target-dir.md 2021-04-27 14:35:53.000000000 +0000 @@ -1,6 +1,13 @@ {{#option "`--target-dir` _directory_"}} Directory for all generated artifacts and intermediate files. May also be specified with the `CARGO_TARGET_DIR` environment variable, or the -`build.target-dir` [config value](../reference/config.html). Defaults -to `target` in the root of the workspace. +`build.target-dir` [config value](../reference/config.html). +{{#if temp-target-dir}} Defaults to a new temporary folder located in the +temporary directory of the platform. + +When using `--path`, by default it will use `target` directory in the workspace +of the local crate unless `--target-dir` +is specified. +{{else}} Defaults to `target` in the root of the workspace. 
+{{/if}} {{/option}} diff -Nru cargo-0.52.0/src/doc/src/appendix/git-authentication.md cargo-0.54.0/src/doc/src/appendix/git-authentication.md --- cargo-0.52.0/src/doc/src/appendix/git-authentication.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/appendix/git-authentication.md 2021-04-27 14:35:53.000000000 +0000 @@ -53,4 +53,4 @@ [`credential.helper`]: https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage [`net.git-fetch-with-cli`]: ../reference/config.md#netgit-fetch-with-cli -[GCM]: https://github.com/Microsoft/Git-Credential-Manager-for-Windows/ +[GCM]: https://github.com/microsoft/Git-Credential-Manager-Core/ diff -Nru cargo-0.52.0/src/doc/src/commands/cargo-bench.md cargo-0.54.0/src/doc/src/commands/cargo-bench.md --- cargo-0.52.0/src/doc/src/commands/cargo-bench.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-bench.md 2021-04-27 14:35:53.000000000 +0000 @@ -250,8 +250,8 @@

--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. Defaults -to target in the root of the workspace.
+build.target-dir config value. +Defaults to target in the root of the workspace. @@ -296,20 +296,22 @@
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

    -
  • human (default): Display in a human-readable text format.
  • -
  • short: Emit shorter, human-readable text messages.
  • +
  • human (default): Display in a human-readable text format. Conflicts with +short and json.
  • +
  • short: Emit shorter, human-readable text messages. Conflicts with human +and json.
  • json: Emit JSON messages to stdout. See the reference -for more details.
  • +for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains -the "short" rendering from rustc.
  • +the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color -scheme.
  • +scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted.
  • +coming from rustc are still emitted. Cannot be used with human or short.
diff -Nru cargo-0.52.0/src/doc/src/commands/cargo-build.md cargo-0.54.0/src/doc/src/commands/cargo-build.md --- cargo-0.52.0/src/doc/src/commands/cargo-build.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-build.md 2021-04-27 14:35:53.000000000 +0000 @@ -196,8 +196,8 @@
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. Defaults -to target in the root of the workspace.
+build.target-dir config value. +Defaults to target in the root of the workspace. @@ -206,7 +206,7 @@

This option is unstable and available only on the nightly channel and requires the -Z unstable-options flag to enable. -See https://github.com/rust-lang/cargo/issues/6790 for more information. +See https://github.com/rust-lang/cargo/issues/6790 for more information. @@ -244,20 +244,22 @@

The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

    -
  • human (default): Display in a human-readable text format.
  • -
  • short: Emit shorter, human-readable text messages.
  • +
  • human (default): Display in a human-readable text format. Conflicts with +short and json.
  • +
  • short: Emit shorter, human-readable text messages. Conflicts with human +and json.
  • json: Emit JSON messages to stdout. See the reference -for more details.
  • +for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains -the "short" rendering from rustc.
  • +the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color -scheme.
  • +scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted.
  • +coming from rustc are still emitted. Cannot be used with human or short.
diff -Nru cargo-0.52.0/src/doc/src/commands/cargo-check.md cargo-0.54.0/src/doc/src/commands/cargo-check.md --- cargo-0.52.0/src/doc/src/commands/cargo-check.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-check.md 2021-04-27 14:35:53.000000000 +0000 @@ -210,8 +210,8 @@
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. Defaults -to target in the root of the workspace.
+build.target-dir config value. +Defaults to target in the root of the workspace. @@ -249,20 +249,22 @@
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

    -
  • human (default): Display in a human-readable text format.
  • -
  • short: Emit shorter, human-readable text messages.
  • +
  • human (default): Display in a human-readable text format. Conflicts with +short and json.
  • +
  • short: Emit shorter, human-readable text messages. Conflicts with human +and json.
  • json: Emit JSON messages to stdout. See the reference -for more details.
  • +for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains -the "short" rendering from rustc.
  • +the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color -scheme.
  • +scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted.
  • +coming from rustc are still emitted. Cannot be used with human or short.
diff -Nru cargo-0.52.0/src/doc/src/commands/cargo-clean.md cargo-0.54.0/src/doc/src/commands/cargo-clean.md --- cargo-0.52.0/src/doc/src/commands/cargo-clean.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-clean.md 2021-04-27 14:35:53.000000000 +0000 @@ -47,8 +47,8 @@
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. Defaults -to target in the root of the workspace.
+build.target-dir config value. +Defaults to target in the root of the workspace. diff -Nru cargo-0.52.0/src/doc/src/commands/cargo-doc.md cargo-0.54.0/src/doc/src/commands/cargo-doc.md --- cargo-0.52.0/src/doc/src/commands/cargo-doc.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-doc.md 2021-04-27 14:35:53.000000000 +0000 @@ -167,8 +167,8 @@
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. Defaults -to target in the root of the workspace.
+build.target-dir config value. +Defaults to target in the root of the workspace. @@ -206,20 +206,22 @@
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

    -
  • human (default): Display in a human-readable text format.
  • -
  • short: Emit shorter, human-readable text messages.
  • +
  • human (default): Display in a human-readable text format. Conflicts with +short and json.
  • +
  • short: Emit shorter, human-readable text messages. Conflicts with human +and json.
  • json: Emit JSON messages to stdout. See the reference -for more details.
  • +for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains -the "short" rendering from rustc.
  • +the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color -scheme.
  • +scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted.
  • +coming from rustc are still emitted. Cannot be used with human or short.
diff -Nru cargo-0.52.0/src/doc/src/commands/cargo-fix.md cargo-0.54.0/src/doc/src/commands/cargo-fix.md --- cargo-0.52.0/src/doc/src/commands/cargo-fix.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-fix.md 2021-04-27 14:35:53.000000000 +0000 @@ -14,17 +14,13 @@ This Cargo subcommand will automatically take rustc's suggestions from diagnostics like warnings and apply them to your source code. This is intended to help automate tasks that rustc itself already knows how to tell you to fix! -The `cargo fix` subcommand is also being developed for the Rust 2018 edition -to provide code the ability to easily opt-in to the new edition without having -to worry about any breakage. Executing `cargo fix` will under the hood execute [cargo-check(1)](cargo-check.html). Any warnings applicable to your crate will be automatically fixed (if possible) and all remaining warnings will be displayed when the check process is finished. For -example if you'd like to prepare for the 2018 edition, you can do so by -executing: +example if you'd like to apply all fixes to the current package, you can run: - cargo fix --edition + cargo fix which behaves the same as `cargo check --all-targets`. @@ -32,16 +28,40 @@ `cargo check`. If code is conditionally enabled with optional features, you will need to enable those features for that code to be analyzed: - cargo fix --edition --features foo + cargo fix --features foo Similarly, other `cfg` expressions like platform-specific code will need to pass `--target` to fix code for the given target. - cargo fix --edition --target x86_64-pc-windows-gnu + cargo fix --target x86_64-pc-windows-gnu If you encounter any problems with `cargo fix` or otherwise have any questions or feature requests please don't hesitate to file an issue at - +. + +### Edition migration + +The `cargo fix` subcommand can also be used to migrate a package from one +[edition] to the next. The general procedure is: + +1. Run `cargo fix --edition`. 
Consider also using the `--all-features` flag if + your project has multiple features. You may also want to run `cargo fix + --edition` multiple times with different `--target` flags if your project + has platform-specific code gated by `cfg` attributes. +2. Modify `Cargo.toml` to set the [edition field] to the new edition. +3. Run your project tests to verify that everything still works. If new + warnings are issued, you may want to consider running `cargo fix` again + (without the `--edition` flag) to apply any suggestions given by the + compiler. + +And hopefully that's it! Just keep in mind of the caveats mentioned above that +`cargo fix` cannot update code for inactive features or `cfg` expressions. +Also, in some rare cases the compiler is unable to automatically migrate all +code to the new edition, and this may require manual changes after building +with the new edition. + +[edition]: https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html +[edition field]: ../reference/manifest.html#the-edition-field ## OPTIONS @@ -56,9 +76,9 @@
--edition
-
Apply changes that will update the code to the latest edition. This will not +
Apply changes that will update the code to the next edition. This will not update the edition in the Cargo.toml manifest, which must be updated -manually.
+manually after cargo fix --edition has finished.
--edition-idioms
@@ -270,8 +290,8 @@
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. Defaults -to target in the root of the workspace.
+build.target-dir config value. +Defaults to target in the root of the workspace. @@ -309,20 +329,22 @@
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

    -
  • human (default): Display in a human-readable text format.
  • -
  • short: Emit shorter, human-readable text messages.
  • +
  • human (default): Display in a human-readable text format. Conflicts with +short and json.
  • +
  • short: Emit shorter, human-readable text messages. Conflicts with human +and json.
  • json: Emit JSON messages to stdout. See the reference -for more details.
  • +for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains -the "short" rendering from rustc.
  • +the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color -scheme.
  • +scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted.
  • +coming from rustc are still emitted. Cannot be used with human or short.
@@ -435,7 +457,7 @@ cargo fix -2. Convert a 2015 edition to 2018: +2. Update a package to prepare it for the next edition: cargo fix --edition diff -Nru cargo-0.52.0/src/doc/src/commands/cargo-init.md cargo-0.54.0/src/doc/src/commands/cargo-init.md --- cargo-0.52.0/src/doc/src/commands/cargo-init.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-init.md 2021-04-27 14:35:53.000000000 +0000 @@ -20,32 +20,6 @@ If the directory is not already in a VCS repository, then a new repository is created (see `--vcs` below). -The "authors" field in the manifest is determined from the environment or -configuration settings. A name is required and is determined from (first match -wins): - -- `cargo-new.name` Cargo config value -- `CARGO_NAME` environment variable -- `GIT_AUTHOR_NAME` environment variable -- `GIT_COMMITTER_NAME` environment variable -- `user.name` git configuration value -- `USER` environment variable -- `USERNAME` environment variable -- `NAME` environment variable - -The email address is optional and is determined from: - -- `cargo-new.email` Cargo config value -- `CARGO_EMAIL` environment variable -- `GIT_AUTHOR_EMAIL` environment variable -- `GIT_COMMITTER_EMAIL` environment variable -- `user.email` git configuration value -- `EMAIL` environment variable - -See [the reference](../reference/config.html) for more information about -configuration files. - - See [cargo-new(1)](cargo-new.html) for a similar command which will create a new package in a new directory. diff -Nru cargo-0.52.0/src/doc/src/commands/cargo-install.md cargo-0.54.0/src/doc/src/commands/cargo-install.md --- cargo-0.52.0/src/doc/src/commands/cargo-install.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-install.md 2021-04-27 14:35:53.000000000 +0000 @@ -1,6 +1,7 @@ # cargo-install(1) + ## NAME cargo-install - Build and install a Rust binary @@ -215,8 +216,12 @@
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. Defaults -to target in the root of the workspace.
+build.target-dir config value. +Defaults to a new temporary folder located in the +temporary directory of the platform.

+

When using --path, by default it will use target directory in the workspace +of the local crate unless --target-dir +is specified. diff -Nru cargo-0.52.0/src/doc/src/commands/cargo-new.md cargo-0.54.0/src/doc/src/commands/cargo-new.md --- cargo-0.52.0/src/doc/src/commands/cargo-new.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-new.md 2021-04-27 14:35:53.000000000 +0000 @@ -15,32 +15,6 @@ and a VCS ignore file. If the directory is not already in a VCS repository, then a new repository is created (see `--vcs` below). -The "authors" field in the manifest is determined from the environment or -configuration settings. A name is required and is determined from (first match -wins): - -- `cargo-new.name` Cargo config value -- `CARGO_NAME` environment variable -- `GIT_AUTHOR_NAME` environment variable -- `GIT_COMMITTER_NAME` environment variable -- `user.name` git configuration value -- `USER` environment variable -- `USERNAME` environment variable -- `NAME` environment variable - -The email address is optional and is determined from: - -- `cargo-new.email` Cargo config value -- `CARGO_EMAIL` environment variable -- `GIT_AUTHOR_EMAIL` environment variable -- `GIT_COMMITTER_EMAIL` environment variable -- `user.email` git configuration value -- `EMAIL` environment variable - -See [the reference](../reference/config.html) for more information about -configuration files. - - See [cargo-init(1)](cargo-init.html) for a similar command which will create a new manifest in an existing directory. diff -Nru cargo-0.52.0/src/doc/src/commands/cargo-package.md cargo-0.54.0/src/doc/src/commands/cargo-package.md --- cargo-0.52.0/src/doc/src/commands/cargo-package.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-package.md 2021-04-27 14:35:53.000000000 +0000 @@ -88,8 +88,8 @@

--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. Defaults -to target in the root of the workspace.
+build.target-dir config value. +Defaults to target in the root of the workspace. diff -Nru cargo-0.52.0/src/doc/src/commands/cargo-pkgid.md cargo-0.54.0/src/doc/src/commands/cargo-pkgid.md --- cargo-0.52.0/src/doc/src/commands/cargo-pkgid.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-pkgid.md 2021-04-27 14:35:53.000000000 +0000 @@ -163,5 +163,9 @@ cargo pkgid https://github.com/rust-lang/crates.io-index#foo +4. Retrieve package specification for `foo` from a local package: + + cargo pkgid file:///path/to/local/package#foo + ## SEE ALSO [cargo(1)](cargo.html), [cargo-generate-lockfile(1)](cargo-generate-lockfile.html), [cargo-metadata(1)](cargo-metadata.html) diff -Nru cargo-0.52.0/src/doc/src/commands/cargo-publish.md cargo-0.54.0/src/doc/src/commands/cargo-publish.md --- cargo-0.52.0/src/doc/src/commands/cargo-publish.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-publish.md 2021-04-27 14:35:53.000000000 +0000 @@ -96,8 +96,8 @@
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. Defaults -to target in the root of the workspace.
+build.target-dir config value. +Defaults to target in the root of the workspace. diff -Nru cargo-0.52.0/src/doc/src/commands/cargo-run.md cargo-0.54.0/src/doc/src/commands/cargo-run.md --- cargo-0.52.0/src/doc/src/commands/cargo-run.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-run.md 2021-04-27 14:35:53.000000000 +0000 @@ -114,8 +114,8 @@
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. Defaults -to target in the root of the workspace.
+build.target-dir config value. +Defaults to target in the root of the workspace. @@ -154,20 +154,22 @@
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

    -
  • human (default): Display in a human-readable text format.
  • -
  • short: Emit shorter, human-readable text messages.
  • +
  • human (default): Display in a human-readable text format. Conflicts with +short and json.
  • +
  • short: Emit shorter, human-readable text messages. Conflicts with human +and json.
  • json: Emit JSON messages to stdout. See the reference -for more details.
  • +for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains -the "short" rendering from rustc.
  • +the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color -scheme.
  • +scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted.
  • +coming from rustc are still emitted. Cannot be used with human or short.
diff -Nru cargo-0.52.0/src/doc/src/commands/cargo-rustc.md cargo-0.54.0/src/doc/src/commands/cargo-rustc.md --- cargo-0.52.0/src/doc/src/commands/cargo-rustc.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-rustc.md 2021-04-27 14:35:53.000000000 +0000 @@ -183,8 +183,8 @@
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. Defaults -to target in the root of the workspace.
+build.target-dir config value. +Defaults to target in the root of the workspace. @@ -223,20 +223,22 @@
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

    -
  • human (default): Display in a human-readable text format.
  • -
  • short: Emit shorter, human-readable text messages.
  • +
  • human (default): Display in a human-readable text format. Conflicts with +short and json.
  • +
  • short: Emit shorter, human-readable text messages. Conflicts with human +and json.
  • json: Emit JSON messages to stdout. See the reference -for more details.
  • +for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains -the "short" rendering from rustc.
  • +the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color -scheme.
  • +scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted.
  • +coming from rustc are still emitted. Cannot be used with human or short.
diff -Nru cargo-0.52.0/src/doc/src/commands/cargo-rustdoc.md cargo-0.54.0/src/doc/src/commands/cargo-rustdoc.md --- cargo-0.52.0/src/doc/src/commands/cargo-rustdoc.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-rustdoc.md 2021-04-27 14:35:53.000000000 +0000 @@ -196,8 +196,8 @@
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. Defaults -to target in the root of the workspace.
+build.target-dir config value. +Defaults to target in the root of the workspace. @@ -235,20 +235,22 @@
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

    -
  • human (default): Display in a human-readable text format.
  • -
  • short: Emit shorter, human-readable text messages.
  • +
  • human (default): Display in a human-readable text format. Conflicts with +short and json.
  • +
  • short: Emit shorter, human-readable text messages. Conflicts with human +and json.
  • json: Emit JSON messages to stdout. See the reference -for more details.
  • +for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains -the "short" rendering from rustc.
  • +the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color -scheme.
  • +scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted.
  • +coming from rustc are still emitted. Cannot be used with human or short.
diff -Nru cargo-0.52.0/src/doc/src/commands/cargo-test.md cargo-0.54.0/src/doc/src/commands/cargo-test.md --- cargo-0.52.0/src/doc/src/commands/cargo-test.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/commands/cargo-test.md 2021-04-27 14:35:53.000000000 +0000 @@ -276,8 +276,8 @@
--target-dir directory
Directory for all generated artifacts and intermediate files. May also be specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. Defaults -to target in the root of the workspace.
+build.target-dir config value. +Defaults to target in the root of the workspace. @@ -322,20 +322,22 @@
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

    -
  • human (default): Display in a human-readable text format.
  • -
  • short: Emit shorter, human-readable text messages.
  • +
  • human (default): Display in a human-readable text format. Conflicts with +short and json.
  • +
  • short: Emit shorter, human-readable text messages. Conflicts with human +and json.
  • json: Emit JSON messages to stdout. See the reference -for more details.
  • +for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains -the "short" rendering from rustc.
  • +the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color -scheme.
  • +scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted.
  • +coming from rustc are still emitted. Cannot be used with human or short.
diff -Nru cargo-0.52.0/src/doc/src/faq.md cargo-0.54.0/src/doc/src/faq.md --- cargo-0.52.0/src/doc/src/faq.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/faq.md 2021-04-27 14:35:53.000000000 +0000 @@ -257,4 +257,4 @@ If after trying to debug your issue, however, you're still running into problems then feel free to [open an -issue](https://github.com/rust-lang/cargo/issuses/new)! +issue](https://github.com/rust-lang/cargo/issues/new)! diff -Nru cargo-0.52.0/src/doc/src/getting-started/first-steps.md cargo-0.54.0/src/doc/src/getting-started/first-steps.md --- cargo-0.52.0/src/doc/src/getting-started/first-steps.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/getting-started/first-steps.md 2021-04-27 14:35:53.000000000 +0000 @@ -33,7 +33,6 @@ [package] name = "hello_world" version = "0.1.0" -authors = ["Your Name "] edition = "2018" [dependencies] diff -Nru cargo-0.52.0/src/doc/src/guide/build-cache.md cargo-0.54.0/src/doc/src/guide/build-cache.md --- cargo-0.52.0/src/doc/src/guide/build-cache.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/guide/build-cache.md 2021-04-27 14:35:53.000000000 +0000 @@ -2,7 +2,7 @@ Cargo stores the output of a build into the "target" directory. By default, this is the directory named `target` in the root of your -[*workspace*][def-worksapce]. To change the location, you can set the +[*workspace*][def-workspace]. To change the location, you can set the `CARGO_TARGET_DIR` [environment variable], the [`build.target-dir`] config value, or the `--target-dir` command-line flag. 
diff -Nru cargo-0.52.0/src/doc/src/guide/cargo-toml-vs-cargo-lock.md cargo-0.54.0/src/doc/src/guide/cargo-toml-vs-cargo-lock.md --- cargo-0.52.0/src/doc/src/guide/cargo-toml-vs-cargo-lock.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/guide/cargo-toml-vs-cargo-lock.md 2021-04-27 14:35:53.000000000 +0000 @@ -27,7 +27,6 @@ [package] name = "hello_world" version = "0.1.0" -authors = ["Your Name "] [dependencies] rand = { git = "https://github.com/rust-lang-nursery/rand.git" } @@ -63,7 +62,6 @@ [package] name = "hello_world" version = "0.1.0" -authors = ["Your Name "] [dependencies] rand = { git = "https://github.com/rust-lang-nursery/rand.git" } diff -Nru cargo-0.52.0/src/doc/src/guide/creating-a-new-project.md cargo-0.54.0/src/doc/src/guide/creating-a-new-project.md --- cargo-0.52.0/src/doc/src/guide/creating-a-new-project.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/guide/creating-a-new-project.md 2021-04-27 14:35:53.000000000 +0000 @@ -29,7 +29,6 @@ [package] name = "hello_world" version = "0.1.0" -authors = ["Your Name "] edition = "2018" [dependencies] diff -Nru cargo-0.52.0/src/doc/src/guide/dependencies.md cargo-0.54.0/src/doc/src/guide/dependencies.md --- cargo-0.52.0/src/doc/src/guide/dependencies.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/guide/dependencies.md 2021-04-27 14:35:53.000000000 +0000 @@ -35,7 +35,6 @@ [package] name = "hello_world" version = "0.1.0" -authors = ["Your Name "] edition = "2018" [dependencies] diff -Nru cargo-0.52.0/src/doc/src/reference/cargo-targets.md cargo-0.54.0/src/doc/src/reference/cargo-targets.md --- cargo-0.52.0/src/doc/src/reference/cargo-targets.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/cargo-targets.md 2021-04-27 14:35:53.000000000 +0000 @@ -27,7 +27,7 @@ ### Binaries -Binary targets are executables programs that can be run after being compiled. 
+Binary targets are executable programs that can be run after being compiled. The default binary filename is `src/main.rs`, which defaults to the name of the package. Additional binaries are stored in the [`src/bin/` directory][package layout]. The settings for each binary can be [customized] diff -Nru cargo-0.52.0/src/doc/src/reference/config.md cargo-0.54.0/src/doc/src/reference/config.md --- cargo-0.52.0/src/doc/src/reference/config.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/config.md 2021-04-27 14:35:53.000000000 +0000 @@ -68,8 +68,6 @@ pipelining = true # rustc pipelining [cargo-new] -name = "Your Name" # name to use in `authors` field -email = "you@example.com" # email address to use in `authors` field vcs = "none" # VCS to use ('git', 'hg', 'pijul', 'fossil', 'none') [http] @@ -96,6 +94,7 @@ [profile.] # Modify profile settings via config. opt-level = 0 # Optimization level. debug = true # Include debug info. +split-debuginfo = '...' # Debug info splitting behavior. debug-assertions = true # Enables debug assertions. overflow-checks = true # Enables runtime integer overflow checks. lto = false # Sets link-time optimization. @@ -402,25 +401,12 @@ The `[cargo-new]` table defines defaults for the [`cargo new`] command. ##### `cargo-new.name` -* Type: string -* Default: from environment -* Environment: `CARGO_NAME` or `CARGO_CARGO_NEW_NAME` -Defines the name to use in the `authors` field when creating a new -`Cargo.toml` file. If not specified in the config, Cargo searches the -environment or your `git` configuration as described in the [`cargo new`] -documentation. +This option is deprecated and unused. ##### `cargo-new.email` -* Type: string -* Default: from environment -* Environment: `CARGO_EMAIL` or `CARGO_CARGO_NEW_EMAIL` -Defines the email address used in the `authors` field when creating a new -`Cargo.toml` file. 
If not specified in the config, Cargo searches the -environment or your `git` configuration as described in the [`cargo new`] -documentation. The `email` value may be set to an empty string to prevent -Cargo from placing an address in the authors field. +This option is deprecated and unused. ##### `cargo-new.vcs` * Type: string @@ -618,6 +604,13 @@ See [debug](profiles.md#debug). +##### `profile..split-debuginfo` +* Type: string +* Default: See profile docs. +* Environment: `CARGO_PROFILE__SPLIT_DEBUGINFO` + +See [split-debuginfo](profiles.md#split-debuginfo). + ##### `profile..debug-assertions` * Type: boolean * Default: See profile docs. diff -Nru cargo-0.52.0/src/doc/src/reference/environment-variables.md cargo-0.54.0/src/doc/src/reference/environment-variables.md --- cargo-0.52.0/src/doc/src/reference/environment-variables.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/environment-variables.md 2021-04-27 14:35:53.000000000 +0000 @@ -46,8 +46,6 @@ will otherwise be used. See also [`build.incremental`] config value. * `CARGO_CACHE_RUSTC_INFO` — If this is set to 0 then Cargo will not try to cache compiler version information. -* `CARGO_NAME` — The author name to use for [`cargo new`]. -* `CARGO_EMAIL` — The author email to use for [`cargo new`]. * `HTTPS_PROXY` or `https_proxy` or `http_proxy` — The HTTP proxy to use, see [`http.proxy`] for more detail. * `HTTP_TIMEOUT` — The HTTP timeout in seconds, see [`http.timeout`] for more @@ -78,8 +76,6 @@ * `CARGO_BUILD_INCREMENTAL` — Incremental compilation, see [`build.incremental`]. * `CARGO_BUILD_DEP_INFO_BASEDIR` — Dep-info relative directory, see [`build.dep-info-basedir`]. * `CARGO_BUILD_PIPELINING` — Whether or not to use `rustc` pipelining, see [`build.pipelining`]. -* `CARGO_CARGO_NEW_NAME` — The author name to use with [`cargo new`], see [`cargo-new.name`]. -* `CARGO_CARGO_NEW_EMAIL` — The author email to use with [`cargo new`], see [`cargo-new.email`]. 
* `CARGO_CARGO_NEW_VCS` — The default source control system with [`cargo new`], see [`cargo-new.vcs`]. * `CARGO_HTTP_DEBUG` — Enables HTTP debugging, see [`http.debug`]. * `CARGO_HTTP_PROXY` — Enables HTTP proxy, see [`http.proxy`]. @@ -334,6 +330,7 @@ changed by editing `.cargo/config.toml`; see the documentation about [cargo configuration][cargo-config] for more information. +* `CARGO_PKG_` - The package information variables, with the same names and values as are [provided during crate building][variables set for crates]. [unix-like platforms]: ../../reference/conditional-compilation.html#unix-and-windows [windows-like platforms]: ../../reference/conditional-compilation.html#unix-and-windows @@ -350,6 +347,7 @@ [jobserver]: https://www.gnu.org/software/make/manual/html_node/Job-Slots.html [cargo-config]: config.md [Target Triple]: ../appendix/glossary.md#target +[variables set for crates]: #environment-variables-cargo-sets-for-crates ### Environment variables Cargo sets for 3rd party subcommands diff -Nru cargo-0.52.0/src/doc/src/reference/external-tools.md cargo-0.54.0/src/doc/src/reference/external-tools.md --- cargo-0.52.0/src/doc/src/reference/external-tools.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/external-tools.md 2021-04-27 14:35:53.000000000 +0000 @@ -67,6 +67,8 @@ "reason": "compiler-message", /* The Package ID, a unique identifier for referring to the package. */ "package_id": "my-package 0.1.0 (path+file:///path/to/my-package)", + /* Absolute path to the package manifest. */ + "manifest_path": "/path/to/my-package/Cargo.toml", /* The Cargo target (lib, bin, example, etc.) that generated the message. */ "target": { /* Array of target kinds. @@ -129,6 +131,8 @@ "reason": "compiler-artifact", /* The Package ID, a unique identifier for referring to the package. */ "package_id": "my-package 0.1.0 (path+file:///path/to/my-package)", + /* Absolute path to the package manifest. 
*/ + "manifest_path": "/path/to/my-package/Cargo.toml", /* The Cargo target (lib, bin, example, etc.) that generated the artifacts. See the definition above for `compiler-message` for details. */ diff -Nru cargo-0.52.0/src/doc/src/reference/features-examples.md cargo-0.54.0/src/doc/src/reference/features-examples.md --- cargo-0.52.0/src/doc/src/reference/features-examples.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/features-examples.md 2021-04-27 14:35:53.000000000 +0000 @@ -62,7 +62,7 @@ [example2][wasm-bindgen-cfg2]), it uses `#[cfg(feature = "std")]` attributes to conditionally enable extra functionality that requires `std`. -[`no_std`]: ../../reference/crates-and-source-files.html#preludes-and-no_std +[`no_std`]: ../../reference/names/preludes.html#the-no_std-attribute [`wasm-bindgen`]: https://crates.io/crates/wasm-bindgen [`std` prelude]: ../../std/prelude/index.html [wasm-bindgen-std]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/Cargo.toml#L25 diff -Nru cargo-0.52.0/src/doc/src/reference/features.md cargo-0.54.0/src/doc/src/reference/features.md --- cargo-0.52.0/src/doc/src/reference/features.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/features.md 2021-04-27 14:35:53.000000000 +0000 @@ -252,7 +252,7 @@ } ``` -[`no_std`]: ../../reference/crates-and-source-files.html#preludes-and-no_std +[`no_std`]: ../../reference/names/preludes.html#the-no_std-attribute [features section]: resolver.md#features #### Mutually exclusive features @@ -369,6 +369,9 @@ # This command is allowed with resolver = "2", regardless of which directory # you are in. 
cargo build -p foo -p bar --features foo-feat,bar-feat + +# This explicit equivalent works with any resolver version: +cargo build -p foo -p bar --features foo/foo-feat,bar/bar-feat ``` Additionally, with `resolver = "1"`, the `--no-default-features` flag only diff -Nru cargo-0.52.0/src/doc/src/reference/manifest.md cargo-0.54.0/src/doc/src/reference/manifest.md --- cargo-0.52.0/src/doc/src/reference/manifest.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/manifest.md 2021-04-27 14:35:53.000000000 +0000 @@ -107,26 +107,27 @@ #### The `authors` field -The `authors` field lists people or organizations that are considered the -"authors" of the package. The exact meaning is open to interpretation — it may -list the original or primary authors, current maintainers, or owners of the -package. These names will be listed on the crate's page on -[crates.io]. An optional email address may be included within angled -brackets at the end of each author. - -> **Note**: [crates.io] requires at least one author to be listed. +The optional `authors` field lists people or organizations that are considered +the "authors" of the package. The exact meaning is open to interpretation — it +may list the original or primary authors, current maintainers, or owners of the +package. An optional email address may be included within angled brackets at +the end of each author entry. + +This field is only surfaced in package metadata and in the `CARGO_PKG_AUTHORS` +environment variable within `build.rs`. It is not displayed in the [crates.io] +user interface. + +> **Warning**: Package manifests cannot be changed once published, so this +> field cannot be changed or removed in already-published versions of a +> package. #### The `edition` field -The `edition` key is an optional key that affects which edition your package -is compiled with. [`cargo new`] will generate a package with the `edition` key -set to the latest edition. 
Setting the `edition` key in -`[package]` will affect all targets/crates in the package, including test -suites, benchmarks, binaries, examples, etc. - -If the `edition` key is not set to a specific [Rust Edition] in your -`Cargo.toml`, Cargo will default to 2015. +The `edition` key is an optional key that affects which [Rust Edition] your package +is compiled with. Setting the `edition` key in `[package]` will affect all +targets/crates in the package, including test suites, benchmarks, binaries, +examples, etc. ```toml [package] @@ -134,6 +135,15 @@ edition = '2018' ``` +Most manifests have the `edition` field filled in automatically by [`cargo new`] +with the latest stable edition. By default `cargo new` creates a manifest with +the 2018 edition currently. + +If the `edition` field is not present in `Cargo.toml`, then the 2015 edition is +assumed for backwards compatibility. Note that all manifests +created with [`cargo new`] will not use this historical fallback because they +will have `edition` explicitly specified to a newer value. + #### The `description` field The description is a short blurb about the package. [crates.io] will display @@ -209,7 +219,7 @@ [crates.io] interprets the `license` field as an [SPDX 2.1 license expression][spdx-2.1-license-expressions]. The name must be a known license -from the [SPDX license list 3.6][spdx-license-list-3.6]. Parentheses are not +from the [SPDX license list 3.11][spdx-license-list-3.11]. Parentheses are not currently supported. See the [SPDX site] for more information. 
SPDX license expressions support AND and OR operators to combine multiple @@ -514,7 +524,7 @@ [publishing]: publishing.md [Rust Edition]: ../../edition-guide/index.html [spdx-2.1-license-expressions]: https://spdx.org/spdx-specification-21-web-version#h.jxpfx0ykyb60 -[spdx-license-list-3.6]: https://github.com/spdx/license-list-data/tree/v3.6 +[spdx-license-list-3.11]: https://github.com/spdx/license-list-data/tree/v3.11 [SPDX site]: https://spdx.org/license-list [TOML]: https://toml.io/ diff -Nru cargo-0.52.0/src/doc/src/reference/overriding-dependencies.md cargo-0.54.0/src/doc/src/reference/overriding-dependencies.md --- cargo-0.52.0/src/doc/src/reference/overriding-dependencies.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/overriding-dependencies.md 2021-04-27 14:35:53.000000000 +0000 @@ -49,7 +49,6 @@ [package] name = "my-library" version = "0.1.0" -authors = ["..."] [dependencies] uuid = "1.0" @@ -131,7 +130,6 @@ [package] name = "my-library" version = "0.1.0" -authors = ["..."] [dependencies] uuid = "1.0.1" @@ -157,7 +155,6 @@ [package] name = "my-binary" version = "0.1.0" -authors = ["..."] [dependencies] my-library = { git = 'https://example.com/git/my-library' } @@ -212,7 +209,6 @@ [package] name = "my-binary" version = "0.1.0" -authors = ["..."] [dependencies] my-library = { git = 'https://example.com/git/my-library' } diff -Nru cargo-0.52.0/src/doc/src/reference/pkgid-spec.md cargo-0.54.0/src/doc/src/reference/pkgid-spec.md --- cargo-0.52.0/src/doc/src/reference/pkgid-spec.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/pkgid-spec.md 2021-04-27 14:35:53.000000000 +0000 @@ -4,17 +4,25 @@ Subcommands of Cargo frequently need to refer to a particular package within a dependency graph for various operations like updating, cleaning, building, etc. -To solve this problem, Cargo supports Package ID Specifications. A specification +To solve this problem, Cargo supports *Package ID Specifications*. 
A specification is a string which is used to uniquely refer to one package within a graph of packages. +The specification may be fully qualified, such as +`https://github.com/rust-lang/crates.io-index#regex:1.4.3` or it may be +abbreviated, such as `regex`. The abbreviated form may be used as long as it +uniquely identifies a single package in the dependency graph. If there is +ambiguity, additional qualifiers can be added to make it unique. For example, +if there are two versions of the `regex` package in the graph, then it can be +qualified with a version to make it unique, such as `regex:1.4.3`. + #### Specification grammar The formal grammar for a Package Id Specification is: ```notrust -pkgid := pkgname - | [ proto "://" ] hostname-and-path [ "#" ( pkgname | semver ) ] +spec := pkgname + | proto "://" hostname-and-path [ "#" ( pkgname | semver ) ] pkgname := name [ ":" semver ] proto := "http" | "git" | ... @@ -22,19 +30,34 @@ Here, brackets indicate that the contents are optional. +The URL form can be used for git dependencies, or to differentiate packages +that come from different sources such as different registries. 
+ #### Example specifications -These could all be references to a package `foo` version `1.2.3` from the -registry at `crates.io` +The following are references to the `regex` package on `crates.io`: -| pkgid | name | version | url | -|:-----------------------------|:-----:|:-------:|:----------------------:| -| `foo` | `foo` | `*` | `*` | -| `foo:1.2.3` | `foo` | `1.2.3` | `*` | -| `crates.io/foo` | `foo` | `*` | `*://crates.io/foo` | -| `crates.io/foo#1.2.3` | `foo` | `1.2.3` | `*://crates.io/foo` | -| `crates.io/bar#foo:1.2.3` | `foo` | `1.2.3` | `*://crates.io/bar` | -| `https://crates.io/foo#1.2.3`| `foo` | `1.2.3` | `https://crates.io/foo` | +| Spec | Name | Version | +|:------------------------------------------------------------|:-------:|:-------:| +| `regex` | `regex` | `*` | +| `regex:1.4.3` | `regex` | `1.4.3` | +| `https://github.com/rust-lang/crates.io-index#regex` | `regex` | `*` | +| `https://github.com/rust-lang/crates.io-index#regex:1.4.3` | `regex` | `1.4.3` | + +The following are some examples of specs for several different git dependencies: + +| Spec | Name | Version | +|:----------------------------------------------------------|:----------------:|:--------:| +| `https://github.com/rust-lang/cargo#0.52.0` | `cargo` | `0.52.0` | +| `https://github.com/rust-lang/cargo#cargo-platform:0.1.1` | `cargo-platform` | `0.1.1` | +| `ssh://git@github.com/rust-lang/regex.git#regex:1.4.3` | `regex` | `1.4.3` | + +Local packages on the filesystem can use `file://` URLs to reference them: + +| Spec | Name | Version | +|:---------------------------------------|:-----:|:-------:| +| `file:///path/to/my/project/foo` | `foo` | `*` | +| `file:///path/to/my/project/foo#1.1.8` | `foo` | `1.1.8` | #### Brevity of specifications diff -Nru cargo-0.52.0/src/doc/src/reference/profiles.md cargo-0.54.0/src/doc/src/reference/profiles.md --- cargo-0.52.0/src/doc/src/reference/profiles.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/profiles.md 
2021-04-27 14:35:53.000000000 +0000 @@ -83,10 +83,12 @@ executable itself or adjacent to it. This option is a string and acceptable values are the same as those the -[compiler accepts][`-C split-debuginfo` flag]. See that documentation for the -default behavior, which is platform-specific. Some options are only available -on the [nightly channel]. The default may change in the future once more -testing has been performed, and support for DWARF is stabilized. +[compiler accepts][`-C split-debuginfo` flag]. The default value for this option +is `unpacked` on macOS for profiles that have debug information otherwise +enabled. Otherwise the default for this option is [documented with rustc][`-C +split-debuginfo` flag] and is platform-specific. Some options are only +available on the [nightly channel]. The Cargo default may change in the future +once more testing has been performed, and support for DWARF is stabilized. [nightly channel]: ../../book/appendix-07-nightly-rust.html [`-C split-debuginfo` flag]: ../../rustc/codegen-options/index.html#split-debuginfo @@ -167,7 +169,7 @@ [`panic-abort-tests`] unstable flag which enables `abort` behavior. Additionally, when using the `abort` strategy and building a test, all of the -dependencies will also be forced to built with the `unwind` strategy. +dependencies will also be forced to build with the `unwind` strategy. [`-C panic` flag]: ../../rustc/codegen-options/index.html#panic [`panic-abort-tests`]: unstable.md#panic-abort-tests diff -Nru cargo-0.52.0/src/doc/src/reference/publishing.md cargo-0.54.0/src/doc/src/reference/publishing.md --- cargo-0.52.0/src/doc/src/reference/publishing.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/publishing.md 2021-04-27 14:35:53.000000000 +0000 @@ -34,7 +34,6 @@ ensure your crate can be discovered more easily! 
Before publishing, make sure you have filled out the following fields: -- [`authors`] - [`license` or `license-file`] - [`description`] - [`homepage`] @@ -258,7 +257,6 @@ [RFC 1105]: https://github.com/rust-lang/rfcs/blob/master/text/1105-api-evolution.md [Rust API Guidelines]: https://rust-lang.github.io/api-guidelines/ -[`authors`]: manifest.md#the-authors-field [`cargo login`]: ../commands/cargo-login.md [`cargo package`]: ../commands/cargo-package.md [`cargo publish`]: ../commands/cargo-publish.md diff -Nru cargo-0.52.0/src/doc/src/reference/registries.md cargo-0.54.0/src/doc/src/reference/registries.md --- cargo-0.52.0/src/doc/src/reference/registries.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/registries.md 2021-04-27 14:35:53.000000000 +0000 @@ -178,7 +178,7 @@ `CrateTwo` have `{prefix}` values of `cr/at` and `Cr/at`; these are distinct on Unix machines but alias to the same directory on Windows. Using directories with normalized case avoids aliasing, but on case-sensitive filesystems it's -harder to suport older versions of Cargo that lack `{prefix}`/`{lowerprefix}`. +harder to support older versions of Cargo that lack `{prefix}`/`{lowerprefix}`. For example, nginx rewrite rules can easily construct `{prefix}` but can't perform case-conversion to construct `{lowerprefix}`. @@ -385,7 +385,7 @@ "extras": ["rand/simd_support"] }, // List of strings of the authors. - // May be empty. crates.io requires at least one entry. + // May be empty. "authors": ["Alice "], // Description field from the manifest. // May be null. crates.io requires at least some content. diff -Nru cargo-0.52.0/src/doc/src/reference/semver.md cargo-0.54.0/src/doc/src/reference/semver.md --- cargo-0.52.0/src/doc/src/reference/semver.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/semver.md 2021-04-27 14:35:53.000000000 +0000 @@ -235,7 +235,7 @@ Mitigation strategies: * Do not add new fields to all-public field structs. 
* Mark structs as [`#[non_exhaustive]`][non_exhaustive] when first introducing - an struct to prevent users from using struct literal syntax, and instead + a struct to prevent users from using struct literal syntax, and instead provide a constructor method and/or [Default] implementation. @@ -270,7 +270,7 @@ Mitigation strategies: * Do not add new new fields to all-public field structs. * Mark structs as [`#[non_exhaustive]`][non_exhaustive] when first introducing - an struct to prevent users from using struct literal syntax, and instead + a struct to prevent users from using struct literal syntax, and instead provide a constructor method and/or [Default] implementation. @@ -634,7 +634,7 @@ use updated_crate::Trait; struct Foo; -impl Trait for Foo {} // Error: wrong number of type arguments +impl Trait for Foo {} // Error: missing generics ``` Mitigating strategies: @@ -943,7 +943,7 @@ use updated_crate::foo; fn main() { - foo::(); // Error: wrong number of type arguments + foo::(); // Error: this function takes 2 type arguments but only 1 type argument was supplied } ``` @@ -1328,7 +1328,7 @@ document what your commitments are. 
[`cfg` attribute]: ../../reference/conditional-compilation.md#the-cfg-attribute -[`no_std`]: ../../reference/crates-and-source-files.html#preludes-and-no_std +[`no_std`]: ../../reference/names/preludes.html#the-no_std-attribute [`pub use`]: ../../reference/items/use-declarations.html [Cargo feature]: features.md [Cargo features]: features.md diff -Nru cargo-0.52.0/src/doc/src/reference/unstable.md cargo-0.54.0/src/doc/src/reference/unstable.md --- cargo-0.52.0/src/doc/src/reference/unstable.md 2021-03-16 22:05:51.000000000 +0000 +++ cargo-0.54.0/src/doc/src/reference/unstable.md 2021-04-27 14:35:53.000000000 +0000 @@ -58,6 +58,32 @@ [nightly channel]: ../../book/appendix-07-nightly-rust.html [stabilized]: https://doc.crates.io/contrib/process/unstable.html#stabilization +### allow-features + +This permanently-unstable flag makes it so that only a listed set of +unstable features can be used. Specifically, if you pass +`-Zallow-features=foo,bar`, you'll continue to be able to pass `-Zfoo` +and `-Zbar` to `cargo`, but you will be unable to pass `-Zbaz`. You can +pass an empty string (`-Zallow-features=`) to disallow all unstable +features. + +`-Zallow-features` also restricts which unstable features can be passed +to the `cargo-features` entry in `Cargo.toml`. If, for example, you want +to allow + +```toml +cargo-features = ["test-dummy-unstable"] +``` + +where `test-dummy-unstable` is unstable, that feature would also be +disallowed by `-Zallow-features=`, and allowed with +`-Zallow-features=test-dummy-unstable`. + +The list of features passed to cargo's `-Zallow-features` is also passed +to any Rust tools that cargo ends up calling (like `rustc` or +`rustdoc`). Thus, if you run `cargo -Zallow-features=`, no unstable +Cargo _or_ Rust features can be used. + ### extra-link-arg * Original Pull Request: [#7811](https://github.com/rust-lang/cargo/pull/7811) @@ -765,8 +791,11 @@ strip = "debuginfo" ``` -Other possible values of `strip` are `none` and `symbols`. 
The default is -`none`. +Other possible string values of `strip` are `none`, `symbols`, and `off`. The default is `none`. + +You can also configure this option with the two absolute boolean values +`true` and `false`. The former enables `strip` at its higher level, `symbols`, +whilst the latter disables `strip` completely. ### rustdoc-map * Tracking Issue: [#8296](https://github.com/rust-lang/cargo/issues/8296) @@ -866,6 +895,26 @@ dependency. However, unlike the normal `serde/std` syntax, it will not enable the optional dependency `serde` unless something else has included it. +### per-package-target + +The `per-package-target` feature adds two keys to the manifest: +`package.default-target` and `package.forced-target`. The first makes +the package be compiled by default (i.e. when no `--target` argument is +passed) for some target. The second one makes the package always be +compiled for the target. + +Example: + +```toml +[package] +forced-target = "wasm32-unknown-unknown" +``` + +In this example, the crate is always built for +`wasm32-unknown-unknown`, for instance because it is going to be used +as a plugin for a main program that runs on the host (or provided on +the command line) target. + ### credential-process * Tracking Issue: [#8933](https://github.com/rust-lang/cargo/issues/8933) * RFC: [#2730](https://github.com/rust-lang/rfcs/pull/2730) @@ -1061,6 +1110,109 @@ rust-version = "1.42" ``` +### edition 2021 + +Support for the 2021 [edition] can be enabled by adding the `edition2021` +unstable feature to the top of `Cargo.toml`: + +```toml +cargo-features = ["edition2021"] + +[package] +name = "my-package" +version = "0.1.0" +edition = "2021" +``` + +If you want to transition an existing project from a previous edition, then +`cargo fix --edition` can be used on the nightly channel. After running `cargo +fix`, you can switch the edition to 2021 as illustrated above. 
+ +This feature is very unstable, and is only intended for early testing and +experimentation. Future nightly releases may introduce changes for the 2021 +edition that may break your build. + +The 2021 edition will set the default [resolver version] to "2". + +[edition]: ../../edition-guide/index.html +[resolver version]: resolver.md#resolver-versions + +### future incompat report +* RFC: [#2834](https://github.com/rust-lang/rfcs/blob/master/text/2834-cargo-report-future-incompat.md) +* rustc Tracking Issue: [#71249](https://github.com/rust-lang/rust/issues/71249) + +The `-Z future-incompat-report` flag enables the creation of a future-incompat report +for all dependencies. This makes users aware if any of their crate's dependencies +might stop compiling with a future version of Rust. + +### configurable-env +* Original Pull Request: [#9175](https://github.com/rust-lang/cargo/pull/9175) + +The `-Z configurable-env` flag enables the `[env]` section in the +`.cargo/config.toml` file. This section allows you to set additional environment +variables for build scripts, rustc invocations, `cargo run` and `cargo build`. + +```toml +[env] +OPENSSL_DIR = "/opt/openssl" +``` + +By default, the variables specified will not override values that already exist +in the environment. This behavior can be changed by setting the `force` flag. + +Setting the `relative` flag evaluates the value as a config-relative path that +is relative to the parent directory of the `.cargo` directory that contains the +`config.toml` file. The value of the environment variable will be the full +absolute path. 
+ +```toml +[env] +TMPDIR = { value = "/home/tmp", force = true } +OPENSSL_DIR = { value = "vendor/openssl", relative = true } +``` + +### patch-in-config +* Original Pull Request: [#9204](https://github.com/rust-lang/cargo/pull/9204) +* Tracking Issue: [#9269](https://github.com/rust-lang/cargo/issues/9269) + +The `-Z patch-in-config` flag enables the use of `[patch]` sections in +cargo configuration files (`.cargo/config.toml`). The format of such +`[patch]` sections is identical to the one used in `Cargo.toml`. + +Since `.cargo/config.toml` files are not usually checked into source +control, you should prefer patching using `Cargo.toml` where possible to +ensure that other developers can compile your crate in their own +environments. Patching through cargo configuration files is generally +only appropriate when the patch section is automatically generated by an +external build tool. + +If a given dependency is patched both in a cargo configuration file and +a `Cargo.toml` file, the patch in `Cargo.toml` is used. If multiple +configuration files patch the same dependency, standard cargo +configuration merging is used, which prefers the value defined closest +to the current directory, with `$HOME/.cargo/config.toml` taking the +lowest precedence. + +Relative `path` dependencies in such a `[patch]` section are resolved +relative to the configuration file they appear in. + +## `cargo config` + +* Original Issue: [#2362](https://github.com/rust-lang/cargo/issues/2362) +* Tracking Issue: [#9301](https://github.com/rust-lang/cargo/issues/9301) + +The `cargo config` subcommand provides a way to display the configuration +files that cargo loads. It currently includes the `get` subcommand which +can take an optional config value to display. + +```console +cargo +nightly -Zunstable-options config get build.rustflags +``` + +If no config value is included, it will display all config values. See the +`--help` output for more options available. + +