mirror of
https://github.com/rust-lang/cargo.git
synced 2026-05-06 08:36:38 -04:00
chore: Address typos
This commit is contained in:
@@ -357,7 +357,7 @@ fn build_dir_ignored_path_patterns() -> Vec<String> {
|
||||
// Ignore MacOS debug symbols as there are many files/directories that would clutter up
|
||||
// tests for not a lot of benefit.
|
||||
"[..].dSYM/[..]",
|
||||
// Ignore Windows debub symbols files (.pdb)
|
||||
// Ignore Windows debug symbols files (.pdb)
|
||||
"[..].pdb",
|
||||
]
|
||||
.into_iter()
|
||||
|
||||
@@ -110,7 +110,7 @@ impl TomlLockfileSourceId {
|
||||
.ok_or_else(|| TomlLockfileSourceIdErrorKind::InvalidSource(source.clone()))?;
|
||||
|
||||
// Sparse URLs store the kind prefix (sparse+) in the URL. Therefore, for sparse kinds, we
|
||||
// want to use the raw `source` instead of the splitted `url`.
|
||||
// want to use the raw `source` instead of the split `url`.
|
||||
let url = Url::parse(if kind == "sparse" { &source } else { url }).map_err(|msg| {
|
||||
TomlLockfileSourceIdErrorKind::InvalidUrl {
|
||||
url: url.to_string(),
|
||||
|
||||
@@ -235,16 +235,16 @@ impl<'a, 'gctx: 'a> CompilationFiles<'a, 'gctx> {
|
||||
/// Note that some units may share the same directory, so care should be
|
||||
/// taken in those cases!
|
||||
fn pkg_dir(&self, unit: &Unit) -> String {
|
||||
let seperator = match self.ws.gctx().cli_unstable().build_dir_new_layout {
|
||||
let separator = match self.ws.gctx().cli_unstable().build_dir_new_layout {
|
||||
true => "/",
|
||||
false => "-",
|
||||
};
|
||||
let name = unit.pkg.package_id().name();
|
||||
let meta = self.metas[unit];
|
||||
if let Some(c_extra_filename) = meta.c_extra_filename() {
|
||||
format!("{}{}{}", name, seperator, c_extra_filename)
|
||||
format!("{}{}{}", name, separator, c_extra_filename)
|
||||
} else {
|
||||
format!("{}{}{}", name, seperator, self.target_short_hash(unit))
|
||||
format!("{}{}{}", name, separator, self.target_short_hash(unit))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1069,18 +1069,18 @@ impl Fingerprint {
|
||||
}
|
||||
(
|
||||
LocalFingerprint::CheckDepInfo {
|
||||
dep_info: adep,
|
||||
dep_info: a_dep,
|
||||
checksum: checksum_a,
|
||||
},
|
||||
LocalFingerprint::CheckDepInfo {
|
||||
dep_info: bdep,
|
||||
dep_info: b_dep,
|
||||
checksum: checksum_b,
|
||||
},
|
||||
) => {
|
||||
if adep != bdep {
|
||||
if a_dep != b_dep {
|
||||
return DirtyReason::DepInfoOutputChanged {
|
||||
old: bdep.clone(),
|
||||
new: adep.clone(),
|
||||
old: b_dep.clone(),
|
||||
new: a_dep.clone(),
|
||||
};
|
||||
}
|
||||
if checksum_a != checksum_b {
|
||||
@@ -1089,48 +1089,48 @@ impl Fingerprint {
|
||||
}
|
||||
(
|
||||
LocalFingerprint::RerunIfChanged {
|
||||
output: aout,
|
||||
paths: apaths,
|
||||
output: a_out,
|
||||
paths: a_paths,
|
||||
},
|
||||
LocalFingerprint::RerunIfChanged {
|
||||
output: bout,
|
||||
paths: bpaths,
|
||||
output: b_out,
|
||||
paths: b_paths,
|
||||
},
|
||||
) => {
|
||||
if aout != bout {
|
||||
if a_out != b_out {
|
||||
return DirtyReason::RerunIfChangedOutputFileChanged {
|
||||
old: bout.clone(),
|
||||
new: aout.clone(),
|
||||
old: b_out.clone(),
|
||||
new: a_out.clone(),
|
||||
};
|
||||
}
|
||||
if apaths != bpaths {
|
||||
if a_paths != b_paths {
|
||||
return DirtyReason::RerunIfChangedOutputPathsChanged {
|
||||
old: bpaths.clone(),
|
||||
new: apaths.clone(),
|
||||
old: b_paths.clone(),
|
||||
new: a_paths.clone(),
|
||||
};
|
||||
}
|
||||
}
|
||||
(
|
||||
LocalFingerprint::RerunIfEnvChanged {
|
||||
var: akey,
|
||||
val: avalue,
|
||||
var: a_key,
|
||||
val: a_value,
|
||||
},
|
||||
LocalFingerprint::RerunIfEnvChanged {
|
||||
var: bkey,
|
||||
val: bvalue,
|
||||
var: b_key,
|
||||
val: b_value,
|
||||
},
|
||||
) => {
|
||||
if *akey != *bkey {
|
||||
if *a_key != *b_key {
|
||||
return DirtyReason::EnvVarsChanged {
|
||||
old: bkey.clone(),
|
||||
new: akey.clone(),
|
||||
old: b_key.clone(),
|
||||
new: a_key.clone(),
|
||||
};
|
||||
}
|
||||
if *avalue != *bvalue {
|
||||
if *a_value != *b_value {
|
||||
return DirtyReason::EnvVarChanged {
|
||||
name: akey.clone(),
|
||||
old_value: bvalue.clone(),
|
||||
new_value: avalue.clone(),
|
||||
name: a_key.clone(),
|
||||
old_value: b_value.clone(),
|
||||
new_value: a_value.clone(),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1656,7 +1656,7 @@ fn build_deps_args(
|
||||
if build_runner.bcx.gctx.cli_unstable().build_dir_new_layout {
|
||||
let mut map = BTreeMap::new();
|
||||
|
||||
// Recursively add all depenendency args to rustc process
|
||||
// Recursively add all dependency args to rustc process
|
||||
add_dep_arg(&mut map, build_runner, unit);
|
||||
|
||||
let paths = map.into_iter().map(|(_, path)| path).sorted_unstable();
|
||||
|
||||
@@ -634,7 +634,7 @@ impl<'gctx> Timings<'gctx> {
|
||||
AggregatedSections::Sections(mut sections) => {
|
||||
// We draw the sections in the pipeline graph in a way where the frontend
|
||||
// section has the "default" build color, and then additional sections
|
||||
// (codegen, link) are overlayed on top with a different color.
|
||||
// (codegen, link) are overlaid on top with a different color.
|
||||
// However, there might be some time after the final (usually link) section,
|
||||
// which definitely shouldn't be classified as "Frontend". We thus try to
|
||||
// detect this situation and add a final "Other" section.
|
||||
|
||||
@@ -573,7 +573,7 @@ syntax so it does not have an implicit feature with that name{}",
|
||||
None => ActivateError::Fatal(anyhow::format_err!(
|
||||
"package `{}` does not have feature `{}`
|
||||
|
||||
help: a depednency with that name exists but it is required dependency and only optional dependencies can be used as features.",
|
||||
help: a dependency with that name exists but it is required dependency and only optional dependencies can be used as features.",
|
||||
summary.package_id(),
|
||||
feat,
|
||||
)),
|
||||
|
||||
@@ -366,7 +366,7 @@ impl Shell {
|
||||
fn file_hyperlink(&mut self, path: &std::path::Path) -> Option<url::Url> {
|
||||
let mut url = url::Url::from_file_path(path).ok()?;
|
||||
// Do a best-effort of setting the host in the URL to avoid issues with opening a link
|
||||
// scoped to the computer you've SSHed into
|
||||
// scoped to the computer you've SSH'ed into
|
||||
let hostname = if cfg!(windows) {
|
||||
// Not supported correctly on windows
|
||||
None
|
||||
|
||||
@@ -1327,7 +1327,7 @@ impl<'gctx> Workspace<'gctx> {
|
||||
|
||||
// This is a short term hack to allow `blanket_hint_mostly_unused`
|
||||
// to run without requiring `-Zcargo-lints`, which should hopefully
|
||||
// improve the testing expierience while we are collecting feedback
|
||||
// improve the testing experience while we are collecting feedback
|
||||
if self.gctx.cli_unstable().profile_hint_mostly_unused {
|
||||
blanket_hint_mostly_unused(
|
||||
self.root_maybe(),
|
||||
|
||||
@@ -102,12 +102,12 @@ impl CompileFilter {
|
||||
lib_only: bool,
|
||||
bins: Vec<String>,
|
||||
all_bins: bool,
|
||||
tsts: Vec<String>,
|
||||
all_tsts: bool,
|
||||
exms: Vec<String>,
|
||||
all_exms: bool,
|
||||
bens: Vec<String>,
|
||||
all_bens: bool,
|
||||
tests: Vec<String>,
|
||||
all_tests: bool,
|
||||
examples: Vec<String>,
|
||||
all_examples: bool,
|
||||
benches: Vec<String>,
|
||||
all_benches: bool,
|
||||
all_targets: bool,
|
||||
) -> CompileFilter {
|
||||
if all_targets {
|
||||
@@ -119,34 +119,34 @@ impl CompileFilter {
|
||||
LibRule::False
|
||||
};
|
||||
let rule_bins = FilterRule::new(bins, all_bins);
|
||||
let rule_tsts = FilterRule::new(tsts, all_tsts);
|
||||
let rule_exms = FilterRule::new(exms, all_exms);
|
||||
let rule_bens = FilterRule::new(bens, all_bens);
|
||||
let rule_tests = FilterRule::new(tests, all_tests);
|
||||
let rule_examples = FilterRule::new(examples, all_examples);
|
||||
let rule_benches = FilterRule::new(benches, all_benches);
|
||||
|
||||
CompileFilter::new(rule_lib, rule_bins, rule_tsts, rule_exms, rule_bens)
|
||||
CompileFilter::new(rule_lib, rule_bins, rule_tests, rule_examples, rule_benches)
|
||||
}
|
||||
|
||||
/// Constructs a filter from underlying primitives.
|
||||
pub fn new(
|
||||
rule_lib: LibRule,
|
||||
rule_bins: FilterRule,
|
||||
rule_tsts: FilterRule,
|
||||
rule_exms: FilterRule,
|
||||
rule_bens: FilterRule,
|
||||
rule_tests: FilterRule,
|
||||
rule_examples: FilterRule,
|
||||
rule_benches: FilterRule,
|
||||
) -> CompileFilter {
|
||||
if rule_lib == LibRule::True
|
||||
|| rule_bins.is_specific()
|
||||
|| rule_tsts.is_specific()
|
||||
|| rule_exms.is_specific()
|
||||
|| rule_bens.is_specific()
|
||||
|| rule_tests.is_specific()
|
||||
|| rule_examples.is_specific()
|
||||
|| rule_benches.is_specific()
|
||||
{
|
||||
CompileFilter::Only {
|
||||
all_targets: false,
|
||||
lib: rule_lib,
|
||||
bins: rule_bins,
|
||||
examples: rule_exms,
|
||||
benches: rule_bens,
|
||||
tests: rule_tsts,
|
||||
examples: rule_examples,
|
||||
benches: rule_benches,
|
||||
tests: rule_tests,
|
||||
}
|
||||
} else {
|
||||
CompileFilter::Default {
|
||||
|
||||
@@ -919,7 +919,7 @@ fn tar(
|
||||
header.set_size(contents.len() as u64);
|
||||
// We need to have the same DETERMINISTIC_TIMESTAMP for generated files
|
||||
// https://github.com/alexcrichton/tar-rs/blob/d0261f1f6cc959ba0758e7236b3fd81e90dd1dc6/src/header.rs#L18-L24
|
||||
// Unfortunately tar-rs doesn't expose that so we harcode the timestamp here.
|
||||
// Unfortunately tar-rs doesn't expose that so we hardcode the timestamp here.
|
||||
// Hardcoded value should be removed once alexcrichton/tar-rs#420 is merged and released.
|
||||
// See also rust-lang/cargo#16237
|
||||
header.set_mtime(1153704088);
|
||||
|
||||
@@ -308,7 +308,7 @@ fn sync(
|
||||
// This fallback works for cases where `fs::rename` sometimes fails in a specific situation, such as:
|
||||
// - In Windows 10 versions earlier than 1607, the destination of `fs::rename` can't be a directory in older versions.
|
||||
// - `from` and `to` are on separate filesystems.
|
||||
// - AntiVirus or our system indexer are doing stuf simutaneously.
|
||||
// - AntiVirus or our system indexer are doing stuff simultaneously.
|
||||
// - Any other reasons documented in std::fs::rename.
|
||||
tracing::warn!("failed to `mv {unpacked_src:?} {dst:?}`: {e}");
|
||||
let paths: Vec<_> = walkdir(&unpacked_src).map(|e| e.into_path()).collect();
|
||||
|
||||
@@ -200,7 +200,7 @@ impl<'de, 'gctx> de::Deserializer<'de> for Deserializer<'gctx> {
|
||||
let vals: Vec<String> = res
|
||||
.into_iter()
|
||||
.map(|val| match val {
|
||||
CV::String(s, _defintion) => Ok(s),
|
||||
CV::String(s, _definition) => Ok(s),
|
||||
other => Err(ConfigError::expected(&self.key, "string", &other)),
|
||||
})
|
||||
.collect::<Result<_, _>>()?;
|
||||
|
||||
@@ -118,7 +118,7 @@ pub enum KeyOrIdx {
|
||||
Idx(usize),
|
||||
}
|
||||
|
||||
/// Tracks the key path to an item in an array for detailed errro context.
|
||||
/// Tracks the key path to an item in an array for detailed error context.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ArrayItemKeyPath {
|
||||
base: ConfigKey,
|
||||
|
||||
@@ -1310,7 +1310,7 @@ pub fn to_real_manifest(
|
||||
let edition_msrv = RustVersion::try_from(edition_msrv).unwrap();
|
||||
if !edition_msrv.is_compatible_with(pkg_msrv.as_partial()) {
|
||||
bail!(
|
||||
"rust-version {} is imcompatible with the version ({}) required by \
|
||||
"rust-version {} is incompatible with the version ({}) required by \
|
||||
the specified edition ({})",
|
||||
pkg_msrv,
|
||||
edition_msrv,
|
||||
|
||||
@@ -3206,7 +3206,7 @@
|
||||
Some notable changes:
|
||||
- Renamed `credential-process` to `credential-provider` in Cargo configurations.
|
||||
- New JSON protocol for communicating with external credential providers via stdin/stdout.
|
||||
- The GNOME Secert provider now dynamically loads `libsecert`.
|
||||
- The GNOME Secret provider now dynamically loads `libsecret`.
|
||||
- The 1password provider is no longer built-in.
|
||||
- Changed the unstable key for asymmetric tokens from `registry-auth` to `credential-process`.
|
||||
- ❗️ Removed `--keep-going` flag support from `cargo test` and `cargo bench`.
|
||||
|
||||
@@ -132,7 +132,7 @@ When invoking `cargo`,
|
||||
However, when contributing to an application,
|
||||
you may need to build and test various packages within the application,
|
||||
which can cause extraneous rebuilds because different sets of features may be activated for common dependencies.
|
||||
With [`feauture-unification`][feature-unification],
|
||||
With [`feature-unification`][feature-unification],
|
||||
you can reuse more dependency builds by ensuring the same set of dependency features are activated,
|
||||
independent of which package you are currently building and testing.
|
||||
|
||||
|
||||
@@ -383,7 +383,7 @@ Here's an example:
|
||||
# Cargo.toml
|
||||
|
||||
[package]
|
||||
name = "zuser"
|
||||
name = "z_user"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
@@ -403,12 +403,12 @@ script:
|
||||
|
||||
fn main() {
|
||||
let mut cfg = cc::Build::new();
|
||||
cfg.file("src/zuser.c");
|
||||
cfg.file("src/z_user.c");
|
||||
if let Some(include) = std::env::var_os("DEP_Z_INCLUDE") {
|
||||
cfg.include(include);
|
||||
}
|
||||
cfg.compile("zuser");
|
||||
println!("cargo::rerun-if-changed=src/zuser.c");
|
||||
cfg.compile("z_user");
|
||||
println!("cargo::rerun-if-changed=src/z_user.c");
|
||||
}
|
||||
```
|
||||
|
||||
@@ -417,7 +417,7 @@ the zlib header, and it should find the header, even on systems where it isn't
|
||||
already installed.
|
||||
|
||||
```c
|
||||
// src/zuser.c
|
||||
// src/z_user.c
|
||||
|
||||
#include "zlib.h"
|
||||
|
||||
|
||||
@@ -804,14 +804,14 @@ fn unused_keys() {
|
||||
version = "0.5.0"
|
||||
edition = "2015"
|
||||
authors = ["wycats@example.com"]
|
||||
bulid = "foo"
|
||||
unused = "foo"
|
||||
"#,
|
||||
)
|
||||
.file("src/lib.rs", "pub fn foo() {}")
|
||||
.build();
|
||||
p.cargo("check")
|
||||
.with_stderr_data(str![[r#"
|
||||
[WARNING] unused manifest key: package.bulid
|
||||
[WARNING] unused manifest key: package.unused
|
||||
[CHECKING] foo v0.5.0 ([ROOT]/foo)
|
||||
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
|
||||
|
||||
@@ -854,7 +854,7 @@ fn unused_keys_in_virtual_manifest() {
|
||||
r#"
|
||||
[workspace]
|
||||
members = ["bar"]
|
||||
bulid = "foo"
|
||||
unused = "foo"
|
||||
"#,
|
||||
)
|
||||
.file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
|
||||
@@ -862,7 +862,7 @@ fn unused_keys_in_virtual_manifest() {
|
||||
.build();
|
||||
p.cargo("check --workspace")
|
||||
.with_stderr_data(str![[r#"
|
||||
[WARNING] [ROOT]/foo/Cargo.toml: unused manifest key: workspace.bulid
|
||||
[WARNING] [ROOT]/foo/Cargo.toml: unused manifest key: workspace.unused
|
||||
[CHECKING] bar v0.0.1 ([ROOT]/foo/bar)
|
||||
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
|
||||
|
||||
|
||||
@@ -448,7 +448,7 @@ fn cli_activates_required_dependency() {
|
||||
[LOCKING] 1 package to latest compatible version
|
||||
[ERROR] package `foo v0.0.1 ([ROOT]/foo)` does not have feature `bar`
|
||||
|
||||
[HELP] a depednency with that name exists but it is required dependency and only optional dependencies can be used as features.
|
||||
[HELP] a dependency with that name exists but it is required dependency and only optional dependencies can be used as features.
|
||||
|
||||
"#]])
|
||||
.with_status(101)
|
||||
|
||||
@@ -1459,16 +1459,16 @@ fn fingerprint_cleaner(mut dir: PathBuf, timestamp: filetime::FileTime) {
|
||||
// effecting any builds that happened since that time stamp.
|
||||
let mut cleaned = false;
|
||||
dir.push(".fingerprint");
|
||||
for fing in fs::read_dir(&dir).unwrap() {
|
||||
let fing = fing.unwrap();
|
||||
for fingerprint in fs::read_dir(&dir).unwrap() {
|
||||
let fingerprint = fingerprint.unwrap();
|
||||
|
||||
let outdated = |f: io::Result<fs::DirEntry>| {
|
||||
filetime::FileTime::from_last_modification_time(&f.unwrap().metadata().unwrap())
|
||||
<= timestamp
|
||||
};
|
||||
if fs::read_dir(fing.path()).unwrap().all(outdated) {
|
||||
fs::remove_dir_all(fing.path()).unwrap();
|
||||
println!("remove: {:?}", fing.path());
|
||||
if fs::read_dir(fingerprint.path()).unwrap().all(outdated) {
|
||||
fs::remove_dir_all(fingerprint.path()).unwrap();
|
||||
println!("remove: {:?}", fingerprint.path());
|
||||
// a real cleaner would remove the big files in deps and build as well
|
||||
// but fingerprint is sufficient for our tests
|
||||
cleaned = true;
|
||||
|
||||
@@ -2990,7 +2990,7 @@ fn templatedir_doesnt_cause_problems() {
|
||||
&format!(
|
||||
r#"
|
||||
[package]
|
||||
name = "fo"
|
||||
name = "foo"
|
||||
version = "0.5.0"
|
||||
edition = "2015"
|
||||
authors = []
|
||||
|
||||
@@ -445,7 +445,7 @@ fn fetch_index_then_fetch(
|
||||
.run();
|
||||
|
||||
let repo = gix::open_opts(find_remote_index(mode_1st), gix::open::Options::isolated())?;
|
||||
let complete_depth = 2; // initial commmit, bar@1.0.0
|
||||
let complete_depth = 2; // initial commit, bar@1.0.0
|
||||
mode_1st.assert_index(&repo, 1, complete_depth);
|
||||
|
||||
Package::new("bar", "1.1.0").publish();
|
||||
@@ -459,7 +459,7 @@ fn fetch_index_then_fetch(
|
||||
.run();
|
||||
|
||||
let repo = gix::open_opts(find_remote_index(mode_2nd), gix::open::Options::isolated())?;
|
||||
let complete_depth = 3; // initial commmit, bar@1.0.0, and bar@1.1.0
|
||||
let complete_depth = 3; // initial commit, bar@1.0.0, and bar@1.1.0
|
||||
mode_2nd.assert_index(&repo, 1, complete_depth);
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -74,7 +74,7 @@ fn simple() {
|
||||
fn not_found() {
|
||||
setup();
|
||||
// Publish a package so that the directory hierarchy is created.
|
||||
// Note, however, that we declare a dependency on baZ.
|
||||
// Note, however, that we declare a dependency on baz.
|
||||
Package::new("bar", "0.0.1").local(true).publish();
|
||||
|
||||
let p = project()
|
||||
|
||||
@@ -82,7 +82,7 @@ fn rust_version_older_than_edition() {
|
||||
[ERROR] failed to parse manifest at `[ROOT]/foo/Cargo.toml`
|
||||
|
||||
Caused by:
|
||||
rust-version 1.1 is imcompatible with the version (1.31.0) required by the specified edition (2018)
|
||||
rust-version 1.1 is incompatible with the version (1.31.0) required by the specified edition (2018)
|
||||
|
||||
"#]])
|
||||
.run();
|
||||
|
||||
@@ -239,7 +239,7 @@ fn requires_z_flag() {
|
||||
|
||||
#[cargo_test(nightly, reason = "-Zscript is unstable")]
|
||||
fn manifest_parse_error() {
|
||||
// Exagerate the newlines to make it more obvious if the error's line number is off
|
||||
// Exaggerate the newlines to make it more obvious if the error's line number is off
|
||||
let script = r#"#!/usr/bin/env cargo
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user